diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..e8f632af23 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,11 @@ + + +- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc). +- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes. +- [ ] You submit test cases (unit or integration tests) that back your changes. +- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only). diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000..f88b488879 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,73 @@ +name: "CodeQL" + +on: + push: + branches: [ 'main' ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ 'main' ] + schedule: + - cron: '23 3 * * 5' + +jobs: + analyze: + name: Analyze + runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'java' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Use only 'java' to analyze code written in Java, Kotlin or both + # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both + 
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - if: matrix.language == 'java' + name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'adopt' + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
+ + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml new file mode 100644 index 0000000000..a5f764579a --- /dev/null +++ b/.github/workflows/project.yml @@ -0,0 +1,40 @@ +# GitHub Actions to automate GitHub issues for Spring Data Project Management + +name: Spring Data GitHub Issues + +on: + issues: + types: [opened, edited, reopened] + issue_comment: + types: [created] + pull_request_target: + types: [opened, edited, reopened] + +jobs: + Inbox: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null && !contains(join(github.event.issue.labels.*.name, ', '), 'dependency-upgrade') && !contains(github.event.issue.title, 'Release ') + steps: + - name: Create or Update Issue Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Pull-Request: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null + steps: + - name: Create or Update Pull Request Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Feedback-Provided: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && 
contains(toJSON(github.event.issue.labels), 'waiting-for-feedback') + steps: + - name: Update Project Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} diff --git a/.gitignore b/.gitignore index 646c021fab..d9642d2c66 100644 --- a/.gitignore +++ b/.gitignore @@ -2,9 +2,22 @@ target/ .idea/ .settings/ *.iml +.flattened-pom.xml .project .classpath .springBeans .sonar4clipse *.sonar4clipseExternals *.graphml +package-lock.json +.mvn/.develocity +.mvn/maven.config + +build/ +node_modules +node + +#prevent license accepting file to get accidentially commited to git +container-license-acceptance.txt +spring-data-jdbc/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java +spring-data-r2dbc/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml new file mode 100644 index 0000000000..e0857eaa25 --- /dev/null +++ b/.mvn/extensions.xml @@ -0,0 +1,8 @@ + + + + io.spring.develocity.conventions + develocity-conventions-maven-extension + 0.0.22 + + diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..e27f6e8f5e --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,14 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED 
+--add-opens=java.base/java.util=ALL-UNNAMED +--add-opens=java.base/java.lang.reflect=ALL-UNNAMED +--add-opens=java.base/java.text=ALL-UNNAMED +--add-opens=java.desktop/java.awt.font=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar new file mode 100755 index 0000000000..01e6799737 Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100755 index 0000000000..5af18f05b4 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,2 @@ +#Thu Nov 07 09:47:30 CET 2024 +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index c926555157..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,22 +0,0 @@ -language: java - -jdk: - - oraclejdk8 - -addons: - apt: - packages: - - oracle-java8-installer - -cache: - directories: - - $HOME/.m2 - -sudo: false - -services: - - docker - -install: true - -script: "mvn clean dependency:list test -Pall-dbs -Dsort -U" diff --git a/CI.adoc b/CI.adoc new file mode 100644 index 0000000000..36dd46d897 --- /dev/null +++ b/CI.adoc @@ -0,0 +1,30 @@ += Continuous Integration + +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-jdbc%2Fmain&subject=Moore%20(main)["Spring Data JDBC", link="https://jenkins.spring.io/view/SpringData/job/spring-data-jdbc/"] +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-jdbc%2F1.0.x&subject=Lovelace%20(1.0.x)["Spring Data JDBC", link="https://jenkins.spring.io/view/SpringData/job/spring-data-jdbc/"] + +== Running CI tasks locally + +Since this pipeline is purely Docker-based, it's easy to: + +* Debug what went wrong on your local machine. +* Test out a tweak to your `test.sh` script before sending it out. +* Experiment against a new image before submitting your pull request. 
+ +All of these use cases are great reasons to essentially run what the CI server does on your local machine. + +IMPORTANT: To do this you must have Docker installed on your machine. + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-jdbc-github -v /usr/bin/docker:/usr/bin/docker -v /var/run/docker.sock:/var/run/docker.sock adoptopenjdk/openjdk8:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-jdbc-github`. ++ +2. `cd spring-data-jdbc-github` ++ +Next, test everything from inside the container: ++ +3. `./mvnw -Pci,all-dbs clean dependency:list test -Dsort -B` (or whatever test configuration you must use) + +Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. + +NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc deleted file mode 100644 index f64fb1b7a5..0000000000 --- a/CODE_OF_CONDUCT.adoc +++ /dev/null @@ -1,27 +0,0 @@ -= Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. - -We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. 
- -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, such as physical or electronic addresses, - without explicit permission -* Other unethical or unprofessional conduct - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. - -By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io. -All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. -Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident. - -This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/]. 
\ No newline at end of file diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc new file mode 100644 index 0000000000..740e8bd0bb --- /dev/null +++ b/CONTRIBUTING.adoc @@ -0,0 +1,3 @@ += Spring Data contribution guidelines + +You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here]. diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000..8919ba10f4 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,134 @@ +def p = [:] +node { + checkout scm + p = readProperties interpolate: true, file: 'ci/pipeline.properties' +} + +pipeline { + agent none + + triggers { + pollSCM 'H/10 * * * *' + upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS) + } + + options { + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '14')) + } + + stages { + stage("test: baseline (main)") { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + TESTCONTAINERS_IMAGE_SUBSTITUTOR = 'org.springframework.data.ProxyImageNameSubstitutor' + } + + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh "PROFILE=all-dbs JENKINS_USER_NAME=${p['jenkins.user.name']} ci/test.sh" + sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh" + } + } + } + } + } + + stage("Test other configurations") { + when { + beforeAgent(true) + allOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + parallel { + stage("test: baseline (next)") { + 
agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + TESTCONTAINERS_IMAGE_SUBSTITUTOR = 'org.springframework.data.ProxyImageNameSubstitutor' + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.next.image']).inside(p['docker.java.inside.docker']) { + sh "PROFILE=all-dbs JENKINS_USER_NAME=${p['jenkins.user.name']} ci/test.sh" + sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh" + } + } + } + } + } + } + } + + stage('Release to artifactory') { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 20, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Pci,artifactory " + + "-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " + + "-Dartifactory.server=${p['artifactory.url']} " + + "-Dartifactory.username=${ARTIFACTORY_USR} " + + "-Dartifactory.password=${ARTIFACTORY_PSW} " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + + "-Dartifactory.build-name=spring-data-relational " + + "-Dartifactory.build-number=spring-data-relational-${BRANCH_NAME}-build-${BUILD_NUMBER} " + + "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-jdbc " + + "-Dmaven.test.skip=true clean deploy -U -B" + 
} + } + } + } + } + } + + post { + changed { + script { + emailext( + subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", + mimeType: 'text/html', + recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']], + body: "${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}") + } + } + } +} diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..ff77379631 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.adoc b/README.adoc index 42ae7cf71f..1764530f46 100644 --- a/README.adoc +++ b/README.adoc @@ -1,347 +1,290 @@ -= Spring Data JDBC += Spring Data Relational image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-jdbc%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-jdbc/] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring%20Data%20Relational%20Parent"] -The primary goal of the http://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use data access technologies. *Spring Data JDBC* offers the popular Repository abstraction based on JDBC. +The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. -== This is NOT an ORM +Spring Data Relational, part of the larger Spring Data family, makes it easy to implement repositories for SQL databases. +This module deals with enhanced support for JDBC and R2DBC based data access layers. 
+It makes it easier to build Spring powered applications that use data access technologies. -Spring Data JDBC does not try to be an ORM. It is not a competitor to JPA. -Instead it is more of a construction kit for your personal ORM that you can define the way you like or need it. +It aims at being conceptually easy. +In order to achieve this it does NOT offer caching, lazy loading, write behind or many other features of JPA. +This makes Spring Data JDBC and Spring Data R2DBC a simple, limited, opinionated ORM. -This means that it does rather little out of the box. -But it offers plenty of places where you can put your own logic, or integrate it with the technology of your choice for generating SQL statements. +== Features -== The Aggregate Root +* Implementation of CRUD methods for Aggregates. +* `@Query` annotation +* Support for transparent auditing (created, last changed) +* Events for persistence events +* Possibility to integrate custom repository code +* JavaConfig based repository configuration through `@EnableJdbcRepositories` respective `@EnableR2dbcRepositories` +* JDBC-only: Integration with MyBatis -Spring Data repositories are inspired by the repository as described in the book Domain Driven Design by Eric Evans. -One consequence of this is that you should have a repository per Aggregate Root. -Aggregate Root is another concept from the same book and describes an entity which controls the lifecycle of other entities which together are an Aggregate. -An Aggregate is a subset of your model which is consistent between method calls to your Aggregate Root. +== Code of Conduct -Spring Data JDBC tries its best to encourage modelling your domain along these ideas. +This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. 
Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. -== Maven Coordinates +== Getting Started with JDBC -[source,xml] ----- - - org.springframework.data - spring-data-jdbc - 1.0.0.BUILD-SNAPSHOT - ----- - -== Features +Here is a quick teaser of an application using Spring Data JDBC Repositories in Java: -=== CRUD operations - -In order use Spring Data JDBC you need the following: - -1. An entity with an attribute marked as _id_ using the spring datas https://docs.spring.io/spring-data/commons/docs/current/api/org/springframework/data/annotation/Id.html[`@Id`] annotation. -+ [source,java] ---- - public class Person { - @Id - Integer id; - } ----- -+ -1. A repository -+ -[source,java] ----- -public interface PersonRepository extends CrudRepository {} ----- -+ -1. Add `@EnableJdbcRepositories` to your application context configuration. -1. Make sure your application context contains a bean of type `DataSource`. +interface PersonRepository extends CrudRepository { -Now you can get an instance of the repository interface injected into your beans and use it: - -[source,java] ----- -@Autowired -private PersonRepository repository; + @Query("SELECT * FROM person WHERE lastname = :lastname") + List findByLastname(String lastname); -public void someMethod() { - Person person = repository.save(new Person()); + @Query("SELECT * FROM person WHERE firstname LIKE :firstname") + List findByFirstnameLike(String firstname); } ----- - -==== Supported types in your entity - -Properties of the following types are currently supported: -* all primitive types and their boxed types (`int`, `float`, `Integer`, `Float` ...) +@Service +class MyService { -* enums get mapped to their name. 
+ private final PersonRepository repository; -* `String` + public MyService(PersonRepository repository) { + this.repository = repository; + } -* `java.util.Date`, `java.time.LocalDate`, `java.time.LocalDateTime`, `java.time.LocalTime` + public void doWork() { -and anything your database driver accepts. + repository.deleteAll(); -* references to other entities, which will be considered a one-to-one relationship. -The table of the referenced entity is expected to have an additional column named like the table of the referencing entity. -This name can be changed by implementing `NamingStrategy.getReverseColumnName(JdbcPersistentProperty property)` according to your preferences. + Person person = new Person(); + person.setFirstname("Jens"); + person.setLastname("Schauder"); + repository.save(person); -* `Set` will be considered a one-to-many relationship. -The table of the referenced entity is expected to have an additional column named like the table of the referencing entity. -This name can be changed by implementing `NamingStrategy.getReverseColumnName(JdbcPersistentProperty property)` according to your preferences. - -* `Map` will be considered a qualified one to many relationship. -The table of the referenced entity is expected to have two additional columns: One named like the table of the referencing entity for the foreign key and one with the same name and an additional `_key` suffix for the map key. -This name can be changed by implementing `NamingStrategy.getReverseColumnName(JdbcPersistentProperty property)` and `NamingStrategy.getKeyColumn(JdbcPersistentProperty property)` according to your preferences. - -The handling of referenced entities is very limited. -Part of this is because this project is still before it's first release. + List lastNameResults = repository.findByLastname("Schauder"); + List firstNameResults = repository.findByFirstnameLike("Je%"); + } +} -But another reason is the idea of <> as described above. 
-If you reference another entity that entity is by definition part of your Aggregate. -So if you remove the reference it will get deleted. -This also means references will be 1-1 or 1-n, but not n-1 or n-m. +@Configuration +@EnableJdbcRepositories +class ApplicationConfig extends AbstractJdbcConfiguration { -If your having n-1 or n-m references you are probably dealing with two separate Aggregates. -References between those should be encode as simple ids, which should map just fine with Spring Data JDBC. + @Bean + public DataSource dataSource() { + return …; + } -Also the mapping we offer is very limited for a third reason which already was mentioned at the very beginning of the document: This is not an ORM. -We will offer ways to plug in your own SQL in various ways. -But the default mapping itself will stay limited. -If you want highly customizable mappings which support almost everything one can imagine you will probably be much happier with (Spring Data) JPA. -Which is a very powerful and mature technology. + @Bean + public NamedParameterJdbcTemplate namedParameterJdbcTemplate(DataSource dataSource) { + return new NamedParameterJdbcTemplate(dataSource); + } +} +---- -=== Query annotation +=== Maven configuration -You can annotate a query method with `@Query` to specify a SQL statement to be used for that method. -You can bind method arguments using named parameters in the SQL statement like in the following example: +Add the Maven dependency: -[source,java] +[source,xml] ---- -@Query("SELECT * FROM DUMMYENTITY WHERE name < :upper and name > :lower") -List findByNameRange(@Param("lower") String lower, @Param("upper") String upper); + + org.springframework.data + spring-data-jdbc + ${version} + ---- -If you compile your sources with the `-parameters` compiler flag you can omit the `@Param` annotations. - -=== Id generation - -Spring Data JDBC uses the id to identify entities but also to determine if an entity is new or already existing in the database. 
-If the id is `null` or of a primitive type and `0` or `0.0` the entity is considered new. - -When your data base has some autoincrement column for the id column the generated value will get set in the entity after inserting it into the database. - -There are few ways to tweak this behavior. -If you don't like the logic to distinguish between new and existing entities you can implement https://docs.spring.io/spring-data/commons/docs/current/api/org/springframework/data/domain/Persistable.html[`Persistable`] with your entity and overwrite `isNew()` with your own logic. +If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. -One important constraint is that after saving an entity the entity shouldn't be _new_ anymore. -With autoincrement columns this happens automatically since the the id gets set by Spring Data with the value from the id column. -If you are not using autoincrement columns you can use that using a `BeforeSave`-listener which sets the id of the entity (see below). - -=== NamingStrategy +[source,xml] +---- + + org.springframework.data + spring-data-jdbc + ${version}-SNAPSHOT + -When you use the standard implementations of `CrudRepository` as provided by Spring Data JDBC it will expect a certain table structure. -You can tweak that by providing a https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/mapping/model/NamingStrategy.java[`NamingStrategy`] in your application context. + + spring-snapshot + Spring Snapshot Repository + https://repo.spring.io/snapshot + +---- -=== Events +== Getting Started with R2DBC -Spring Data Jdbc triggers events which will get publish to any matching `ApplicationListener` in the application context. -For example the following listener will get invoked before an aggregate gets saved. 
+Here is a quick teaser of an application using Spring Data R2DBC Repositories in Java: [source,java] ---- -@Bean -public ApplicationListener timeStampingSaveTime() { +interface PersonRepository extends ReactiveCrudRepository { - return event -> { + @Query("SELECT * FROM person WHERE lastname = :lastname") + Flux findByLastname(String lastname); - Object entity = event.getEntity(); - if (entity instanceof Category) { - Category category = (Category) entity; - category.timeStamp(); - } - }; + @Query("SELECT * FROM person WHERE firstname LIKE :firstname") + Flux findByFirstnameLike(String firstname); } ----- - -.Available events -|=== -| Event | When It's Published - -| https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeDelete.java[`BeforeDelete`] -| before an aggregate root gets deleted. - -| https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/mapping/event/AfterDelete.java[`AfterDelete`] -| after an aggregate root got deleted. -| https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/mapping/event/AfterDelete.java[`BeforeSave`] -| before an aggregate root gets saved, i.e. inserted or updated but after the decision was made if it will get updated or deleted. -The event has a reference to an https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/core/conversion/AggregateChange.java[`AggregateChange`] instance. -The instance can be modified by adding or removing https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/core/conversion/DbAction.java[`DbAction`]s. +@Service +class MyService { -| https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/mapping/event/AfterSave.java[`AfterSave`] -| after an aggregate root gets saved, i.e. 
inserted or updated. + private final PersonRepository repository; -| https://github.com/spring-projects/spring-data-jdbc/blob/master/src/main/java/org/springframework/data/jdbc/mapping/event/AfterDelete.java[`AfterCreation`] -| after an aggregate root got created from a database `ResultSet` and all it's property set -|=== + public MyService(PersonRepository repository) { + this.repository = repository; + } + public Flux doWork() { -=== MyBatis + Person person = new Person(); + person.setFirstname("Jens"); + person.setLastname("Schauder"); + repository.save(person); -For each operation in `CrudRepository` Spring Data Jdbc will execute multiple statements. -If there is a https://github.com/mybatis/mybatis-3/blob/master/src/main/java/org/apache/ibatis/session/SqlSessionFactory.java[`SqlSessionFactory`] in the application context, it will checked if it offers a statement for each step. -If one is found that statement will be used (including its configured mapping to an entity). + Mono deleteAll = repository.deleteAll(); -The name of the statement is constructed by concatenating the fully qualified name of the entity type with `Mapper.` and a string determining the kind of statement. -E.g. if an instance of `org.example.User` is to be inserted Spring Data Jdbc will look for a statement named `org.example.UserMapper.insert`. + Flux lastNameResults = repository.findByLastname("Schauder"); + Flux firstNameResults = repository.findByFirstnameLike("Je%"); -Upon execution of the statement an instance of [`MyBatisContext`] will get passed as an argument which makes various arguments available to the statement. - -[cols="default,default,default,asciidoc"] -|=== -| Name | Purpose | CrudRepository methods which might trigger this statement | Attributes available in the `MyBatisContext` - -| `insert` | Insert for a single entity. This also applies for entities referenced by the aggregate root. | `save`, `saveAll`. 
| -`getInstance`: - the instance to be saved - -`getDomainType`: the type of the entity to be saved. - -`get()`: id of the referencing entity, where `` is the name of the back reference column as provided by the `NamingStrategy`. - - -| `update` | Update for a single entity. This also applies for entities referenced by the aggregate root. | `save`, `saveAll`.| -`getInstance`: the instance to be saved - -`getDomainType`: the type of the entity to be saved. - -| `delete` | Delete a single entity. | `delete`, `deleteById`.| -`getId`: the id of the instance to be deleted - -`getDomainType`: the type of the entity to be deleted. - -| `deleteAll.` | Delete all entities referenced by any aggregate root of the type used as prefix via the given property path. -Note that the type used for prefixing the statement name is the name of the aggregate root not the one of the entity to be deleted. | `deleteAll`.| - -`getDomainType`: the type of the entities to be deleted. - -| `deleteAll` | Delete all aggregate roots of the type used as the prefix | `deleteAll`.| - -`getDomainType`: the type of the entities to be deleted. - -| `delete.` | Delete all entities referenced by an aggregate root via the given propertyPath | `deleteById`.| - -`getId`: the id of the aggregate root for which referenced entities are to be deleted. - -`getDomainType`: the type of the entities to be deleted. - - -| `findById` | Select an aggregate root by id | `findById`.| - -`getId`: the id of the entity to load. - -`getDomainType`: the type of the entity to load. - -| `findAll` | Select all aggregate roots | `findAll`.| - -`getDomainType`: the type of the entity to load. - -| `findAllById` | Select a set of aggregate roots by ids | `findAllById`.| + return deleteAll.thenMany(lastNameResults.concatWith(firstNameResults)); + } +} -`getId`: list of ids of the entities to load. 
+@Configuration +@EnableR2dbcRepositories +class ApplicationConfig extends AbstractR2dbcConfiguration { -`getDomainType`: the type of the entity to load. + @Bean + public ConnectionFactory connectionFactory() { + return ConnectionFactories.get("r2dbc:://:/"); + } +} +---- -| `findAllByProperty.` | Select a set of entities that is referenced by another entity. The type of the referencing entity is used for the prefix. The referenced entities type as the suffix. | All `find*` methods.| +=== Maven configuration -`getId`: the id of the entity referencing the entities to be loaded. +Add the Maven dependency: -`getDomainType`: the type of the entity to load. +[source,xml] +---- + + org.springframework.data + spring-data-r2dbc + ${version} + +---- -| `count` | Count the number of aggregate root of the type used as prefix | `count` | +If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. -`getDomainType` the type of aggregate roots to count. -|=== +[source,xml] +---- + + org.springframework.data + spring-data-r2dbc + ${version}-SNAPSHOT + -== Features planned for the not to far future + + spring-libs-snapshot + Spring Snapshot Repository + https://repo.spring.io/snapshot + +---- -=== Advance query annotation support +== Getting Help -* customizable `RowMapper` -* projections -* modifying queries -* SpEL expressions +Having trouble with Spring Data? +We’d love to help! -=== MyBatis per method support +* If you are new to Spring Data JDBC read the following two articles https://spring.io/blog/2018/09/17/introducing-spring-data-jdbc["Introducing Spring Data JDBC"] and https://spring.io/blog/2018/09/24/spring-data-jdbc-references-and-aggregates["Spring Data JDBC, References, and Aggregates"]. +* Check the +https://docs.spring.io/spring-data/relational/reference/[reference documentation], and https://docs.spring.io/spring-data/jdbc/docs/current/api/[Javadocs]. 
+* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. +If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. +* If you are upgrading, check out the https://github.com/spring-projects/spring-data-relational/releases[changelog] for "`new and noteworthy`" features. +* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data`]. -The current MyBatis supported is rather elaborate in that it allows to execute multiple statements for a single method call. -But sometimes less is more and it should be possible to annotate a method with a simple annotation to identify a SQL statement in a MyBatis mapping to be executed. +== Reporting Issues -=== Support of lists in entities +Spring Data uses GitHub as issue tracking system to record bugs and feature requests.If you want to raise an issue, please follow the recommendations below: -== Spring Boot integration +* Before you log a bug, please search the Spring Data JDBCs https://github.com/spring-projects/spring-data-relational/issues[issue tracker] to see if someone has already reported the problem. +* If the issue doesn’t already exist, https://github.com/spring-projects/spring-data-relational/issues/new[create a new issue]. +* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using and JVM version. +Please include full stack traces when applicable. +* If you need to paste code, or include a stack trace use triple backticks before and after your text. +* If possible try to create a test-case or project that replicates the issue. +Attach a link to your code or a compressed file containing your code. +Use an in-memory database when possible. 
+If you need a different database include the setup using https://github.com/testcontainers[Testcontainers] in your test. -There is https://github.com/schauder/spring-data-jdbc-boot-starter[preliminary Spring Boot integration]. +== Building from Source -Currently you will need to build it locally. +You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper]. +You also need JDK 17. -== Getting Help +[source,bash] +---- + $ ./mvnw clean install +---- -Right now the best source of information is the source code in this repository. -Especially the integration tests (When you are reading this on github type `t` and then `IntegrationTests.java`) +If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above]. -We are keeping an eye on the (soon to be created) https://stackoverflow.com/questions/tagged/spring-data-jdbc[spring-data-jdbc tag on stackoverflow]. +_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._ -If you think you found a bug, or have a feature request please https://jira.spring.io/browse/DATAJDBC/?selectedTab=com.atlassian.jira.jira-projects-plugin:summary-panel[create a ticket in our issue tracker]. +=== Running Integration Tests -== Execute Tests +[source,bash] +---- + $ ./mvnw clean install +---- -=== Fast running tests +Runs integration test against a single in memory database. -Fast running tests can executed with a simple +To run integration tests against all supported databases specify the Maven Profile `all-dbs`. 
-[source] +[source,bash] ---- -mvn test +./mvnw clean install -Pall-dbs ---- -This will execute unit tests and integration tests using an in-memory database. - -=== Running tests with a real database +This requires an appropriate `container-license-acceptance.txt` to be on the classpath, signaling that you accept the license of the databases used. -To run the integration tests against a specific database you need to have the database running on your local machine and then execute. +If you don't want to accept these licences you may add the Maven Profile `ignore-missing-license`. +This will ignore the tests that require an explicit license acceptance. -[source] +[source,bash] ---- -mvn test -Dspring.profiles.active= +./mvnw clean install -Pall-dbs,ignore-missing-license ---- -This will also execute the unit tests. +If you want to run an integration tests against a different database you can do so by activating an apropriate Spring Profile. +Available are the following Spring Profiles: -Currently the following _databasetypes_ are available: +`db2`, `h2`, `hsql` (default), `mariadb`, `mssql`, `mysql`, `oracle`, `postgres` -* hsql (default, does not require a running database) -* mysql -* postgres +=== Building reference documentation -=== Run tests with all databases +Building the documentation builds also the project without running tests. -[source] +[source,bash] ---- -mvn test -Pall-dbs + $ ./mvnw clean install -Pantora ---- -This will execute the unit tests, and all the integration tests with all the databases we currently support for testing. The databases must be running. +The generated documentation is available from `spring-data-jdbc-distribution/target/antora/site/index.html`. + +== Modules + +There are a number of modules in this project, here is a quick overview: + +* Spring Data Relational: Common infrastructure abstracting general aspects of relational database access. 
+* link:spring-data-jdbc[Spring Data JDBC]: Repository support for JDBC-based datasources. +* link:spring-data-r2dbc[Spring Data R2DBC]: Repository support for R2DBC-based datasources. -== Contributing to Spring Data JDBC +== Examples -Here are some ways for you to get involved in the community: +* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail. -* Get involved with the Spring community by helping out on http://stackoverflow.com/questions/tagged/spring-data-jdbc[stackoverflow] by responding to questions and joining the debate. -* Create https://jira.spring.io/browse/DATAJDBC[JIRA] tickets for bugs and new features and comment and vote on the ones that you are interested in. -* Github is for social coding: if you want to write code, we encourage contributions through pull requests from http://help.github.com/forking/[forks of this repository]. If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing. -* Watch for upcoming articles on Spring by http://spring.io/blog[subscribing] to spring.io. +== License -Before we accept a non-trivial patch or pull request we will need you to https://cla.pivotal.io/sign/spring[sign the Contributor License Agreement]. Signing the contributor’s agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. If you forget to do so, you'll be reminded when you submit a pull request. Active contributors might be asked to join the core team, and given the ability to merge pull requests. +Spring Data Relational is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license]. 
diff --git a/SECURITY.adoc b/SECURITY.adoc new file mode 100644 index 0000000000..7ec7d0f07a --- /dev/null +++ b/SECURITY.adoc @@ -0,0 +1,9 @@ +# Security Policy + +## Supported Versions + +Please see the https://spring.io/projects/spring-data-jdbc[Spring Data JDBC] project page for supported versions. + +## Reporting a Vulnerability + +Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly. diff --git a/ci/accept-third-party-license.sh b/ci/accept-third-party-license.sh new file mode 100755 index 0000000000..fa45441a6b --- /dev/null +++ b/ci/accept-third-party-license.sh @@ -0,0 +1,15 @@ +#!/bin/sh + +{ + echo "mcr.microsoft.com/mssql/server:2022-latest" + echo "ibmcom/db2:11.5.7.0a" + echo "docker-hub.usw1.packages.broadcom.com/mssql/server:2022-latest" + echo "docker-hub.usw1.packages.broadcom.com/ibmcom/db2:11.5.7.0a" +} > spring-data-jdbc/src/test/resources/container-license-acceptance.txt + +{ + echo "mcr.microsoft.com/mssql/server:2022-latest" + echo "ibmcom/db2:11.5.7.0a" + echo "docker-hub.usw1.packages.broadcom.com/mssql/server:2022-latest" + echo "docker-hub.usw1.packages.broadcom.com/ibmcom/db2:11.5.7.0a" +} > spring-data-r2dbc/src/test/resources/container-license-acceptance.txt diff --git a/ci/clean.sh b/ci/clean.sh new file mode 100755 index 0000000000..178a62bc78 --- /dev/null +++ b/ci/clean.sh @@ -0,0 +1,8 @@ +#!/bin/bash -x + +set -euo pipefail + +export JENKINS_USER=${JENKINS_USER_NAME} + +MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \ + ./mvnw -s settings.xml -Dscan=false clean -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-jdbc -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root diff --git a/ci/pipeline.properties b/ci/pipeline.properties new file mode 100644 index 0000000000..8dd2295acc --- /dev/null +++ b/ci/pipeline.properties @@ -0,0 +1,32 @@ +# Java versions +java.main.tag=17.0.15_6-jdk-focal 
+java.next.tag=24.0.1_9-jdk-noble + +# Docker container images - standard +docker.java.main.image=library/eclipse-temurin:${java.main.tag} +docker.java.next.image=library/eclipse-temurin:${java.next.tag} + +# Supported versions of MongoDB +docker.mongodb.6.0.version=6.0.23 +docker.mongodb.7.0.version=7.0.20 +docker.mongodb.8.0.version=8.0.9 + +# Supported versions of Redis +docker.redis.6.version=6.2.13 +docker.redis.7.version=7.2.4 +docker.valkey.8.version=8.1.1 + +# Docker environment settings +docker.java.inside.basic=-v $HOME:/tmp/jenkins-home +docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home + +# Credentials +docker.registry= +docker.credentials=hub.docker.com-springbuildmaster +docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com +docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token +artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c +artifactory.url=https://repo.spring.io +artifactory.repository.snapshot=libs-snapshot-local +develocity.access-key=gradle_enterprise_secret_access_key +jenkins.user.name=spring-builds+jenkins diff --git a/ci/test.sh b/ci/test.sh new file mode 100755 index 0000000000..6cdf8602d6 --- /dev/null +++ b/ci/test.sh @@ -0,0 +1,17 @@ +#!/bin/bash -x + +set -euo pipefail + +ci/accept-third-party-license.sh + +echo "Copying ProxyImageNameSubstitutor into JDBC and R2DBC..." 
+cp spring-data-relational/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java spring-data-jdbc/src/test/java/org/springframework/data +cp spring-data-relational/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java spring-data-r2dbc/src/test/java/org/springframework/data + +mkdir -p /tmp/jenkins-home + +export JENKINS_USER=${JENKINS_USER_NAME} + +MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \ + ./mvnw -s settings.xml \ + -P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-jdbc -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root diff --git a/mvnw b/mvnw new file mode 100755 index 0000000000..8b9da3b8b6 --- /dev/null +++ b/mvnw @@ -0,0 +1,286 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven2 Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. 
+ + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + # TODO classpath? +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... 
using wget" + fi + wget "$jarUrl" -O "$wrapperJarPath" + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + curl -o "$wrapperJarPath" "$jarUrl" + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" 
"-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100755 index 0000000000..b3f995811c --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,161 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven2 Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. 
to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. 
+ +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" +FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + echo Found %WRAPPER_JAR% +) else ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... 
+ echo Downloading from: %DOWNLOAD_URL% + powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" + echo Finished downloading %WRAPPER_JAR% +) +@REM End of extension + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml index 634c3fe1f5..027b696714 100644 --- a/pom.xml +++ b/pom.xml @@ -1,42 +1,69 @@ - + 4.0.0 - org.springframework.data - spring-data-jdbc - 1.0.0.BUILD-SNAPSHOT + org.springframework.data + spring-data-relational-parent + 3.5.0-SNAPSHOT + pom - Spring Data JDBC - Spring Data module for JDBC repositories. - http://projects.spring.io/spring-data-jdbc + Spring Data Relational Parent + Parent module for Spring Data Relational repositories. 
+ https://projects.spring.io/spring-data-jdbc org.springframework.data.build spring-data-parent - 2.1.0.BUILD-SNAPSHOT + 3.5.0-SNAPSHOT - - DATAJDBC - - 2.1.0.BUILD-SNAPSHOT - spring.data.jdbc + spring-data-jdbc + 3.5.0-SNAPSHOT + 4.21.1 reuseReports - 3.6.2 - 0.1.4 - 2.2.8 - 3.4.4 - 1.3.1 - 5.1.41 - 42.0.0 - 1.6.0 - + + 3.0.2 + 3.0.2 + 3.5.13 + + + + 12.1.0.0 + 2.3.232 + 5.1.0 + 2.7.3 + 3.5.3 + 12.10.0.jre11 + 9.2.0 + 42.7.5 + 23.8.0.25.04 + + + 1.0.7.RELEASE + 1.0.0.RELEASE + 1.1.4 + 1.0.2.RELEASE + 1.4.1 + 1.3.0 + + + 1.3.0 + + 1.37 + 0.4.0.BUILD-SNAPSHOT - 2017-2018 + 2017 + + + spring-data-relational + spring-data-jdbc + spring-data-r2dbc + spring-data-jdbc-distribution + @@ -50,6 +77,17 @@ +1 + + mpaluch + Mark Paluch + mpaluch(at)pivotal.io + Pivotal Software, Inc. + https://pivotal.io + + Project Lead + + +1 + gregturn Greg L. Turnquist @@ -61,251 +99,231 @@ -6 + + ogierke + Oliver Gierke + ogierke(at)pivotal.io + Pivotal Software, Inc. + https://pivotal.io + + Project Contributor + + +1 + + + kurtn718 + Kurt Niemi + kniemi(at)vmware.com + VMware. 
+ https://vmware.com + + Project Contributor + + -5 + + + - release + no-jacoco - org.jfrog.buildinfo - artifactory-maven-plugin - false + org.jacoco + jacoco-maven-plugin + + + jacoco-initialize + none + + + - all-dbs + ignore-missing-license org.apache.maven.plugins maven-surefire-plugin + + + ignore-test + + + + + + + + + jmh + + + com.github.mp911de.microbenchmark-runner + microbenchmark-runner-junit5 + ${mbr.version} + test + + + org.openjdk.jmh + jmh-core + ${jmh.version} + test + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh.version} + test + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.3.0 - mysql-test - test + add-source + generate-sources - test + add-test-source - - **/*IntegrationTests.java - - - **/*HsqlIntegrationTests.java - - - mysql - + + src/jmh/java + + + + + org.apache.maven.plugins + maven-surefire-plugin + + true + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + true + + + + org.codehaus.mojo + exec-maven-plugin + 3.1.0 + - postgres-test - test + run-benchmarks + pre-integration-test - test + exec - - **/*IntegrationTests.java - - - **/*HsqlIntegrationTests.java - - - postgres - + test + java + + -classpath + + org.openjdk.jmh.Main + .* + + + + jitpack.io + https://jitpack.io + + - - - - ${project.groupId} - spring-data-commons - ${springdata.commons} - - - - org.springframework - spring-tx - - - - org.springframework - spring-context - - - - org.springframework - spring-beans - - - - org.springframework - spring-jdbc - - - - org.springframework - spring-core - - - commons-logging - commons-logging - - - - - - org.mybatis - mybatis-spring - ${mybatis-spring.version} - true - - - - org.mybatis - mybatis - ${mybatis.version} - true - - - - org.hsqldb - hsqldb - ${hsqldb.version} - test - - - - org.assertj - assertj-core - ${assertj-core.version} - test - - - - mysql - mysql-connector-java - ${mysql-connector-java.version} - test - - - - org.postgresql - postgresql - ${postgresql.version} - test - - - - 
de.schauderhaft.degraph - degraph-check - ${degraph-check.version} - test - - - - org.testcontainers - mysql - ${testcontainers.version} - test - - - - org.testcontainers - postgresql - ${testcontainers.version} - test - - - - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.12 - - - - - - org.jacoco - jacoco-maven-plugin - ${jacoco} - - ${jacoco.destfile} - + org.apache.maven.plugins + maven-surefire-plugin - jacoco-initialize - - prepare-agent - + default-test + + + **/*Tests.java + **/*Tests$*.java + + + **/*IntegrationTests.java + **/*IntegrationTests$*.java + + org.apache.maven.plugins - maven-surefire-plugin + maven-failsafe-plugin default-test + integration-test + + integration-test + - **/*Tests.java + **/*IntegrationTests.java + **/*IntegrationTests$*.java + + + + + failsafe-verify + integration-test + + verify + + - - org.apache.maven.plugins - maven-assembly-plugin - - - org.codehaus.mojo - wagon-maven-plugin - - - org.asciidoctor - asciidoctor-maven-plugin - - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-snapshot + https://repo.spring.io/snapshot + + true + + + false + + + + spring-milestone + https://repo.spring.io/milestone - - - spring-plugins-snapshot - https://repo.spring.io/plugins-snapshot - - - diff --git a/run-tests-against-all-dbs.sh b/run-tests-against-all-dbs.sh deleted file mode 100755 index 1fb700b4d1..0000000000 --- a/run-tests-against-all-dbs.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -mvn clean install -Pall-dbs diff --git a/settings.xml b/settings.xml new file mode 100644 index 0000000000..b3227cc110 --- /dev/null +++ b/settings.xml @@ -0,0 +1,29 @@ + + + + + spring-plugins-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-snapshot + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-milestone + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + + \ No newline at end of file diff 
--git a/spring-data-jdbc-distribution/package.json b/spring-data-jdbc-distribution/package.json new file mode 100644 index 0000000000..4689506b3f --- /dev/null +++ b/spring-data-jdbc-distribution/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "antora": "3.2.0-alpha.6", + "@antora/atlas-extension": "1.0.0-alpha.2", + "@antora/collector-extension": "1.0.0-alpha.7", + "@asciidoctor/tabs": "1.0.0-beta.6", + "@springio/antora-extensions": "1.13.0", + "@springio/asciidoctor-extensions": "1.0.0-alpha.11" + } +} diff --git a/spring-data-jdbc-distribution/pom.xml b/spring-data-jdbc-distribution/pom.xml new file mode 100644 index 0000000000..9c02f50608 --- /dev/null +++ b/spring-data-jdbc-distribution/pom.xml @@ -0,0 +1,59 @@ + + + + 4.0.0 + + spring-data-jdbc-distribution + + pom + + Spring Data JDBC - Distribution + Distribution build for Spring Data JDBC + + + org.springframework.data + spring-data-relational-parent + 3.5.0-SNAPSHOT + ../pom.xml + + + + ${basedir}/.. + ${project.basedir}/../src/main/antora/antora-playbook.yml + + + + + + ${project.basedir}/../src/main/antora/resources/antora-resources + true + + + + + org.apache.maven.plugins + maven-resources-plugin + + + + resources + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + + org.antora + antora-maven-plugin + + + + + diff --git a/spring-data-jdbc/README.adoc b/spring-data-jdbc/README.adoc new file mode 100644 index 0000000000..c793f9ade1 --- /dev/null +++ b/spring-data-jdbc/README.adoc @@ -0,0 +1,83 @@ += Spring Data JDBC + +The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use data access technologies. *Spring Data JDBC* offers the popular Repository abstraction based on JDBC. + +It aims at being conceptually easy. +In order to achieve this it does NOT offer caching, lazy loading, write behind or many other features of JPA. 
+This makes Spring Data JDBC a simple, limited, opinionated ORM. + +== Features + +* Implementation of CRUD methods for Aggregates. +* `@Query` annotation +* Support for transparent auditing (created, last changed) +* Events for persistence events +* Possibility to integrate custom repository code +* JavaConfig based repository configuration by introducing `EnableJdbcRepository` +* Integration with MyBatis + +== Getting Help + +If you are new to Spring Data JDBC read the following two articles https://spring.io/blog/2018/09/17/introducing-spring-data-jdbc["Introducing Spring Data JDBC"] and https://spring.io/blog/2018/09/24/spring-data-jdbc-references-and-aggregates["Spring Data JDBC, References, and Aggregates"] + +There are also examples in the https://github.com/spring-projects/spring-data-examples/tree/master/jdbc[Spring Data Examples] project. + +A very good source of information is the source code in this repository. +Especially the integration tests (if you are reading this on GitHub, type `t` and then `IntegrationTests.java`) + +We are keeping an eye on the https://stackoverflow.com/questions/tagged/spring-data-jdbc[spring-data-jdbc tag on Stack Overflow]. + +If you think you found a bug, or have a feature request, please https://github.com/spring-projects/spring-data-jdbc/issues[create a ticket in our issue tracker]. + +== Execute Tests + +=== Fast running tests + +Fast running tests can be executed with a simple + +[source] +---- +mvn test +---- + +This will execute unit tests and integration tests using an in-memory database. + +=== Running tests with a real database + +In order to run the integration tests against a specific database, you need to have a local Docker installation available, and then execute: + +[source] +---- +mvn verify -Dspring.profiles.active=<databasetype> +---- + +This will also execute the unit tests.
+ +Currently, the following _databasetypes_ are available: + +* hsql (default, does not require a running database) +* mysql +* mariadb +* postgres +* mariadb +* mssql +* oracle +* db2 + +Testing with Microsoft SQL Server and IBM DB2 requires you to accept the EULA of the respective Docker image so that the build may download and run it for you. +In order to accept the EULA, please add a file named `container-license-acceptance.txt` to the classpath, i.e. `src/test/resources` containing the name of the docker images. +At the time of this writing this would be + +``` +mcr.microsoft.com/mssql/server:2022-latest +ibmcom/db2:11.5.7.0a +``` + +=== Run tests with all databases + +[source] +---- +mvn test -Pall-dbs +---- + +This will execute the unit tests and all the integration tests with all the databases we currently support for testing. diff --git a/spring-data-jdbc/pom.xml b/spring-data-jdbc/pom.xml new file mode 100644 index 0000000000..87d4f9704a --- /dev/null +++ b/spring-data-jdbc/pom.xml @@ -0,0 +1,497 @@ + + + + 4.0.0 + + spring-data-jdbc + 3.5.0-SNAPSHOT + + Spring Data JDBC + Spring Data module for JDBC repositories. + https://projects.spring.io/spring-data-jdbc + + + org.springframework.data + spring-data-relational-parent + 3.5.0-SNAPSHOT + + + + spring.data.jdbc + ${basedir}/.. 
+ + + + + + org.testcontainers + testcontainers-bom + ${testcontainers} + pom + import + + + + + + + + ${project.groupId} + spring-data-relational + ${project.version} + + + + ${project.groupId} + spring-data-commons + ${springdata.commons} + + + + org.springframework + spring-tx + + + + org.springframework + spring-context + + + + org.springframework + spring-beans + + + + org.springframework + spring-jdbc + + + + org.springframework + spring-core + + + + org.jetbrains.kotlin + kotlin-stdlib + true + + + + org.jetbrains.kotlin + kotlin-reflect + true + + + + org.mybatis + mybatis-spring + ${mybatis-spring.version} + true + + + + org.mybatis + mybatis + ${mybatis.version} + true + + + + org.liquibase + liquibase-core + ${liquibase.version} + true + + + + + + com.h2database + h2 + ${h2.version} + true + + + + org.hsqldb + hsqldb + ${hsqldb.version} + test + + + + com.mysql + mysql-connector-j + ${mysql-connector-java.version} + test + + + + org.postgresql + postgresql + ${postgresql.version} + true + + + + org.mariadb.jdbc + mariadb-java-client + ${mariadb-java-client.version} + test + + + + com.microsoft.sqlserver + mssql-jdbc + ${mssql.version} + true + + + + com.ibm.db2 + jcc + ${db2.version} + test + + + + com.oracle.database.jdbc + ojdbc11 + ${oracle.version} + test + + + + + + org.awaitility + awaitility + ${awaitility} + test + + + + org.assertj + assertj-core + ${assertj} + test + + + net.bytebuddy + byte-buddy + + + + + + org.jmolecules.integrations + jmolecules-spring + ${jmolecules-integration} + test + + + + com.tngtech.archunit + archunit + ${archunit.version} + test + + + + io.mockk + mockk-jvm + ${mockk} + test + + + + + + org.testcontainers + mysql + test + + + org.slf4j + jcl-over-slf4j + + + + + + org.testcontainers + postgresql + test + + + + org.testcontainers + mariadb + test + + + + org.testcontainers + mssqlserver + test + + + + org.testcontainers + db2 + test + + + + org.testcontainers + oracle-free + test + + + + com.zaxxer + HikariCP + 
${hikari.version} + test + + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + default-test + integration-test + + integration-test + + + + hsql + + + + + + + + + + + + + + postgres + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + postgres-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + + + postgres + + + + + + + + + + all-dbs + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + h2-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + h2 + + + + + failsafe-verify-h2 + integration-test + + verify + + + + mysql-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + mysql + + + + + failsafe-verify-mysql + integration-test + + verify + + + + postgres-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + postgres + + + + + failsafe-verify-postgres + integration-test + + verify + + + + mariadb-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + mariadb + + + + + failsafe-verify-mariadb + integration-test + + verify + + + + db2-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + db2 + + + + + failsafe-verify-db2 + integration-test + + verify + + + + oracle-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + oracle + + + + + failsafe-verify-oracle + integration-test + + verify + + + + mssql-test + integration-test + + integration-test + + + + **/*IntegrationTests.java + + + + mssql + + + + + failsafe-verify-mssql + integration-test + + verify + + + + + + + + + + diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/aot/JdbcRuntimeHints.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/aot/JdbcRuntimeHints.java new file mode 100644 index 0000000000..3a5eb3a73e --- /dev/null +++ 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/aot/JdbcRuntimeHints.java @@ -0,0 +1,66 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.aot; + +import java.util.Arrays; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.jdbc.core.dialect.JdbcPostgresDialect; +import org.springframework.data.jdbc.repository.support.SimpleJdbcRepository; +import org.springframework.data.relational.auditing.RelationalAuditingCallback; +import org.springframework.data.relational.core.mapping.event.AfterConvertCallback; +import org.springframework.data.relational.core.mapping.event.AfterDeleteCallback; +import org.springframework.data.relational.core.mapping.event.AfterSaveCallback; +import org.springframework.data.relational.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.relational.core.mapping.event.BeforeDeleteCallback; +import org.springframework.data.relational.core.mapping.event.BeforeSaveCallback; +import org.springframework.lang.Nullable; + +/** + * {@link RuntimeHintsRegistrar} for JDBC. 
+ * + * @author Christoph Strobl + * @since 3.0 + */ +class JdbcRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + Arrays.asList(TypeReference.of(SimpleJdbcRepository.class), TypeReference.of(AfterConvertCallback.class), + TypeReference.of(AfterDeleteCallback.class), TypeReference.of(AfterSaveCallback.class), + TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeDeleteCallback.class), + TypeReference.of(BeforeSaveCallback.class), TypeReference.of(RelationalAuditingCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + hints.proxies().registerJdkProxy(TypeReference.of("org.springframework.data.jdbc.core.convert.RelationResolver"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.aop.framework.Advised"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + + hints.reflection().registerType(TypeReference.of("org.postgresql.jdbc.TypeInfoCache"), + MemberCategory.PUBLIC_CLASSES); + + for (Class simpleType : JdbcPostgresDialect.INSTANCE.simpleTypes()) { + hints.reflection().registerType(TypeReference.of(simpleType), MemberCategory.PUBLIC_CLASSES); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/AggregateChangeExecutor.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/AggregateChangeExecutor.java new file mode 100644 index 0000000000..1de697ad09 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/AggregateChangeExecutor.java @@ -0,0 +1,121 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core; + +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.relational.core.conversion.AggregateChange; +import org.springframework.data.relational.core.conversion.DbAction; +import org.springframework.data.relational.core.conversion.DbActionExecutionException; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; + +import java.util.List; + +/** + * Executes an {@link MutableAggregateChange}. + * + * @author Jens Schauder + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @since 2.0 + */ +class AggregateChangeExecutor { + + private final JdbcConverter converter; + private final DataAccessStrategy accessStrategy; + + AggregateChangeExecutor(JdbcConverter converter, DataAccessStrategy accessStrategy) { + + this.converter = converter; + this.accessStrategy = accessStrategy; + } + + /** + * Execute a save aggregate change. It returns the resulting root entities, with all changes that might apply. This + * might be the original instances or new instances, depending on their mutability. + * + * @param aggregateChange the aggregate change to be executed. Must not be {@literal null}. + * @param the type of the aggregate root. + * @return the aggregate roots resulting from the change, if there are any. May be empty. 
+ * @since 3.0 + */ + List executeSave(AggregateChange aggregateChange) { + + JdbcAggregateChangeExecutionContext executionContext = new JdbcAggregateChangeExecutionContext(converter, + accessStrategy); + + aggregateChange.forEachAction(action -> execute(action, executionContext)); + + return executionContext.populateIdsIfNecessary(); + } + + /** + * Execute a delete aggregate change. + * + * @param aggregateChange the aggregate change to be executed. Must not be {@literal null}. + * @param the type of the aggregate root. + * @since 3.0 + */ + void executeDelete(AggregateChange aggregateChange) { + + JdbcAggregateChangeExecutionContext executionContext = new JdbcAggregateChangeExecutionContext(converter, + accessStrategy); + + aggregateChange.forEachAction(action -> execute(action, executionContext)); + } + + private void execute(DbAction action, JdbcAggregateChangeExecutionContext executionContext) { + + try { + if (action instanceof DbAction.InsertRoot insertRoot) { + executionContext.executeInsertRoot(insertRoot); + } else if (action instanceof DbAction.BatchInsertRoot batchInsertRoot) { + executionContext.executeBatchInsertRoot(batchInsertRoot); + } else if (action instanceof DbAction.Insert insert) { + executionContext.executeInsert(insert); + } else if (action instanceof DbAction.BatchInsert batchInsert) { + executionContext.executeBatchInsert(batchInsert); + } else if (action instanceof DbAction.UpdateRoot updateRoot) { + executionContext.executeUpdateRoot(updateRoot); + } else if (action instanceof DbAction.Delete delete) { + executionContext.executeDelete(delete); + } else if (action instanceof DbAction.BatchDelete batchDelete) { + executionContext.executeBatchDelete(batchDelete); + } else if (action instanceof DbAction.DeleteAll deleteAll) { + executionContext.executeDeleteAll(deleteAll); + } else if (action instanceof DbAction.DeleteRoot deleteRoot) { + executionContext.executeDeleteRoot(deleteRoot); + } else if (action instanceof 
DbAction.BatchDeleteRoot batchDeleteRoot) { + executionContext.executeBatchDeleteRoot(batchDeleteRoot); + } else if (action instanceof DbAction.DeleteAllRoot deleteAllRoot) { + executionContext.executeDeleteAllRoot(deleteAllRoot); + } else if (action instanceof DbAction.AcquireLockRoot acquireLockRoot) { + executionContext.executeAcquireLock(acquireLockRoot); + } else if (action instanceof DbAction.AcquireLockAllRoot acquireLockAllRoot) { + executionContext.executeAcquireLockAllRoot(acquireLockAllRoot); + } else { + throw new RuntimeException("unexpected action"); + } + } catch (Exception e) { + + if (e instanceof OptimisticLockingFailureException) { + throw e; + } + throw new DbActionExecutionException(action, e); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutionContext.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutionContext.java new file mode 100644 index 0000000000..2ec070ab76 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutionContext.java @@ -0,0 +1,555 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import java.util.*; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; + +import org.springframework.dao.IncorrectUpdateSemanticsDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.data.jdbc.core.convert.InsertSubject; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.JdbcIdentifierBuilder; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPathAccessor; +import org.springframework.data.relational.core.conversion.DbAction; +import org.springframework.data.relational.core.conversion.DbActionExecutionResult; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.util.Pair; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * A container for the data required and produced by an aggregate change execution. Most importantly it holds the + * results of the various actions performed. 
+ * + * @author Jens Schauder + * @author Umut Erturk + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @author Mark Paluch + */ +@SuppressWarnings("rawtypes") +class JdbcAggregateChangeExecutionContext { + + private static final String UPDATE_FAILED = "Failed to update entity [%s]; Id [%s] not found in database"; + private static final String UPDATE_FAILED_OPTIMISTIC_LOCKING = "Failed to update entity [%s]; The entity was updated since it was rea or it isn't in the database at all"; + + private final RelationalMappingContext context; + private final JdbcConverter converter; + private final DataAccessStrategy accessStrategy; + + private final Map, DbActionExecutionResult> results = new LinkedHashMap<>(); + + JdbcAggregateChangeExecutionContext(JdbcConverter converter, DataAccessStrategy accessStrategy) { + + this.converter = converter; + this.context = converter.getMappingContext(); + this.accessStrategy = accessStrategy; + } + + void executeInsertRoot(DbAction.InsertRoot insert) { + + Object id = accessStrategy.insert(insert.getEntity(), insert.getEntityType(), Identifier.empty(), + insert.getIdValueSource()); + add(new DbActionExecutionResult(insert, id)); + } + + void executeBatchInsertRoot(DbAction.BatchInsertRoot batchInsertRoot) { + + List> inserts = batchInsertRoot.getActions(); + List> insertSubjects = inserts.stream() + .map(insert -> InsertSubject.describedBy(insert.getEntity(), Identifier.empty())).collect(Collectors.toList()); + + Object[] ids = accessStrategy.insert(insertSubjects, batchInsertRoot.getEntityType(), + batchInsertRoot.getBatchValue()); + + for (int i = 0; i < inserts.size(); i++) { + add(new DbActionExecutionResult(inserts.get(i), ids.length > 0 ? 
ids[i] : null)); + } + } + + void executeInsert(DbAction.Insert insert) { + + Identifier parentKeys = getParentKeys(insert, converter); + Object id = accessStrategy.insert(insert.getEntity(), insert.getEntityType(), parentKeys, + insert.getIdValueSource()); + add(new DbActionExecutionResult(insert, id)); + } + + void executeBatchInsert(DbAction.BatchInsert batchInsert) { + + List> inserts = batchInsert.getActions(); + List> insertSubjects = inserts.stream() + .map(insert -> InsertSubject.describedBy(insert.getEntity(), getParentKeys(insert, converter))) + .collect(Collectors.toList()); + + Object[] ids = accessStrategy.insert(insertSubjects, batchInsert.getEntityType(), batchInsert.getBatchValue()); + + for (int i = 0; i < inserts.size(); i++) { + add(new DbActionExecutionResult(inserts.get(i), ids.length > 0 ? ids[i] : null)); + } + } + + void executeUpdateRoot(DbAction.UpdateRoot update) { + + if (update.getPreviousVersion() != null) { + updateWithVersion(update); + } else { + updateWithoutVersion(update); + } + add(new DbActionExecutionResult(update)); + } + + void executeDeleteRoot(DbAction.DeleteRoot delete) { + + if (delete.getPreviousVersion() != null) { + accessStrategy.deleteWithVersion(delete.getId(), delete.getEntityType(), delete.getPreviousVersion()); + } else { + accessStrategy.delete(delete.getId(), delete.getEntityType()); + } + } + + void executeBatchDeleteRoot(DbAction.BatchDeleteRoot batchDelete) { + + List rootIds = batchDelete.getActions().stream().map(DbAction.DeleteRoot::getId).toList(); + accessStrategy.delete(rootIds, batchDelete.getEntityType()); + } + + void executeDelete(DbAction.Delete delete) { + + accessStrategy.delete(delete.getRootId(), delete.getPropertyPath()); + } + + void executeBatchDelete(DbAction.BatchDelete batchDelete) { + + List rootIds = batchDelete.getActions().stream().map(DbAction.Delete::getRootId).toList(); + accessStrategy.delete(rootIds, batchDelete.getBatchValue()); + } + + void 
executeDeleteAllRoot(DbAction.DeleteAllRoot deleteAllRoot) { + + accessStrategy.deleteAll(deleteAllRoot.getEntityType()); + } + + void executeDeleteAll(DbAction.DeleteAll delete) { + + accessStrategy.deleteAll(delete.getPropertyPath()); + } + + void executeAcquireLock(DbAction.AcquireLockRoot acquireLock) { + accessStrategy.acquireLockById(acquireLock.getId(), LockMode.PESSIMISTIC_WRITE, acquireLock.getEntityType()); + } + + void executeAcquireLockAllRoot(DbAction.AcquireLockAllRoot acquireLock) { + accessStrategy.acquireLockAll(LockMode.PESSIMISTIC_WRITE, acquireLock.getEntityType()); + } + + private void add(DbActionExecutionResult result) { + results.put(result.getAction(), result); + } + + private Identifier getParentKeys(DbAction.WithDependingOn action, JdbcConverter converter) { + + Object id = getParentId(action); + + JdbcIdentifierBuilder identifier = JdbcIdentifierBuilder // + .forBackReferences(converter, context.getAggregatePath(action.getPropertyPath()), id); + + for (Map.Entry, Object> qualifier : action.getQualifiers() + .entrySet()) { + identifier = identifier.withQualifier(context.getAggregatePath(qualifier.getKey()), qualifier.getValue()); + } + + return identifier.build(); + } + + private Object getParentId(DbAction.WithDependingOn action) { + + DbAction.WithEntity idOwningAction = getIdOwningAction(action, + context.getAggregatePath(action.getPropertyPath()).getIdDefiningParentPath()); + + return getPotentialGeneratedIdFrom(idOwningAction); + } + + private DbAction.WithEntity getIdOwningAction(DbAction.WithEntity action, AggregatePath idPath) { + + if (!(action instanceof DbAction.WithDependingOn withDependingOn)) { + + Assert.state(idPath.isRoot(), + "When the id path is not empty the id providing action should be of type WithDependingOn"); + + return action; + } + + if (idPath.equals(context.getAggregatePath(withDependingOn.getPropertyPath()))) { + return action; + } + + return getIdOwningAction(withDependingOn.getDependingOn(), idPath); + } + 
+ private Object getPotentialGeneratedIdFrom(DbAction.WithEntity idOwningAction) { + + if (IdValueSource.GENERATED.equals(idOwningAction.getIdValueSource())) { + + DbActionExecutionResult dbActionExecutionResult = results.get(idOwningAction); + Object generatedId = Optional.ofNullable(dbActionExecutionResult) // + .map(DbActionExecutionResult::getGeneratedId) // + .orElse(null); + + if (generatedId != null) { + return generatedId; + } + } + + return getIdFrom(idOwningAction); + } + + private Object getIdFrom(DbAction.WithEntity idOwningAction) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(idOwningAction.getEntityType()); + Object identifier = persistentEntity.getIdentifierAccessor(idOwningAction.getEntity()).getIdentifier(); + + Assert.state(identifier != null, () -> "Couldn't obtain a required id value for " + persistentEntity); + + return identifier; + } + + List populateIdsIfNecessary() { + + // have the results so that the inserts on the leaves come first. + List reverseResults = new ArrayList<>(results.values()); + Collections.reverse(reverseResults); + + StagedValues cascadingValues = new StagedValues(); + + List roots = new ArrayList<>(reverseResults.size()); + + for (DbActionExecutionResult result : reverseResults) { + + DbAction.WithEntity action = result.getAction(); + + Object newEntity = setIdAndCascadingProperties(action, result.getGeneratedId(), cascadingValues); + + if (action instanceof DbAction.InsertRoot || action instanceof DbAction.UpdateRoot) { + // noinspection unchecked + roots.add((T) newEntity); + } + + // the id property was immutable, so we have to propagate changes up the tree + if (action instanceof DbAction.Insert insert) { + + Pair qualifier = insert.getQualifier(); + Object qualifierValue = qualifier == null ? 
null : qualifier.getSecond(); + + if (newEntity != action.getEntity()) { + + cascadingValues.stage(insert.getDependingOn(), insert.getPropertyPath(), + qualifierValue, newEntity); + } else if (insert.getPropertyPath().getLeafProperty().isCollectionLike()) { + + cascadingValues.gather(insert.getDependingOn(), insert.getPropertyPath(), + qualifierValue, newEntity); + } + } + } + + if (roots.isEmpty()) { + throw new IllegalStateException( + String.format("Cannot retrieve the resulting instance(s) unless a %s or %s action was successfully executed", + DbAction.InsertRoot.class.getName(), DbAction.UpdateRoot.class.getName())); + } + + Collections.reverse(roots); + + return roots; + } + + @SuppressWarnings("unchecked") + private Object setIdAndCascadingProperties(DbAction.WithEntity action, @Nullable Object generatedId, + StagedValues cascadingValues) { + + S originalEntity = action.getEntity(); + + RelationalPersistentEntity persistentEntity = (RelationalPersistentEntity) context + .getRequiredPersistentEntity(action.getEntityType()); + PersistentPropertyPathAccessor propertyAccessor = converter.getPropertyAccessor(persistentEntity, + originalEntity); + + if (IdValueSource.GENERATED.equals(action.getIdValueSource())) { + propertyAccessor.setProperty(persistentEntity.getRequiredIdProperty(), generatedId); + } + + // set values of changed immutables referenced by this entity + cascadingValues.forEachPath(action, (persistentPropertyPath, o) -> propertyAccessor + .setProperty(getRelativePath(action, persistentPropertyPath), o)); + + return propertyAccessor.getBean(); + } + + @SuppressWarnings("unchecked") + private PersistentPropertyPath getRelativePath(DbAction action, PersistentPropertyPath pathToValue) { + + if (action instanceof DbAction.Insert insert) { + return pathToValue.getExtensionForBaseOf(insert.getPropertyPath()); + } + + if (action instanceof DbAction.InsertRoot) { + return pathToValue; + } + + if (action instanceof DbAction.UpdateRoot) { + return pathToValue; 
+ } + + throw new IllegalArgumentException(String.format("DbAction of type %s is not supported", action.getClass())); + } + + @SuppressWarnings("unchecked") + private RelationalPersistentEntity getRequiredPersistentEntity(Class type) { + return (RelationalPersistentEntity) context.getRequiredPersistentEntity(type); + } + + private void updateWithoutVersion(DbAction.UpdateRoot update) { + + if (!accessStrategy.update(update.getEntity(), update.getEntityType())) { + + throw new IncorrectUpdateSemanticsDataAccessException( + String.format(UPDATE_FAILED, update.getEntity(), getIdFrom(update))); + } + } + + private void updateWithVersion(DbAction.UpdateRoot update) { + + Number previousVersion = update.getPreviousVersion(); + Assert.notNull(previousVersion, "The root aggregate cannot be updated because the version property is null"); + + if (!accessStrategy.updateWithVersion(update.getEntity(), update.getEntityType(), previousVersion)) { + + throw new OptimisticLockingFailureException(String.format(UPDATE_FAILED_OPTIMISTIC_LOCKING, update.getEntity())); + } + } + + /** + * Accumulates information about staged immutable objects in an aggregate that require updating because their state + * changed because of {@link DbAction} execution. + */ + private static class StagedValues { + + static final List> aggregators = Arrays.asList(SetAggregator.INSTANCE, MapAggregator.INSTANCE, + ListAggregator.INSTANCE, SingleElementAggregator.INSTANCE); + + Map> values = new HashMap<>(); + + /** + * Adds a value that needs to be set in an entity higher up in the tree of entities in the aggregate. If the + * attribute to be set is multivalued this method expects only a single element. + * + * @param action The action responsible for persisting the entity that needs the added value set. Must not be + * {@literal null}. + * @param path The path to the property in which to set the value. Must not be {@literal null}. 
+ * @param qualifier If {@code path} is a qualified multivalued properties this parameter contains the qualifier. May + * be {@literal null}. + * @param value The value to be set. Must not be {@literal null}. + */ + void stage(DbAction action, PersistentPropertyPath path, @Nullable Object qualifier, Object value) { + + StagedValue gather = gather(action, path, qualifier, value); + gather.isStaged = true; + } + + @SuppressWarnings("unchecked") + StagedValue gather(DbAction action, PersistentPropertyPath path, @Nullable Object qualifier, Object value) { + + MultiValueAggregator aggregator = getAggregatorFor(path); + + Map valuesForPath = this.values.computeIfAbsent(action, + dbAction -> new HashMap<>()); + + StagedValue stagedValue = valuesForPath.computeIfAbsent(path, + persistentPropertyPath -> new StagedValue(aggregator.createEmptyInstance())); + T currentValue = (T) stagedValue.value; + + stagedValue.value = aggregator.add(currentValue, qualifier, value); + + valuesForPath.put(path, stagedValue); + + return stagedValue; + } + + @SuppressWarnings("unchecked") + private MultiValueAggregator getAggregatorFor(PersistentPropertyPath path) { + + PersistentProperty property = path.getLeafProperty(); + for (MultiValueAggregator aggregator : aggregators) { + if (aggregator.handles(property)) { + return (MultiValueAggregator) aggregator; + } + } + + throw new IllegalStateException(String.format("Can't handle path %s", path)); + } + + /** + * Performs the given action for each entry in this the staging area that are provided by {@link DbAction} until all + * {@link PersistentPropertyPath} have been processed or the action throws an exception. The {@link BiConsumer + * action} is called with each applicable {@link PersistentPropertyPath} and {@code value} that is assignable to the + * property. 
+ */ + void forEachPath(DbAction dbAction, BiConsumer action) { + values.getOrDefault(dbAction, Collections.emptyMap()).forEach((persistentPropertyPath, stagedValue) -> { + if (stagedValue.isStaged) { + action.accept(persistentPropertyPath, stagedValue.value); + } + }); + } + + } + + private static class StagedValue { + @Nullable Object value; + boolean isStaged; + + public StagedValue(@Nullable Object value) { + this.value = value; + } + } + + interface MultiValueAggregator { + + default Class handledType() { + return Object.class; + } + + default boolean handles(PersistentProperty property) { + return handledType().isAssignableFrom(property.getType()); + } + + @Nullable + T createEmptyInstance(); + + T add(@Nullable T aggregate, @Nullable Object qualifier, Object value); + + } + + private enum SetAggregator implements MultiValueAggregator { + + INSTANCE; + + @Override + public Class handledType() { + return Set.class; + } + + @Override + public Set createEmptyInstance() { + return new HashSet(); + } + + @SuppressWarnings("unchecked") + @Override + public Set add(@Nullable Set set, @Nullable Object qualifier, Object value) { + + Assert.notNull(set, "Set must not be null"); + + set.add(value); + return set; + } + } + + private enum ListAggregator implements MultiValueAggregator { + + INSTANCE; + + @Override + public boolean handles(PersistentProperty property) { + return property.isCollectionLike(); + } + + @Override + public List createEmptyInstance() { + return new ArrayList(); + } + + @SuppressWarnings("unchecked") + @Override + public List add(@Nullable List list, @Nullable Object qualifier, Object value) { + + Assert.notNull(list, "List must not be null"); + Assert.notNull(qualifier, "ListAggregator can't handle a null qualifier"); + + int index = (int) qualifier; + if (index >= list.size()) { + list.add(value); + } else { + list.add(index, value); + } + + return list; + } + } + + private enum MapAggregator implements MultiValueAggregator { + + INSTANCE; + + 
@Override + public Class handledType() { + return Map.class; + } + + @Override + public Map createEmptyInstance() { + return new HashMap(); + } + + @SuppressWarnings("unchecked") + @Override + public Map add(@Nullable Map map, @Nullable Object qualifier, Object value) { + + Assert.notNull(map, "Map must not be null"); + + map.put(qualifier, value); + return map; + } + } + + private enum SingleElementAggregator implements MultiValueAggregator { + + INSTANCE; + + @Override + @Nullable + public Object createEmptyInstance() { + return null; + } + + @Override + public Object add(@Nullable Object __null, @Nullable Object qualifier, Object value) { + return value; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateOperations.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateOperations.java new file mode 100644 index 0000000000..ef6844ad23 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateOperations.java @@ -0,0 +1,327 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.springframework.dao.IncorrectUpdateSemanticsDataAccessException; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.relational.core.query.Query; +import org.springframework.lang.Nullable; + +/** + * Specifies operations one can perform on a database, based on an Domain Type. + * + * @author Jens Schauder + * @author Thomas Lang + * @author Milan Milanov + * @author Chirag Tailor + * @author Diego Krupitza + * @author Myeonghyeon Lee + * @author Sergey Korotaev + */ +public interface JdbcAggregateOperations { + + /** + * Saves an instance of an aggregate, including all the members of the aggregate. + * + * @param instance the aggregate root of the aggregate to be saved. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the saved instance. + * @throws IncorrectUpdateSemanticsDataAccessException when the instance is determined to be not new and the resulting + * update does not update any rows. + */ + T save(T instance); + + /** + * Saves all aggregate instances, including all the members of each aggregate instance. + * + * @param instances the aggregate roots to be saved. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the saved instances. + * @throws IncorrectUpdateSemanticsDataAccessException when at least one instance is determined to be not new and the + * resulting update does not update any rows. + * @since 3.0 + */ + List saveAll(Iterable instances); + + /** + * Dedicated insert function. This skips the test if the aggregate root is new and makes an insert. + *

+ * This is useful if the client provides an id for new aggregate roots. + *

+ * + * @param instance the aggregate root of the aggregate to be inserted. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the saved instance. + */ + T insert(T instance); + + /** + * Inserts all aggregate instances, including all the members of each aggregate instance. + *

+ * This is useful if the client provides an id for new aggregate roots. + *

+ * + * @param instances the aggregate roots to be inserted. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the saved instances. + * @since 3.1 + */ + Iterable insertAll(Iterable instances); + + /** + * Dedicated update function. This skips the test if the aggregate root is new or not and always performs an update + * operation. + * + * @param instance the aggregate root of the aggregate to be inserted. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the saved instance. + */ + T update(T instance); + + /** + * Updates all aggregate instances, including all the members of each aggregate instance. + * + * @param instances the aggregate roots to be inserted. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the saved instances. + * @since 3.1 + */ + List updateAll(Iterable instances); + + /** + * Counts the number of aggregates of a given type. + * + * @param domainType the type of the aggregates to be counted. + * @return the number of instances stored in the database. Guaranteed to be not {@code null}. + */ + long count(Class domainType); + + /** + * Counts the number of aggregates of a given type that match the given query. + * + * @param query must not be {@literal null}. + * @param domainType the entity type must not be {@literal null}. + * @return the number of instances stored in the database. Guaranteed to be not {@code null}. + * @since 3.0 + */ + long count(Query query, Class domainType); + + /** + * Determine whether there are aggregates that match the {@link Query} + * + * @param query must not be {@literal null}. + * @param domainType the entity type must not be {@literal null}. + * @return {@literal true} if the object exists. + * @since 3.0 + */ + boolean exists(Query query, Class domainType); + + /** + * Checks if an aggregate identified by type and id exists in the database. + * + * @param id the id of the aggregate root. 
+ * @param domainType the type of the aggregate root. + * @param the type of the aggregate root. + * @return whether the aggregate exists. + */ + boolean existsById(Object id, Class domainType); + + /** + * Load an aggregate from the database. + * + * @param id the id of the aggregate to load. Must not be {@code null}. + * @param domainType the type of the aggregate root. Must not be {@code null}. + * @param the type of the aggregate root. + * @return the loaded aggregate. Might return {@code null}. + */ + @Nullable + T findById(Object id, Class domainType); + + /** + * Load all aggregates of a given type that are identified by the given ids. + * + * @param ids of the aggregate roots identifying the aggregates to load. Must not be {@code null}. + * @param domainType the type of the aggregate roots. Must not be {@code null}. + * @param the type of the aggregate roots. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + */ + List findAllById(Iterable ids, Class domainType); + + /** + * Loads all entities that match one of the ids passed as an argument to a {@link Stream}. + * It is not guaranteed that the number of ids passed in matches the number of entities returned. + * + * @param ids the Ids of the entities to load. Must not be {@code null}. + * @param domainType the type of entities to load. Must not be {@code null}. + * @param type of entities to load. + * @return the loaded entities. Guaranteed to be not {@code null}. + */ + Stream streamAllByIds(Iterable ids, Class domainType); + + /** + * Load all aggregates of a given type. + * + * @param domainType the type of the aggregate roots. Must not be {@code null}. + * @param the type of the aggregate roots. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + */ + List findAll(Class domainType); + + /** + * Load all aggregates of a given type to a {@link Stream}. + * + * @param domainType the type of the aggregate roots. Must not be {@code null}. 
+ * @param the type of the aggregate roots. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + */ + Stream streamAll(Class domainType); + + /** + * Load all aggregates of a given type, sorted. + * + * @param domainType the type of the aggregate roots. Must not be {@code null}. + * @param the type of the aggregate roots. Must not be {@code null}. + * @param sort the sorting information. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + * @since 2.0 + */ + List findAll(Class domainType, Sort sort); + + /** + * Loads all entities of the given type to a {@link Stream}, sorted. + * + * @param domainType the type of entities to load. Must not be {@code null}. + * @param the type of entities to load. + * @param sort the sorting information. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + * @since 2.0 + */ + Stream streamAll(Class domainType, Sort sort); + + /** + * Load a page of (potentially sorted) aggregates of a given type. + * + * @param domainType the type of the aggregate roots. Must not be {@code null}. + * @param the type of the aggregate roots. Must not be {@code null}. + * @param pageable the pagination information. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + * @since 2.0 + */ + Page findAll(Class domainType, Pageable pageable); + + /** + * Execute a {@code SELECT} query and convert the resulting item to an entity ensuring exactly one result. + * + * @param query must not be {@literal null}. + * @param domainType the entity type must not be {@literal null}. + * @return exactly one result or {@link Optional#empty()} if no match found. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + Optional findOne(Query query, Class domainType); + + /** + * Execute a {@code SELECT} query and convert the resulting items to a {@link List} that is sorted. 
+ * + * @param query must not be {@literal null}. + * @param domainType the entity type must not be {@literal null}. + * @return a non-null sorted list with all the matching results. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + List findAll(Query query, Class domainType); + + /** + * Execute a {@code SELECT} query and convert the resulting items to a {@link Stream}. + * + * @param query must not be {@literal null}. + * @param domainType the type of entities. Must not be {@code null}. + * @return a non-null list with all the matching results. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + Stream streamAll(Query query, Class domainType); + + /** + * Returns a {@link Page} of entities matching the given {@link Query}. In case no match could be found, an empty + * {@link Page} is returned. + * + * @param query must not be {@literal null}. + * @param domainType the entity type must not be {@literal null}. + * @param pageable can be null. + * @return a {@link Page} of entities matching the given {@link Example}. + * @since 3.0 + */ + Page findAll(Query query, Class domainType, Pageable pageable); + + /** + * Deletes a single Aggregate including all entities contained in that aggregate. + *

+ * Since no version attribute is provided this method will never throw a + * {@link org.springframework.dao.OptimisticLockingFailureException}. If no rows match the generated delete operation + * this fact will be silently ignored. + *

+ * + * @param id the id of the aggregate root of the aggregate to be deleted. Must not be {@code null}. + * @param domainType the type of the aggregate root. + * @param the type of the aggregate root. + */ + void deleteById(Object id, Class domainType); + + /** + * Deletes all aggregates identified by their aggregate root ids. + *

+ * Since no version attribute is provided this method will never throw a + * {@link org.springframework.dao.OptimisticLockingFailureException}. If no rows match the generated delete operation + * this fact will be silently ignored. + *

+ * + * @param ids the ids of the aggregate roots of the aggregates to be deleted. Must not be {@code null}. + * @param domainType the type of the aggregate root. + * @param the type of the aggregate root. + */ + void deleteAllById(Iterable ids, Class domainType); + + /** + * Delete an aggregate identified by its aggregate root. + * + * @param aggregateRoot to delete. Must not be {@code null}. + * @param the type of the aggregate root. + */ + void delete(T aggregateRoot); + + /** + * Delete all aggregates of a given type. + * + * @param domainType type of the aggregate roots to be deleted. Must not be {@code null}. + */ + void deleteAll(Class domainType); + + /** + * Delete all aggregates identified by their aggregate roots. + * + * @param aggregateRoots to delete. Must not be {@code null}. + * @param the type of the aggregate roots. + */ + void deleteAll(Iterable aggregateRoots); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateTemplate.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateTemplate.java new file mode 100644 index 0000000000..928a18fcd6 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/JdbcAggregateTemplate.java @@ -0,0 +1,686 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.mapping.IdentifierAccessor; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.relational.core.EntityLifecycleEventDelegate; +import org.springframework.data.relational.core.conversion.AggregateChange; +import org.springframework.data.relational.core.conversion.BatchingAggregateChange; +import org.springframework.data.relational.core.conversion.DeleteAggregateChange; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.conversion.RelationalEntityDeleteWriter; +import org.springframework.data.relational.core.conversion.RelationalEntityInsertWriter; +import org.springframework.data.relational.core.conversion.RelationalEntityUpdateWriter; +import org.springframework.data.relational.core.conversion.RelationalEntityVersionUtils; +import org.springframework.data.relational.core.conversion.RootAggregateChange; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import 
org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.mapping.event.*; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.support.PageableExecutionUtils; +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * {@link JdbcAggregateOperations} implementation, storing aggregates in and obtaining them from a JDBC data store. + * + * @author Jens Schauder + * @author Mark Paluch + * @author Thomas Lang + * @author Christoph Strobl + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @author Diego Krupitza + * @author Sergey Korotaev + */ +public class JdbcAggregateTemplate implements JdbcAggregateOperations { + + private final EntityLifecycleEventDelegate eventDelegate = new EntityLifecycleEventDelegate(); + private final RelationalMappingContext context; + + private final RelationalEntityDeleteWriter jdbcEntityDeleteWriter; + + private final DataAccessStrategy accessStrategy; + private final AggregateChangeExecutor executor; + private final JdbcConverter converter; + + private EntityCallbacks entityCallbacks = EntityCallbacks.create(); + + /** + * Creates a new {@link JdbcAggregateTemplate} given {@link ApplicationContext}, {@link RelationalMappingContext} and + * {@link DataAccessStrategy}. + * + * @param publisher must not be {@literal null}. + * @param context must not be {@literal null}. + * @param dataAccessStrategy must not be {@literal null}. 
+ * @since 1.1 + */ + public JdbcAggregateTemplate(ApplicationContext publisher, RelationalMappingContext context, JdbcConverter converter, + DataAccessStrategy dataAccessStrategy) { + + Assert.notNull(publisher, "ApplicationContext must not be null"); + Assert.notNull(context, "RelationalMappingContext must not be null"); + Assert.notNull(converter, "RelationalConverter must not be null"); + Assert.notNull(dataAccessStrategy, "DataAccessStrategy must not be null"); + + this.eventDelegate.setPublisher(publisher); + this.context = context; + this.accessStrategy = dataAccessStrategy; + this.converter = converter; + + this.jdbcEntityDeleteWriter = new RelationalEntityDeleteWriter(context); + + this.executor = new AggregateChangeExecutor(converter, accessStrategy); + + setEntityCallbacks(EntityCallbacks.create(publisher)); + } + + /** + * Creates a new {@link JdbcAggregateTemplate} given {@link ApplicationEventPublisher}, + * {@link RelationalMappingContext} and {@link DataAccessStrategy}. + * + * @param publisher must not be {@literal null}. + * @param context must not be {@literal null}. + * @param dataAccessStrategy must not be {@literal null}. 
+ */ + public JdbcAggregateTemplate(ApplicationEventPublisher publisher, RelationalMappingContext context, + JdbcConverter converter, DataAccessStrategy dataAccessStrategy) { + + Assert.notNull(publisher, "ApplicationEventPublisher must not be null"); + Assert.notNull(context, "RelationalMappingContext must not be null"); + Assert.notNull(converter, "RelationalConverter must not be null"); + Assert.notNull(dataAccessStrategy, "DataAccessStrategy must not be null"); + + this.eventDelegate.setPublisher(publisher); + this.context = context; + this.accessStrategy = dataAccessStrategy; + this.converter = converter; + + this.jdbcEntityDeleteWriter = new RelationalEntityDeleteWriter(context); + this.executor = new AggregateChangeExecutor(converter, accessStrategy); + } + + /** + * Sets the callbacks to be invoked on life cycle events. + * + * @param entityCallbacks must not be {@literal null}. + * @since 1.1 + */ + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "Callbacks must not be null"); + + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether lifecycle events such as {@link AfterSaveEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. 
+ * @since 3.0 + * @see AbstractRelationalEvent + */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override + public T save(T instance) { + + Assert.notNull(instance, "Aggregate instance must not be null"); + + verifyIdProperty(instance); + + return performSave(new EntityAndChangeCreator<>(instance, changeCreatorSelectorForSave(instance))); + } + + @Override + public List saveAll(Iterable instances) { + return doInBatch(instances, (first) -> (second -> changeCreatorSelectorForSave(first).apply(second))); + } + + /** + * Dedicated insert function to do just the insert of an instance of an aggregate, including all the members of the + * aggregate. + * + * @param instance the aggregate root of the aggregate to be inserted. Must not be {@code null}. + * @return the saved instance. + */ + @Override + public T insert(T instance) { + + Assert.notNull(instance, "Aggregate instance must not be null"); + + return performSave( + new EntityAndChangeCreator<>(instance, entity -> createInsertChange(prepareVersionForInsert(entity)))); + } + + @Override + public List insertAll(Iterable instances) { + return doInBatch(instances, (__) -> (entity -> createInsertChange(prepareVersionForInsert(entity)))); + } + + /** + * Dedicated update function to do just an update of an instance of an aggregate, including all the members of the + * aggregate. + * + * @param instance the aggregate root of the aggregate to be updated. Must not be {@code null}. + * @return the saved instance.
+ */ + @Override + public T update(T instance) { + + Assert.notNull(instance, "Aggregate instance must not be null"); + + return performSave( + new EntityAndChangeCreator<>(instance, entity -> createUpdateChange(prepareVersionForUpdate(entity)))); + } + + @Override + public List updateAll(Iterable instances) { + return doInBatch(instances, (__) -> (entity -> createUpdateChange(prepareVersionForUpdate(entity)))); + } + + private List doInBatch(Iterable instances,Function>> changeCreatorFunction) { + + Assert.notNull(instances, "Aggregate instances must not be null"); + + if (!instances.iterator().hasNext()) { + return Collections.emptyList(); + } + + List> entityAndChangeCreators = new ArrayList<>(); + for (T instance : instances) { + verifyIdProperty(instance); + entityAndChangeCreators.add(new EntityAndChangeCreator(instance, changeCreatorFunction.apply(instance))); + } + return performSaveAll(entityAndChangeCreators); + } + + @Override + public long count(Class domainType) { + + Assert.notNull(domainType, "Domain type must not be null"); + + return accessStrategy.count(domainType); + } + + @Override + public long count(Query query, Class domainType) { + return accessStrategy.count(query, domainType); + } + + @Override + public boolean exists(Query query, Class domainType) { + return accessStrategy.exists(query, domainType); + } + + @Override + public boolean existsById(Object id, Class domainType) { + + Assert.notNull(id, "Id must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + + return accessStrategy.existsById(id, domainType); + } + + @Override + public T findById(Object id, Class domainType) { + + Assert.notNull(id, "Id must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + + T entity = accessStrategy.findById(id, domainType); + if (entity == null) { + return null; + } + return triggerAfterConvert(entity); + } + + @Override + public List findAll(Class domainType, Sort sort) { + + 
Assert.notNull(domainType, "Domain type must not be null"); + + Iterable all = accessStrategy.findAll(domainType, sort); + return triggerAfterConvert(all); + } + + @Override + public Stream streamAll(Class domainType, Sort sort) { + + Assert.notNull(domainType, "Domain type must not be null"); + + Stream allStreamable = accessStrategy.streamAll(domainType, sort); + + return allStreamable.map(this::triggerAfterConvert); + } + + @Override + public Page findAll(Class domainType, Pageable pageable) { + + Assert.notNull(domainType, "Domain type must not be null"); + + Iterable items = triggerAfterConvert(accessStrategy.findAll(domainType, pageable)); + List content = StreamSupport.stream(items.spliterator(), false).collect(Collectors.toList()); + + return PageableExecutionUtils.getPage(content, pageable, () -> accessStrategy.count(domainType)); + } + + @Override + public Optional findOne(Query query, Class domainType) { + return accessStrategy.findOne(query, domainType).map(this::triggerAfterConvert); + } + + @Override + public List findAll(Query query, Class domainType) { + + Iterable all = accessStrategy.findAll(query, domainType); + + return triggerAfterConvert(all); + } + + @Override + public Stream streamAll(Query query, Class domainType) { + return accessStrategy.streamAll(query, domainType).map(this::triggerAfterConvert); + } + + @Override + public Page findAll(Query query, Class domainType, Pageable pageable) { + + Iterable items = triggerAfterConvert(accessStrategy.findAll(query, domainType, pageable)); + List content = StreamSupport.stream(items.spliterator(), false).collect(Collectors.toList()); + + return PageableExecutionUtils.getPage(content, pageable, () -> accessStrategy.count(query, domainType)); + } + + @Override + public List findAll(Class domainType) { + + Assert.notNull(domainType, "Domain type must not be null"); + + Iterable all = accessStrategy.findAll(domainType); + return triggerAfterConvert(all); + } + + @Override + public Stream 
streamAll(Class domainType) { + + Iterable items = triggerAfterConvert(accessStrategy.findAll(domainType)); + return StreamSupport.stream(items.spliterator(), false).map(this::triggerAfterConvert); + } + + @Override + public List findAllById(Iterable ids, Class domainType) { + + Assert.notNull(ids, "Ids must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + + Iterable allById = accessStrategy.findAllById(ids, domainType); + return triggerAfterConvert(allById); + } + + @Override + public Stream streamAllByIds(Iterable ids, Class domainType) { + + Assert.notNull(ids, "Ids must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + + Stream allByIdStreamable = accessStrategy.streamAllByIds(ids, domainType); + + return allByIdStreamable.map(this::triggerAfterConvert); + } + + @Override + public void delete(S aggregateRoot) { + + Assert.notNull(aggregateRoot, "Aggregate root must not be null"); + + Class domainType = (Class) aggregateRoot.getClass(); + IdentifierAccessor identifierAccessor = context.getRequiredPersistentEntity(domainType) + .getIdentifierAccessor(aggregateRoot); + + deleteTree(identifierAccessor.getRequiredIdentifier(), aggregateRoot, domainType); + } + + @Override + public void deleteById(Object id, Class domainType) { + + Assert.notNull(id, "Id must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + + deleteTree(id, null, domainType); + } + + @Override + public void deleteAllById(Iterable ids, Class domainType) { + + if (!ids.iterator().hasNext()) { + return; + } + + BatchingAggregateChange> batchingAggregateChange = BatchingAggregateChange + .forDelete(domainType); + + ids.forEach(id -> { + + DeleteAggregateChange change = createDeletingChange(id, null, domainType); + triggerBeforeDelete(null, id, change); + batchingAggregateChange.add(change); + }); + + executor.executeDelete(batchingAggregateChange); + + ids.forEach(id -> triggerAfterDelete(null, id, 
batchingAggregateChange)); + } + + @Override + public void deleteAll(Class domainType) { + + Assert.notNull(domainType, "Domain type must not be null"); + + MutableAggregateChange change = createDeletingChange(domainType); + executor.executeDelete(change); + } + + @Override + public void deleteAll(Iterable instances) { + + if (!instances.iterator().hasNext()) { + return; + } + + Map> groupedByType = new HashMap<>(); + + for (T instance : instances) { + + Class type = instance.getClass(); + final List list = groupedByType.computeIfAbsent(type, __ -> new ArrayList<>()); + list.add(instance); + } + + for (Class type : groupedByType.keySet()) { + doDeleteAll(groupedByType.get(type), type); + } + } + + private void verifyIdProperty(T instance) { + // accessing the id property just to raise an exception in the case it does not exist. + context.getRequiredPersistentEntity(instance.getClass()).getRequiredIdProperty(); + } + + private void doDeleteAll(Iterable instances, Class domainType) { + + BatchingAggregateChange> batchingAggregateChange = BatchingAggregateChange + .forDelete(domainType); + Map instancesBeforeExecute = new LinkedHashMap<>(); + + instances.forEach(instance -> { + + Object id = context.getRequiredPersistentEntity(domainType).getIdentifierAccessor(instance) + .getRequiredIdentifier(); + DeleteAggregateChange change = createDeletingChange(id, instance, domainType); + instancesBeforeExecute.put(id, triggerBeforeDelete(instance, id, change)); + batchingAggregateChange.add(change); + }); + + executor.executeDelete(batchingAggregateChange); + + instancesBeforeExecute.forEach((id, instance) -> triggerAfterDelete(instance, id, batchingAggregateChange)); + } + + private T afterExecute(AggregateChange change, T entityAfterExecution) { + + Object identifier = context.getRequiredPersistentEntity(change.getEntityType()) + .getIdentifierAccessor(entityAfterExecution).getIdentifier(); + + Assert.notNull(identifier, "After saving the identifier must not be null"); + + 
return triggerAfterSave(entityAfterExecution, change); + } + + private RootAggregateChange beforeExecute(EntityAndChangeCreator instance) { + + Assert.notNull(instance.entity, "Aggregate instance must not be null"); + + T aggregateRoot = triggerBeforeConvert(instance.entity); + + RootAggregateChange change = instance.changeCreator.apply(aggregateRoot); + + aggregateRoot = triggerBeforeSave(change.getRoot(), change); + + change.setRoot(aggregateRoot); + + return change; + } + + private void deleteTree(Object id, @Nullable T entity, Class domainType) { + + MutableAggregateChange change = createDeletingChange(id, entity, domainType); + + entity = triggerBeforeDelete(entity, id, change); + + executor.executeDelete(change); + + triggerAfterDelete(entity, id, change); + } + + private T performSave(EntityAndChangeCreator instance) { + + // noinspection unchecked + BatchingAggregateChange> batchingAggregateChange = // + BatchingAggregateChange.forSave((Class) ClassUtils.getUserClass(instance.entity)); + batchingAggregateChange.add(beforeExecute(instance)); + + Iterator afterExecutionIterator = executor.executeSave(batchingAggregateChange).iterator(); + + Assert.isTrue(afterExecutionIterator.hasNext(), "Instances after execution must not be empty"); + + return afterExecute(batchingAggregateChange, afterExecutionIterator.next()); + } + + private List performSaveAll(Iterable> instances) { + + BatchingAggregateChange> batchingAggregateChange = null; + + for (EntityAndChangeCreator instance : instances) { + if (batchingAggregateChange == null) { + // noinspection unchecked + batchingAggregateChange = BatchingAggregateChange.forSave((Class) ClassUtils.getUserClass(instance.entity)); + } + batchingAggregateChange.add(beforeExecute(instance)); + } + + Assert.notNull(batchingAggregateChange, "Iterable in saveAll must not be empty"); + + List instancesAfterExecution = executor.executeSave(batchingAggregateChange); + + ArrayList results = new 
ArrayList<>(instancesAfterExecution.size()); + for (T instance : instancesAfterExecution) { + results.add(afterExecute(batchingAggregateChange, instance)); + } + + return results; + } + + private Function> changeCreatorSelectorForSave(T instance) { + + return context.getRequiredPersistentEntity(instance.getClass()).isNew(instance) + ? entity -> createInsertChange(prepareVersionForInsert(entity)) + : entity -> createUpdateChange(prepareVersionForUpdate(entity)); + } + + private RootAggregateChange createInsertChange(T instance) { + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(instance); + new RelationalEntityInsertWriter(context).write(instance, aggregateChange); + return aggregateChange; + } + + private RootAggregateChange createUpdateChange(EntityAndPreviousVersion entityAndVersion) { + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entityAndVersion.entity, + entityAndVersion.version); + new RelationalEntityUpdateWriter(context).write(entityAndVersion.entity, aggregateChange); + return aggregateChange; + } + + private T prepareVersionForInsert(T instance) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(instance); + T preparedInstance = instance; + if (persistentEntity.hasVersionProperty()) { + RelationalPersistentProperty versionProperty = persistentEntity.getRequiredVersionProperty(); + + long initialVersion = versionProperty.getActualType().isPrimitive() ? 1L : 0; + + preparedInstance = RelationalEntityVersionUtils.setVersionNumberOnEntity( // + instance, initialVersion, persistentEntity, converter); + } + return preparedInstance; + } + + private EntityAndPreviousVersion prepareVersionForUpdate(T instance) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(instance); + T preparedInstance = instance; + Number previousVersion = null; + if (persistentEntity.hasVersionProperty()) { + // If the root aggregate has a version property, increment it. 
+ previousVersion = RelationalEntityVersionUtils.getVersionNumberFromEntity(instance, persistentEntity, converter); + + long newVersion = (previousVersion == null ? 0 : previousVersion.longValue()) + 1; + + preparedInstance = RelationalEntityVersionUtils.setVersionNumberOnEntity(instance, newVersion, persistentEntity, + converter); + } + return new EntityAndPreviousVersion<>(preparedInstance, previousVersion); + } + + @SuppressWarnings("unchecked") + private RelationalPersistentEntity getRequiredPersistentEntity(T instance) { + return (RelationalPersistentEntity) context.getRequiredPersistentEntity(instance.getClass()); + } + + private DeleteAggregateChange createDeletingChange(Object id, @Nullable T entity, Class domainType) { + + Number previousVersion = null; + if (entity != null) { + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(entity); + if (persistentEntity.hasVersionProperty()) { + previousVersion = RelationalEntityVersionUtils.getVersionNumberFromEntity(entity, persistentEntity, converter); + } + } + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(domainType, previousVersion); + jdbcEntityDeleteWriter.write(id, aggregateChange); + return aggregateChange; + } + + private MutableAggregateChange createDeletingChange(Class domainType) { + + MutableAggregateChange aggregateChange = MutableAggregateChange.forDelete(domainType); + jdbcEntityDeleteWriter.write(null, aggregateChange); + return aggregateChange; + } + + private List triggerAfterConvert(Iterable all) { + + List result = new ArrayList<>(); + + for (T e : all) { + result.add(triggerAfterConvert(e)); + } + + return result; + } + + private T triggerAfterConvert(T entity) { + + eventDelegate.publishEvent(() -> new AfterConvertEvent<>(entity)); + return entityCallbacks.callback(AfterConvertCallback.class, entity); + } + + private T triggerBeforeConvert(T aggregateRoot) { + + eventDelegate.publishEvent(() -> new BeforeConvertEvent<>(aggregateRoot)); + 
return entityCallbacks.callback(BeforeConvertCallback.class, aggregateRoot); + } + + private T triggerBeforeSave(T aggregateRoot, AggregateChange change) { + + eventDelegate.publishEvent(() -> new BeforeSaveEvent<>(aggregateRoot, change)); + + return entityCallbacks.callback(BeforeSaveCallback.class, aggregateRoot, change); + } + + private T triggerAfterSave(T aggregateRoot, AggregateChange change) { + + eventDelegate.publishEvent(() -> new AfterSaveEvent<>(aggregateRoot, change)); + return entityCallbacks.callback(AfterSaveCallback.class, aggregateRoot); + } + + private void triggerAfterDelete(@Nullable T aggregateRoot, Object id, AggregateChange change) { + + eventDelegate.publishEvent(() -> new AfterDeleteEvent<>(Identifier.of(id), aggregateRoot, change)); + + if (aggregateRoot != null) { + entityCallbacks.callback(AfterDeleteCallback.class, aggregateRoot); + } + } + + @Nullable + private T triggerBeforeDelete(@Nullable T aggregateRoot, Object id, MutableAggregateChange change) { + + eventDelegate.publishEvent(() -> new BeforeDeleteEvent<>(Identifier.of(id), aggregateRoot, change)); + + if (aggregateRoot != null) { + return entityCallbacks.callback(BeforeDeleteCallback.class, aggregateRoot, change); + } + + return null; + } + + private record EntityAndPreviousVersion (T entity, @Nullable Number version) { + } + + private record EntityAndChangeCreator (T entity, Function> changeCreator) { + } +} diff --git a/src/main/java/org/springframework/data/jdbc/core/UnableToSetId.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/UnableToSetId.java similarity index 88% rename from src/main/java/org/springframework/data/jdbc/core/UnableToSetId.java rename to spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/UnableToSetId.java index d05dc614c8..cbc6525103 100644 --- a/src/main/java/org/springframework/data/jdbc/core/UnableToSetId.java +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/UnableToSetId.java @@ -1,11 
+1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ * Signals failure to set the id property of an entity. * * @author Jens Schauder - * @since 2.0 */ public class UnableToSetId extends NonTransientDataAccessException { diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/AggregateReader.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/AggregateReader.java new file mode 100644 index 0000000000..be3629f9d7 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/AggregateReader.java @@ -0,0 +1,230 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.query.CriteriaDefinition; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sqlgeneration.AliasFactory; +import org.springframework.data.relational.core.sqlgeneration.SingleQuerySqlGenerator; +import org.springframework.data.relational.core.sqlgeneration.SqlGenerator; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.Streamable; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.lang.Nullable; + +/** + * Reads complete Aggregates from the database, by generating appropriate SQL using a {@link SingleQuerySqlGenerator} + * through {@link org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate}. Results are converted into an + * intermediate {@link RowDocumentResultSetExtractor RowDocument} and mapped via + * {@link org.springframework.data.relational.core.conversion.RelationalConverter#read(Class, RowDocument)}. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @since 3.2 + */ +class AggregateReader implements PathToColumnMapping { + + private final AliasFactory aliasFactory; + private final SqlGenerator sqlGenerator; + private final JdbcConverter converter; + private final NamedParameterJdbcOperations jdbcTemplate; + private final RowDocumentResultSetExtractor extractor; + + AggregateReader(Dialect dialect, JdbcConverter converter, NamedParameterJdbcOperations jdbcTemplate) { + + this.aliasFactory = new AliasFactory(); + this.converter = converter; + this.jdbcTemplate = jdbcTemplate; + this.sqlGenerator = new SingleQuerySqlGenerator(converter.getMappingContext(), aliasFactory, dialect); + this.extractor = new RowDocumentResultSetExtractor(converter.getMappingContext(), this); + } + + @Override + public String column(AggregatePath path) { + + String alias = aliasFactory.getColumnAlias(path); + + if (alias == null) { + throw new IllegalStateException(String.format("Alias for '%s' must not be null", path)); + } + + return alias; + } + + @Override + public String keyColumn(AggregatePath path) { + return aliasFactory.getKeyAlias(path); + } + + /** + * Select a single aggregate by its identifier. + * + * @param id the identifier, must not be {@literal null}. + * @param entity the persistent entity type must not be {@literal null}. + * @return the found aggregate root, or {@literal null} if not found. + * @param aggregator type. + */ + @Nullable + public T findById(Object id, RelationalPersistentEntity entity) { + + Query query = Query.query(Criteria.where(entity.getRequiredIdProperty().getName()).is(id)).limit(1); + + return findOne(query, entity); + } + + /** + * Select a single aggregate by a {@link Query}. + * + * @param query the query to run, must not be {@literal null}. + * @param entity the persistent entity type must not be {@literal null}. + * @return the found aggregate root, or {@literal null} if not found. + * @param aggregator type. 
+ */ + @Nullable + public T findOne(Query query, RelationalPersistentEntity entity) { + return doFind(query, entity, rs -> extractZeroOrOne(rs, entity)); + } + + /** + * Select aggregates by their identifiers. + * + * @param ids the identifiers, must not be {@literal null}. + * @param entity the persistent entity type must not be {@literal null}. + * @return the found aggregate roots. The resulting list can be empty or may not contain objects that correspond to + * the identifiers when the objects are not found in the database. + * @param aggregator type. + */ + public List findAllById(Iterable ids, RelationalPersistentEntity entity) { + + Collection identifiers = ids instanceof Collection idl ? idl : Streamable.of(ids).toList(); + Query query = Query.query(Criteria.where(entity.getRequiredIdProperty().getName()).in(identifiers)); + + return findAll(query, entity); + } + + /** + * Select all aggregates by type. + * + * @param entity the persistent entity type must not be {@literal null}. + * @return the found aggregate roots. + * @param aggregator type. + */ + @SuppressWarnings("ConstantConditions") + public List findAll(RelationalPersistentEntity entity) { + return jdbcTemplate.query(sqlGenerator.findAll(entity), + (ResultSetExtractor>) rs -> extractAll(rs, entity)); + } + + /** + * Select all aggregates by query. + * + * @param query the query to run, must not be {@literal null}. + * @param entity the persistent entity type must not be {@literal null}. + * @return the found aggregate roots. + * @param aggregator type. 
+ */ + public List findAll(Query query, RelationalPersistentEntity entity) { + return doFind(query, entity, rs -> extractAll(rs, entity)); + } + + @SuppressWarnings("ConstantConditions") + private R doFind(Query query, RelationalPersistentEntity entity, ResultSetExtractor extractor) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + Condition condition = createCondition(query, parameterSource, entity); + String sql = sqlGenerator.findAll(entity, condition); + + return jdbcTemplate.query(sql, parameterSource, extractor); + } + + @Nullable + private Condition createCondition(Query query, MapSqlParameterSource parameterSource, + RelationalPersistentEntity entity) { + + QueryMapper queryMapper = new QueryMapper(converter); + + Optional criteria = query.getCriteria(); + return criteria.map(criteriaDefinition -> queryMapper.getMappedObject(parameterSource, criteriaDefinition, + Table.create(entity.getQualifiedTableName()), entity)).orElse(null); + } + + /** + * Extracts a list of aggregates from the given {@link ResultSet} by utilizing the + * {@link RowDocumentResultSetExtractor} and the {@link JdbcConverter}. When used as a method reference this conforms + * to the {@link org.springframework.jdbc.core.ResultSetExtractor} contract. + * + * @param rs the {@link ResultSet} from which to extract the data. Must not be {@literal null}. + * @return a {@code List} of aggregates, fully converted. + * @throws SQLException on underlying JDBC errors. + */ + private List extractAll(ResultSet rs, RelationalPersistentEntity entity) throws SQLException { + + Iterator iterate = extractor.iterate(entity, rs); + List resultList = new ArrayList<>(); + + while (iterate.hasNext()) { + resultList.add(converter.read(entity.getType(), iterate.next())); + } + + return resultList; + } + + /** + * Extracts a single aggregate or {@literal null} from the given {@link ResultSet} by utilizing the + * {@link RowDocumentResultSetExtractor} and the {@link JdbcConverter}.
When used as a method reference this conforms + * to the {@link org.springframework.jdbc.core.ResultSetExtractor} contract. + * + * @param rs the {@link ResultSet} from which to extract the data. Must not be {@literal null}. + * @return The single instance when the conversion results in exactly one instance. If the {@literal ResultSet} is + * empty, null is returned. + * @throws SQLException on underlying JDBC errors. + * @throws IncorrectResultSizeDataAccessException when the conversion yields more than one instance. + */ + @Nullable + private T extractZeroOrOne(ResultSet rs, RelationalPersistentEntity entity) throws SQLException { + + Iterator iterate = extractor.iterate(entity, rs); + + if (iterate.hasNext()) { + + RowDocument object = iterate.next(); + if (iterate.hasNext()) { + throw new IncorrectResultSizeDataAccessException(1); + } + return converter.read(entity.getType(), object); + } + + return null; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/AggregateReferenceConverters.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/AggregateReferenceConverters.java new file mode 100644 index 0000000000..b3d0a2ce3c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/AggregateReferenceConverters.java @@ -0,0 +1,136 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import org.springframework.core.ResolvableType; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.TypeDescriptor; +import org.springframework.core.convert.converter.GenericConverter; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.lang.Nullable; + +/** + * Converters for aggregate references. They need a {@link ConversionService} in order to delegate the conversion of the + * content of the {@link AggregateReference}. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 2.3 + */ +class AggregateReferenceConverters { + + /** + * Returns the converters to be registered. + * + * @return a collection of converters. Guaranteed to be not {@literal null}. + */ + public static Collection getConvertersToRegister(ConversionService conversionService) { + + return Arrays.asList(new AggregateReferenceToSimpleTypeConverter(conversionService), + new SimpleTypeToAggregateReferenceConverter(conversionService)); + } + + /** + * Converts from an AggregateReference to its id, leaving the conversion of the id to the ultimate target type to the + * delegate {@link ConversionService}. 
+ */ + @WritingConverter + private static class AggregateReferenceToSimpleTypeConverter implements GenericConverter { + + private static final Set CONVERTIBLE_TYPES = Collections + .singleton(new ConvertiblePair(AggregateReference.class, Object.class)); + + private final ConversionService delegate; + + AggregateReferenceToSimpleTypeConverter(ConversionService delegate) { + this.delegate = delegate; + } + + @Override + public Set getConvertibleTypes() { + return CONVERTIBLE_TYPES; + } + + @Override + public Object convert(@Nullable Object source, TypeDescriptor sourceDescriptor, TypeDescriptor targetDescriptor) { + + if (source == null) { + return null; + } + + // if the target type is an AggregateReference we are going to assume it is of the correct type, + // because it was already converted. + Class objectType = targetDescriptor.getObjectType(); + if (objectType.isAssignableFrom(AggregateReference.class)) { + return source; + } + + Object id = ((AggregateReference) source).getId(); + + if (id == null) { + throw new IllegalStateException( + String.format("Aggregate references id must not be null when converting to %s from %s to %s", source, + sourceDescriptor, targetDescriptor)); + } + + return delegate.convert(id, TypeDescriptor.valueOf(id.getClass()), targetDescriptor); + } + } + + /** + * Convert any simple type to an {@link AggregateReference}. If the {@literal targetDescriptor} contains information + * about the generic type id will properly get converted to the desired type by the delegate + * {@link ConversionService}. 
+ */ + @ReadingConverter + private static class SimpleTypeToAggregateReferenceConverter implements GenericConverter { + + private static final Set CONVERTIBLE_TYPES = Collections + .singleton(new ConvertiblePair(Object.class, AggregateReference.class)); + + private final ConversionService delegate; + + SimpleTypeToAggregateReferenceConverter(ConversionService delegate) { + this.delegate = delegate; + } + + @Override + public Set getConvertibleTypes() { + return CONVERTIBLE_TYPES; + } + + @Override + public Object convert(@Nullable Object source, TypeDescriptor sourceDescriptor, TypeDescriptor targetDescriptor) { + + if (source == null) { + return null; + } + + ResolvableType componentType = targetDescriptor.getResolvableType().getGenerics()[1]; + TypeDescriptor targetType = TypeDescriptor.valueOf(componentType.resolve()); + Object convertedId = delegate.convert(source, TypeDescriptor.valueOf(source.getClass()), targetType); + + return AggregateReference.to(convertedId); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ArrayUtils.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ArrayUtils.java new file mode 100644 index 0000000000..411684bc84 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ArrayUtils.java @@ -0,0 +1,464 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.util.Assert; + +/** + * A collection of utility methods for dealing with arrays. + *

+ * Mainly for internal use within the framework. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 1.1 + */ +final class ArrayUtils { + + /** + * An empty immutable {@code boolean} array. + */ + public static final boolean[] EMPTY_BOOLEAN_ARRAY = new boolean[0]; + + /** + * An empty immutable {@link Boolean} array. + */ + public static final Boolean[] EMPTY_BOOLEAN_OBJECT_ARRAY = new Boolean[0]; + + /** + * An empty immutable {@code byte} array. + */ + public static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; + + /** + * An empty immutable {@link Byte} array. + */ + public static final Byte[] EMPTY_BYTE_OBJECT_ARRAY = new Byte[0]; + + /** + * An empty immutable {@code char} array. + */ + public static final char[] EMPTY_CHAR_ARRAY = new char[0]; + + /** + * An empty immutable {@link Character} array. + */ + public static final Character[] EMPTY_CHARACTER_OBJECT_ARRAY = new Character[0]; + + /** + * An empty immutable {@code double} array. + */ + public static final double[] EMPTY_DOUBLE_ARRAY = new double[0]; + + /** + * An empty immutable {@code Double} array. + */ + public static final Double[] EMPTY_DOUBLE_OBJECT_ARRAY = new Double[0]; + + /** + * An empty immutable {@code float} array. + */ + public static final float[] EMPTY_FLOAT_ARRAY = new float[0]; + + /** + * An empty immutable {@code Float} array. + */ + public static final Float[] EMPTY_FLOAT_OBJECT_ARRAY = new Float[0]; + + /** + * An empty immutable {@code int} array. + */ + public static final int[] EMPTY_INT_ARRAY = new int[0]; + + /** + * An empty immutable {@link Integer} array. + */ + public static final Integer[] EMPTY_INTEGER_OBJECT_ARRAY = new Integer[0]; + + /** + * An empty immutable {@code long} array. + */ + public static final long[] EMPTY_LONG_ARRAY = new long[0]; + + /** + * An empty immutable {@link Long} array. + */ + public static final Long[] EMPTY_LONG_OBJECT_ARRAY = new Long[0]; + + /** + * An empty immutable {@code short} array. 
+ */ + public static final short[] EMPTY_SHORT_ARRAY = new short[0]; + + /** + * An empty immutable {@link Short} array. + */ + public static final Short[] EMPTY_SHORT_OBJECT_ARRAY = new Short[0]; + + private ArrayUtils() { + } + + /** + * Converts an {@code Boolean[]} into a {@code boolean[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code boolean[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static boolean[] toPrimitive(Boolean[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_BOOLEAN_ARRAY; + } + + boolean[] booleans = new boolean[array.length]; + for (int i = 0; i < array.length; i++) { + booleans[i] = array[i]; + } + + return booleans; + } + + /** + * Converts an {@code boolean[]} into a {@code Boolean[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Boolean[]} of same size with the boxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static Boolean[] toObject(boolean[] array) { + + if (array.length == 0) { + return EMPTY_BOOLEAN_OBJECT_ARRAY; + } + + Boolean[] booleans = new Boolean[array.length]; + for (int i = 0; i < array.length; i++) { + booleans[i] = array[i]; + } + + return booleans; + } + + /** + * Converts an {@code Byte[]} into a {@code byte[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code byte[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. 
+ */ + static byte[] toPrimitive(Byte[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_BYTE_ARRAY; + } + + byte[] bytes = new byte[array.length]; + for (int i = 0; i < array.length; i++) { + bytes[i] = array[i]; + } + + return bytes; + } + + /** + * Converts an {@code byte[]} into a {@code Byte[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Byte[]} of same size with the boxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static Byte[] toObject(byte[] array) { + + if (array.length == 0) { + return EMPTY_BYTE_OBJECT_ARRAY; + } + + Byte[] bytes = new Byte[array.length]; + for (int i = 0; i < array.length; i++) { + bytes[i] = array[i]; + } + + return bytes; + } + + /** + * Converts an {@code Character[]} into a {@code char[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code char[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static char[] toPrimitive(Character[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_CHAR_ARRAY; + } + + char[] chars = new char[array.length]; + for (int i = 0; i < array.length; i++) { + chars[i] = array[i]; + } + + return chars; + } + + /** + * Converts an {@code char[]} into a {@code Character[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Character[]} of same size with the boxed values of the input array. Guaranteed to be not + * {@literal null}. 
+ */ + static Character[] toObject(char[] array) { + + if (array.length == 0) { + return EMPTY_CHARACTER_OBJECT_ARRAY; + } + + Character[] objects = new Character[array.length]; + for (int i = 0; i < array.length; i++) { + objects[i] = array[i]; + } + return objects; + } + + /** + * Converts an {@code Double[]} into a {@code double[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code double[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static double[] toPrimitive(Double[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_DOUBLE_ARRAY; + } + + double[] doubles = new double[array.length]; + for (int i = 0; i < array.length; i++) { + doubles[i] = array[i]; + } + + return doubles; + } + + /** + * Converts an {@code double[]} into a {@code Double[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Double[]} of same size with the boxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static Double[] toObject(double[] array) { + + if (array.length == 0) { + return EMPTY_DOUBLE_OBJECT_ARRAY; + } + + Double[] objects = new Double[array.length]; + for (int i = 0; i < array.length; i++) { + objects[i] = array[i]; + } + + return objects; + } + + /** + * Converts an {@code Float[]} into a {@code float[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code float[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. 
+ */ + static float[] toPrimitive(Float[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_FLOAT_ARRAY; + } + + float[] floats = new float[array.length]; + for (int i = 0; i < array.length; i++) { + floats[i] = array[i]; + } + + return floats; + } + + /** + * Converts an {@code float[]} into a {@code Float[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Float[]} of same size with the boxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static Float[] toObject(float[] array) { + + if (array.length == 0) { + return EMPTY_FLOAT_OBJECT_ARRAY; + } + + Float[] objects = new Float[array.length]; + for (int i = 0; i < array.length; i++) { + objects[i] = array[i]; + } + + return objects; + } + + /** + * Converts an {@code Integer[]} into a {@code int[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code int[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static int[] toPrimitive(Integer[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_INT_ARRAY; + } + + int[] ints = new int[array.length]; + for (int i = 0; i < array.length; i++) { + ints[i] = array[i]; + } + + return ints; + } + + /** + * Converts an {@code int[]} into a {@code Integer[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Integer[]} of same size with the boxed values of the input array. Guaranteed to be not + * {@literal null}. 
+ */ + static Integer[] toObject(int[] array) { + + if (array.length == 0) { + return EMPTY_INTEGER_OBJECT_ARRAY; + } + + Integer[] objects = new Integer[array.length]; + for (int i = 0; i < array.length; i++) { + objects[i] = array[i]; + } + + return objects; + } + + /** + * Converts an {@code Long[]} into a {@code long[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code long[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static long[] toPrimitive(Long[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_LONG_ARRAY; + } + + long[] longs = new long[array.length]; + for (int i = 0; i < array.length; i++) { + longs[i] = array[i]; + } + + return longs; + } + + /** + * Converts an {@code long[]} into a {@code Long[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Long[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static Long[] toObject(long[] array) { + + if (array.length == 0) { + return EMPTY_LONG_OBJECT_ARRAY; + } + + Long[] objects = new Long[array.length]; + for (int i = 0; i < array.length; i++) { + objects[i] = array[i]; + } + return objects; + } + + /** + * Converts an {@code Short[]} into a {@code short[]}. + * + * @param array the array to be converted. Must not be {@literal null} and must not contain {@literal null} elements. + * @return a {@code short[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. 
+ */ + static short[] toPrimitive(Short[] array) { + + Assert.noNullElements(array, "Array must not contain null elements"); + + if (array.length == 0) { + return EMPTY_SHORT_ARRAY; + } + + short[] shorts = new short[array.length]; + for (int i = 0; i < array.length; i++) { + shorts[i] = array[i]; + } + + return shorts; + } + + /** + * Converts an {@code short[]} into a {@code Short[]}. + * + * @param array the array to be converted. Must not be {@literal null}. + * @return a {@code Short[]} of same size with the unboxed values of the input array. Guaranteed to be not + * {@literal null}. + */ + static Short[] toObject(short[] array) { + + if (array.length == 0) { + return EMPTY_SHORT_OBJECT_ARRAY; + } + + Short[] objects = new Short[array.length]; + for (int i = 0; i < array.length; i++) { + objects[i] = array[i]; + } + + return objects; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/BatchInsertStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/BatchInsertStrategy.java new file mode 100644 index 0000000000..7a63d216e6 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/BatchInsertStrategy.java @@ -0,0 +1,36 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.jdbc.core.namedparam.SqlParameterSource; + +/** + * Strategy for executing a batch insert. + * + * @author Chirag Tailor + * @since 2.4 + */ +interface BatchInsertStrategy { + + /** + * @param sql the insert sql. Must not be {@code null}. + * @param sqlParameterSources the sql parameters for each record to be inserted. Must not be {@code null}. + * @return the ids corresponding to each record that was inserted, if ids were generated. If ids were not generated, + * elements will be {@code null}. + * @since 2.4 + */ + Object[] execute(String sql, SqlParameterSource[] sqlParameterSources); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/BindParameterNameSanitizer.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/BindParameterNameSanitizer.java new file mode 100644 index 0000000000..64213bd939 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/BindParameterNameSanitizer.java @@ -0,0 +1,34 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core.convert; + +import java.util.regex.Pattern; + +/** + * Sanitizes the name of bind parameters, so they don't contain any illegal characters. 
+ * + * @author Jens Schauder + * @since 3.0.2 + */ +abstract class BindParameterNameSanitizer { + + private static final Pattern parameterPattern = Pattern.compile("\\W"); + + static String sanitize(String rawName) { + return parameterPattern.matcher(rawName).replaceAll(""); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/CachingResultSet.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/CachingResultSet.java new file mode 100644 index 0000000000..d98fb9f5b6 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/CachingResultSet.java @@ -0,0 +1,124 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.lang.Nullable; + +/** + * Despite its name not really a {@link ResultSet}, but it offers the part of the {@literal ResultSet} API that is used + * by {@link AggregateReader}. It allows peeking in the next row of a ResultSet by caching one row of the ResultSet. 
+ * + * @author Jens Schauder + * @since 3.2 + */ +class CachingResultSet { + + private final ResultSetAccessor accessor; + private final ResultSet resultSet; + private Cache cache; + + CachingResultSet(ResultSet resultSet) { + + this.accessor = new ResultSetAccessor(resultSet); + this.resultSet = resultSet; + } + + public boolean next() { + + if (isPeeking()) { + + final boolean next = cache.next; + cache = null; + return next; + } + + try { + return resultSet.next(); + } catch (SQLException e) { + throw new RuntimeException("Failed to advance CachingResultSet", e); + } + } + + @Nullable + public Object getObject(String columnLabel) { + + Object returnValue; + if (isPeeking()) { + returnValue = cache.values.get(columnLabel); + } else { + returnValue = safeGetFromDelegate(columnLabel); + } + + return returnValue; + } + + @Nullable + Object peek(String columnLabel) { + + if (!isPeeking()) { + createCache(); + } + + if (!cache.next) { + return null; + } + + return safeGetFromDelegate(columnLabel); + } + + @Nullable + private Object safeGetFromDelegate(String columnLabel) { + return accessor.getObject(columnLabel); + } + + private void createCache() { + cache = new Cache(); + + try { + int columnCount = resultSet.getMetaData().getColumnCount(); + for (int i = 1; i <= columnCount; i++) { + // at least some databases return lower case labels although rs.getObject(UPPERCASE_LABEL) returns the expected + // value. The aliases we use happen to be uppercase. So we transform everything to upper case. 
+ cache.add(resultSet.getMetaData().getColumnLabel(i).toLowerCase(), + accessor.getObject(resultSet.getMetaData().getColumnLabel(i))); + } + + cache.next = resultSet.next(); + } catch (SQLException se) { + throw new RuntimeException("Can't cache result set data", se); + } + + } + + private boolean isPeeking() { + return cache != null; + } + + private static class Cache { + + boolean next; + Map values = new HashMap<>(); + + void add(String columnName, Object value) { + values.put(columnName, value); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/CascadingDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/CascadingDataAccessStrategy.java new file mode 100644 index 0000000000..d3c3124a20 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/CascadingDataAccessStrategy.java @@ -0,0 +1,220 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.springframework.data.jdbc.core.convert;

import static java.lang.Boolean.*;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Stream;

import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.mapping.PersistentPropertyPath;
import org.springframework.data.relational.core.conversion.IdValueSource;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
import org.springframework.data.relational.core.query.Query;
import org.springframework.data.relational.core.sql.LockMode;

/**
 * Delegates each method to the {@link DataAccessStrategy}s passed to the constructor in turn until the first that does
 * not throw an exception.
 *
 * @author Jens Schauder
 * @author Mark Paluch
 * @author Tyler Van Gorder
 * @author Milan Milanov
 * @author Myeonghyeon Lee
 * @author Chirag Tailor
 * @author Diego Krupitza
 * @author Sergey Korotaev
 * @since 1.1
 */
public class CascadingDataAccessStrategy implements DataAccessStrategy {

	// Defensive copy: callers cannot mutate the delegate chain after construction.
	private final List<DataAccessStrategy> strategies;

	public CascadingDataAccessStrategy(List<DataAccessStrategy> strategies) {
		this.strategies = new ArrayList<>(strategies);
	}

	@Override
	public <T> Object insert(T instance, Class<T> domainType, Identifier identifier, IdValueSource idValueSource) {
		return collect(das -> das.insert(instance, domainType, identifier, idValueSource));
	}

	@Override
	public <T> Object[] insert(List<InsertSubject<T>> insertSubjects, Class<T> domainType, IdValueSource idValueSource) {
		return collect(das -> das.insert(insertSubjects, domainType, idValueSource));
	}

	@Override
	public <S> boolean update(S instance, Class<S> domainType) {
		return collect(das -> das.update(instance, domainType));
	}

	@Override
	public <S> boolean updateWithVersion(S instance, Class<S> domainType, Number previousVersion) {
		return collect(das -> das.updateWithVersion(instance, domainType, previousVersion));
	}

	@Override
	public void delete(Object id, Class<?> domainType) {
		collectVoid(das -> das.delete(id, domainType));
	}

	@Override
	public void delete(Iterable<Object> ids, Class<?> domainType) {
		collectVoid(das -> das.delete(ids, domainType));
	}

	@Override
	public <T> void deleteWithVersion(Object id, Class<T> domainType, Number previousVersion) {
		collectVoid(das -> das.deleteWithVersion(id, domainType, previousVersion));
	}

	@Override
	public void delete(Object rootId, PersistentPropertyPath<RelationalPersistentProperty> propertyPath) {
		collectVoid(das -> das.delete(rootId, propertyPath));
	}

	@Override
	public void delete(Iterable<Object> rootIds, PersistentPropertyPath<RelationalPersistentProperty> propertyPath) {
		collectVoid(das -> das.delete(rootIds, propertyPath));
	}

	@Override
	public <T> void deleteAll(Class<T> domainType) {
		collectVoid(das -> das.deleteAll(domainType));
	}

	@Override
	public void deleteAll(PersistentPropertyPath<RelationalPersistentProperty> propertyPath) {
		collectVoid(das -> das.deleteAll(propertyPath));
	}

	@Override
	public <T> void acquireLockById(Object id, LockMode lockMode, Class<T> domainType) {
		collectVoid(das -> das.acquireLockById(id, lockMode, domainType));
	}

	@Override
	public <T> void acquireLockAll(LockMode lockMode, Class<T> domainType) {
		collectVoid(das -> das.acquireLockAll(lockMode, domainType));
	}

	@Override
	public long count(Class<?> domainType) {
		return collect(das -> das.count(domainType));
	}

	@Override
	public <T> T findById(Object id, Class<T> domainType) {
		return collect(das -> das.findById(id, domainType));
	}

	@Override
	public <T> Iterable<T> findAll(Class<T> domainType) {
		return collect(das -> das.findAll(domainType));
	}

	@Override
	public <T> Stream<T> streamAll(Class<T> domainType) {
		return collect(das -> das.streamAll(domainType));
	}

	@Override
	public <T> Iterable<T> findAllById(Iterable<?> ids, Class<T> domainType) {
		return collect(das -> das.findAllById(ids, domainType));
	}

	@Override
	public <T> Stream<T> streamAllByIds(Iterable<?> ids, Class<T> domainType) {
		return collect(das -> das.streamAllByIds(ids, domainType));
	}

	@Override
	public Iterable<Object> findAllByPath(Identifier identifier,
			PersistentPropertyPath<? extends RelationalPersistentProperty> path) {
		return collect(das -> das.findAllByPath(identifier, path));
	}

	@Override
	public <T> boolean existsById(Object id, Class<T> domainType) {
		return collect(das -> das.existsById(id, domainType));
	}

	@Override
	public <T> Iterable<T> findAll(Class<T> domainType, Sort sort) {
		return collect(das -> das.findAll(domainType, sort));
	}

	@Override
	public <T> Stream<T> streamAll(Class<T> domainType, Sort sort) {
		return collect(das -> das.streamAll(domainType, sort));
	}

	@Override
	public <T> Iterable<T> findAll(Class<T> domainType, Pageable pageable) {
		return collect(das -> das.findAll(domainType, pageable));
	}

	@Override
	public <T> Optional<T> findOne(Query query, Class<T> domainType) {
		return collect(das -> das.findOne(query, domainType));
	}

	@Override
	public <T> Iterable<T> findAll(Query query, Class<T> domainType) {
		return collect(das -> das.findAll(query, domainType));
	}

	@Override
	public <T> Stream<T> streamAll(Query query, Class<T> domainType) {
		return collect(das -> das.streamAll(query, domainType));
	}

	@Override
	public <T> Iterable<T> findAll(Query query, Class<T> domainType, Pageable pageable) {
		return collect(das -> das.findAll(query, domainType, pageable));
	}

	@Override
	public <T> boolean exists(Query query, Class<T> domainType) {
		return collect(das -> das.exists(query, domainType));
	}

	@Override
	public long count(Query query, Class<?> domainType) {
		return collect(das -> das.count(query, domainType));
	}

	// Runs the given call against each strategy in order; FunctionCollector keeps the
	// first successful result and only rethrows if every strategy failed.
	private <T> T collect(Function<DataAccessStrategy, T> function) {
		return strategies.stream().collect(new FunctionCollector<>(function));
	}

	// Adapter for void delegate methods: wraps the Consumer so it can run through
	// collect(..), using TRUE as a dummy success marker.
	private void collectVoid(Consumer<DataAccessStrategy> consumer) {

		collect(das -> {
			consumer.accept(das);
			return TRUE;
		});
	}
}
/*
 * Copyright 2019-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.jdbc.core.convert;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;

import org.springframework.dao.OptimisticLockingFailureException;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.jdbc.core.JdbcAggregateOperations;
import org.springframework.data.mapping.PersistentPropertyPath;
import org.springframework.data.relational.core.conversion.IdValueSource;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
import org.springframework.data.relational.core.query.Query;
import org.springframework.data.relational.core.sql.LockMode;
import org.springframework.lang.Nullable;

/**
 * Abstraction for accesses to the database that should be implementable with a single SQL statement per method and
 * relates to a single entity as opposed to {@link JdbcAggregateOperations} which provides interactions related to
 * complete aggregates.
 *
 * @author Jens Schauder
 * @author Tyler Van Gorder
 * @author Milan Milanov
 * @author Myeonghyeon Lee
 * @author Chirag Tailor
 * @author Diego Krupitza
 * @author Sergey Korotaev
 */
public interface DataAccessStrategy extends ReadingDataAccessStrategy, RelationResolver {

	/**
	 * Inserts the data of a single entity. Referenced entities don't get handled.
	 *
	 * @param <T> the type of the instance.
	 * @param instance the instance to be stored. Must not be {@code null}.
	 * @param domainType the type of the instance. Must not be {@code null}.
	 * @param identifier information about data that needs to be considered for the insert but which is not part of the
	 *          entity. Namely, references back to a parent entity and key/index columns for entities that are stored in a
	 *          {@link Map} or {@link List}.
	 * @param idValueSource the {@link IdValueSource} for the insert.
	 * @return the id generated by the database if any.
	 * @since 2.4
	 */
	@Nullable
	<T> Object insert(T instance, Class<T> domainType, Identifier identifier, IdValueSource idValueSource);

	/**
	 * Inserts the data of multiple entities.
	 *
	 * @param <T> the type of the instance.
	 * @param insertSubjects the subjects to be inserted, where each subject contains the instance and its identifier.
	 *          Must not be {@code null}.
	 * @param domainType the type of the instance. Must not be {@code null}.
	 * @param idValueSource the {@link IdValueSource} for the insert.
	 * @return the ids corresponding to each record that was inserted, if ids were generated. If ids were not generated,
	 *         elements will be {@code null}.
	 * @since 2.4
	 */
	<T> Object[] insert(List<InsertSubject<T>> insertSubjects, Class<T> domainType, IdValueSource idValueSource);

	/**
	 * Updates the data of a single entity in the database. Referenced entities don't get handled.
	 *
	 * @param instance the instance to save. Must not be {@code null}.
	 * @param domainType the type of the instance to save. Must not be {@code null}.
	 * @param <S> the type of the instance to save.
	 * @return whether the update actually updated a row.
	 */
	<S> boolean update(S instance, Class<S> domainType);

	/**
	 * Updates the data of a single entity in the database and enforce optimistic record locking using the
	 * {@code previousVersion} property. Referenced entities don't get handled.
	 * <p>
	 * The statement will be of the form : {@code UPDATE … SET … WHERE ID = :id and VERSION_COLUMN = :previousVersion }
	 * and throw an optimistic record locking exception if no rows have been updated.
	 *
	 * @param instance the instance to save. Must not be {@code null}.
	 * @param domainType the type of the instance to save. Must not be {@code null}.
	 * @param previousVersion The previous version assigned to the instance being saved.
	 * @param <S> the type of the instance to save.
	 * @return whether the update actually updated a row.
	 * @throws OptimisticLockingFailureException if the update fails to update at least one row assuming the optimistic
	 *           locking version check failed.
	 * @since 2.0
	 */
	<S> boolean updateWithVersion(S instance, Class<S> domainType, Number previousVersion);

	/**
	 * Deletes a single row identified by the id, from the table identified by the domainType. Does not handle cascading
	 * deletes.
	 *
	 * @param id the id of the row to be deleted. Must not be {@code null}.
	 * @param domainType the type of entity to be deleted. Implicitly determines the table to operate on. Must not be
	 *          {@code null}.
	 */
	void delete(Object id, Class<?> domainType);

	/**
	 * Deletes multiple rows identified by the ids, from the table identified by the domainType. Does not handle cascading
	 * deletes.
	 * <p>
	 * The statement will be of the form : {@code DELETE FROM … WHERE ID IN (:ids) }.
	 *
	 * @param ids the ids of the rows to be deleted. Must not be {@code null}.
	 * @param domainType the type of entity to be deleted. Implicitly determines the table to operate on. Must not be
	 *          {@code null}.
	 * @since 3.0
	 */
	void delete(Iterable<Object> ids, Class<?> domainType);

	/**
	 * Deletes a single entity from the database and enforce optimistic record locking using the version property. Does
	 * not handle cascading deletes.
	 *
	 * @param id the id of the row to be deleted. Must not be {@code null}.
	 * @param domainType the type of entity to be deleted. Implicitly determines the table to operate on. Must not be
	 *          {@code null}.
	 * @param previousVersion The previous version assigned to the instance being saved.
	 * @throws OptimisticLockingFailureException if the update fails to update at least one row assuming the optimistic
	 *           locking version check failed.
	 * @since 2.0
	 */
	<T> void deleteWithVersion(Object id, Class<T> domainType, Number previousVersion);

	/**
	 * Deletes all entities reachable via {@literal propertyPath} from the instance identified by {@literal rootId}.
	 *
	 * @param rootId Id of the root object on which the {@literal propertyPath} is based. Must not be {@code null}.
	 * @param propertyPath Leading from the root object to the entities to be deleted. Must not be {@code null}.
	 */
	void delete(Object rootId, PersistentPropertyPath<RelationalPersistentProperty> propertyPath);

	/**
	 * Deletes all entities reachable via {@literal propertyPath} from the instances identified by {@literal rootIds}.
	 *
	 * @param rootIds Ids of the root objects on which the {@literal propertyPath} is based. Must not be {@code null} or
	 *          empty.
	 * @param propertyPath Leading from the root object to the entities to be deleted. Must not be {@code null}.
	 */
	void delete(Iterable<Object> rootIds, PersistentPropertyPath<RelationalPersistentProperty> propertyPath);

	/**
	 * Deletes all entities of the given domain type.
	 *
	 * @param domainType the domain type for which to delete all entries. Must not be {@code null}.
	 * @param <T> type of the domain type.
	 */
	<T> void deleteAll(Class<T> domainType);

	/**
	 * Deletes all entities reachable via {@literal propertyPath} from any instance.
	 *
	 * @param propertyPath Leading from the root object to the entities to be deleted. Must not be {@code null}.
	 */
	void deleteAll(PersistentPropertyPath<RelationalPersistentProperty> propertyPath);

	/**
	 * Acquire a lock on the aggregate specified by id.
	 *
	 * @param id the id of the entity to load. Must not be {@code null}.
	 * @param lockMode the lock mode for select. Must not be {@code null}.
	 * @param domainType the domain type of the entity. Must not be {@code null}.
	 */
	<T> void acquireLockById(Object id, LockMode lockMode, Class<T> domainType);

	/**
	 * Acquire a lock on all aggregates of the given domain type.
	 *
	 * @param lockMode the lock mode for select. Must not be {@code null}.
	 * @param domainType the domain type of the entity. Must not be {@code null}.
	 */
	<T> void acquireLockAll(LockMode lockMode, Class<T> domainType);

	/**
	 * Counts the rows in the table representing the given domain type.
	 *
	 * @param domainType the domain type for which to count the elements. Must not be {@code null}.
	 * @return the count. Guaranteed to be not {@code null}.
	 */
	long count(Class<?> domainType);

	/**
	 * Counts the rows in the table representing the given probe type, that match the given query.
	 *
	 * @param domainType the probe type for which to count the elements. Must not be {@code null}.
	 * @param query the query which elements have to match.
	 * @return the count. Guaranteed to be not {@code null}.
	 * @since 3.0
	 */
	<T> long count(Query query, Class<T> domainType);

	/**
	 * Determine whether there is an aggregate of type domainType that matches the provided {@link Query}.
	 *
	 * @param query must not be {@literal null}.
	 * @param domainType the type of entities. Must not be {@code null}.
	 * @return {@literal true} if the object exists.
	 * @since 3.0
	 */
	<T> boolean exists(Query query, Class<T> domainType);

	/**
	 * Returns if a row with the given id exists for the given type.
	 *
	 * @param id the id of the entity for which to check. Must not be {@code null}.
	 * @param domainType the type of the entity to check for. Must not be {@code null}.
	 * @param <T> the type of the entity.
	 * @return {@code true} if a matching row exists, otherwise {@code false}.
	 */
	<T> boolean existsById(Object id, Class<T> domainType);

	/**
	 * Loads a single entity identified by type and id.
	 *
	 * @param id the id of the entity to load. Must not be {@code null}.
	 * @param domainType the domain type of the entity. Must not be {@code null}.
	 * @param <T> the type of the entity.
	 * @return Might return {@code null}.
	 */
	@Override
	@Nullable
	<T> T findById(Object id, Class<T> domainType);

	/**
	 * Loads all entities of the given type.
	 *
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> the type of entities to load.
	 * @return Guaranteed to be not {@code null}.
	 */
	@Override
	<T> Iterable<T> findAll(Class<T> domainType);

	/**
	 * Loads all entities of the given type to a {@link Stream}.
	 *
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> the type of entities to load.
	 * @return Guaranteed to be not {@code null}.
	 */
	@Override
	<T> Stream<T> streamAll(Class<T> domainType);

	/**
	 * Loads all entities that match one of the ids passed as an argument. It is not guaranteed that the number of ids
	 * passed in matches the number of entities returned.
	 *
	 * @param ids the Ids of the entities to load. Must not be {@code null}.
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> type of entities to load.
	 * @return the loaded entities. Guaranteed to be not {@code null}.
	 */
	@Override
	<T> Iterable<T> findAllById(Iterable<?> ids, Class<T> domainType);

	/**
	 * Loads all entities that match one of the ids passed as an argument to a {@link Stream}. It is not guaranteed that
	 * the number of ids passed in matches the number of entities returned.
	 *
	 * @param ids the Ids of the entities to load. Must not be {@code null}.
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> type of entities to load.
	 * @return the loaded entities. Guaranteed to be not {@code null}.
	 */
	@Override
	<T> Stream<T> streamAllByIds(Iterable<?> ids, Class<T> domainType);

	@Override
	Iterable<Object> findAllByPath(Identifier identifier,
			PersistentPropertyPath<? extends RelationalPersistentProperty> path);

	/**
	 * Loads all entities of the given type, sorted.
	 *
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> the type of entities to load.
	 * @param sort the sorting information. Must not be {@code null}.
	 * @return Guaranteed to be not {@code null}.
	 * @since 2.0
	 */
	@Override
	<T> Iterable<T> findAll(Class<T> domainType, Sort sort);

	/**
	 * Loads all entities of the given type to a {@link Stream}, sorted.
	 *
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> the type of entities to load.
	 * @param sort the sorting information. Must not be {@code null}.
	 * @return Guaranteed to be not {@code null}.
	 * @since 2.0
	 */
	@Override
	<T> Stream<T> streamAll(Class<T> domainType, Sort sort);

	/**
	 * Loads all entities of the given type, paged and sorted.
	 *
	 * @param domainType the type of entities to load. Must not be {@code null}.
	 * @param <T> the type of entities to load.
	 * @param pageable the pagination information. Must not be {@code null}.
	 * @return Guaranteed to be not {@code null}.
	 * @since 2.0
	 */
	@Override
	<T> Iterable<T> findAll(Class<T> domainType, Pageable pageable);

	/**
	 * Execute a {@code SELECT} query and convert the resulting item to an entity ensuring exactly one result.
	 *
	 * @param query must not be {@literal null}.
	 * @param domainType the type of entities. Must not be {@code null}.
	 * @return exactly one result or {@link Optional#empty()} if no match found.
	 * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found.
	 * @since 3.0
	 */
	@Override
	<T> Optional<T> findOne(Query query, Class<T> domainType);

	/**
	 * Execute a {@code SELECT} query and convert the resulting items to a {@link Iterable}.
	 *
	 * @param query must not be {@literal null}.
	 * @param domainType the type of entities. Must not be {@code null}.
	 * @return a non-null list with all the matching results.
	 * @since 3.0
	 */
	@Override
	<T> Iterable<T> findAll(Query query, Class<T> domainType);

	/**
	 * Execute a {@code SELECT} query and convert the resulting items to a {@link Stream}.
	 *
	 * @param query must not be {@literal null}.
	 * @param domainType the type of entities. Must not be {@code null}.
	 * @return a non-null stream with all the matching results.
	 * @since 3.0
	 */
	@Override
	<T> Stream<T> streamAll(Query query, Class<T> domainType);

	/**
	 * Execute a {@code SELECT} query and convert the resulting items to a {@link Iterable}. Applies the {@link Pageable}
	 * to the result.
	 *
	 * @param query must not be {@literal null}.
	 * @param domainType the type of entities. Must not be {@literal null}.
	 * @param pageable the pagination that should be applied. Must not be {@literal null}.
	 * @return a non-null list with all the matching results.
	 * @since 3.0
	 */
	@Override
	<T> Iterable<T> findAll(Query query, Class<T> domainType, Pageable pageable);

}
+ * + * @author Mark Paluch + * @since 3.2 + */ +public class DataAccessStrategyFactory { + + private final SqlGeneratorSource sqlGeneratorSource; + private final JdbcConverter converter; + private final NamedParameterJdbcOperations operations; + private final SqlParametersFactory sqlParametersFactory; + private final InsertStrategyFactory insertStrategyFactory; + + /** + * Creates a new {@link DataAccessStrategyFactory}. + * + * @param sqlGeneratorSource must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param sqlParametersFactory must not be {@literal null}. + * @param insertStrategyFactory must not be {@literal null}. + */ + public DataAccessStrategyFactory(SqlGeneratorSource sqlGeneratorSource, JdbcConverter converter, + NamedParameterJdbcOperations operations, SqlParametersFactory sqlParametersFactory, + InsertStrategyFactory insertStrategyFactory) { + + Assert.notNull(sqlGeneratorSource, "SqlGeneratorSource must not be null"); + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + Assert.notNull(sqlParametersFactory, "SqlParametersFactory must not be null"); + Assert.notNull(insertStrategyFactory, "InsertStrategyFactory must not be null"); + + this.sqlGeneratorSource = sqlGeneratorSource; + this.converter = converter; + this.operations = operations; + this.sqlParametersFactory = sqlParametersFactory; + this.insertStrategyFactory = insertStrategyFactory; + } + + /** + * Creates a new {@link DataAccessStrategy}. + * + * @return a new {@link DataAccessStrategy}. 
+ */ + public DataAccessStrategy create() { + + DefaultDataAccessStrategy defaultDataAccessStrategy = new DefaultDataAccessStrategy(sqlGeneratorSource, + this.converter.getMappingContext(), this.converter, this.operations, sqlParametersFactory, + insertStrategyFactory); + + if (this.converter.getMappingContext().isSingleQueryLoadingEnabled()) { + return new SingleQueryFallbackDataAccessStrategy(sqlGeneratorSource, converter, operations, + defaultDataAccessStrategy); + } + + return defaultDataAccessStrategy; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DefaultDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DefaultDataAccessStrategy.java new file mode 100644 index 0000000000..c638a3e763 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DefaultDataAccessStrategy.java @@ -0,0 +1,489 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.springframework.data.jdbc.core.convert.SqlGenerator.*; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.AggregatePath.TableInfo; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * The default {@link DataAccessStrategy} is to generate SQL statements based on metadata from the entity. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Thomas Lang + * @author Bastian Wilhelm + * @author Christoph Strobl + * @author Tom Hombergs + * @author Tyler Van Gorder + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Yunyoung LEE + * @author Radim Tlusty + * @author Chirag Tailor + * @author Diego Krupitza + * @author Sergey Korotaev + * @since 1.1 + */ +public class DefaultDataAccessStrategy implements DataAccessStrategy { + + private final SqlGeneratorSource sqlGeneratorSource; + private final RelationalMappingContext context; + private final JdbcConverter converter; + private final NamedParameterJdbcOperations operations; + private final SqlParametersFactory sqlParametersFactory; + private final InsertStrategyFactory insertStrategyFactory; + + /** + * Creates a {@link DefaultDataAccessStrategy} + * + * @param sqlGeneratorSource must not be {@literal null}. + * @param context must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param operations must not be {@literal null}. 
+ * @since 1.1 + */ + public DefaultDataAccessStrategy(SqlGeneratorSource sqlGeneratorSource, RelationalMappingContext context, + JdbcConverter converter, NamedParameterJdbcOperations operations, SqlParametersFactory sqlParametersFactory, + InsertStrategyFactory insertStrategyFactory) { + + Assert.notNull(sqlGeneratorSource, "SqlGeneratorSource must not be null"); + Assert.notNull(context, "RelationalMappingContext must not be null"); + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + Assert.notNull(sqlParametersFactory, "SqlParametersFactory must not be null"); + Assert.notNull(insertStrategyFactory, "InsertStrategyFactory must not be null"); + + this.sqlGeneratorSource = sqlGeneratorSource; + this.context = context; + this.converter = converter; + this.operations = operations; + this.sqlParametersFactory = sqlParametersFactory; + this.insertStrategyFactory = insertStrategyFactory; + } + + @Override + public Object insert(T instance, Class domainType, Identifier identifier, IdValueSource idValueSource) { + + SqlIdentifierParameterSource parameterSource = sqlParametersFactory.forInsert(instance, domainType, identifier, + idValueSource); + + String insertSql = sql(domainType).getInsert(parameterSource.getIdentifiers()); + + return insertStrategyFactory.insertStrategy(idValueSource, getIdColumn(domainType)).execute(insertSql, + parameterSource); + } + + @Override + public Object[] insert(List> insertSubjects, Class domainType, IdValueSource idValueSource) { + + Assert.notEmpty(insertSubjects, "Batch insert must contain at least one InsertSubject"); + SqlIdentifierParameterSource[] sqlParameterSources = insertSubjects.stream() + .map(insertSubject -> sqlParametersFactory.forInsert( // + insertSubject.getInstance(), // + domainType, // + insertSubject.getIdentifier(), // + idValueSource // + ) // + ) // + .toArray(SqlIdentifierParameterSource[]::new); + + String insertSql = 
sql(domainType).getInsert(sqlParameterSources[0].getIdentifiers()); + + return insertStrategyFactory.batchInsertStrategy(idValueSource, getIdColumn(domainType)).execute(insertSql, + sqlParameterSources); + } + + @Override + public boolean update(S instance, Class domainType) { + + SqlIdentifierParameterSource parameterSource = sqlParametersFactory.forUpdate(instance, domainType); + if (parameterSource.size() <= 1) { + return true; // returning true, because conceptually the one row was correctly updated + } + return operations.update(sql(domainType).getUpdate(), parameterSource) != 0; + } + + @Override + public boolean updateWithVersion(S instance, Class domainType, Number previousVersion) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(domainType); + + // Adjust update statement to set the new version and use the old version in where clause. + SqlIdentifierParameterSource parameterSource = sqlParametersFactory.forUpdate(instance, domainType); + parameterSource.addValue(VERSION_SQL_PARAMETER, previousVersion); + + int affectedRows = operations.update(sql(domainType).getUpdateWithVersion(), parameterSource); + + if (affectedRows == 0) { + + throw new OptimisticLockingFailureException( + String.format("Optimistic lock exception on saving entity of type %s", persistentEntity.getName())); + } + + return true; + } + + @Override + public void delete(Object id, Class domainType) { + + String deleteByIdSql = sql(domainType).getDeleteById(); + SqlParameterSource parameter = sqlParametersFactory.forQueryById(id, domainType, ID_SQL_PARAMETER); + + operations.update(deleteByIdSql, parameter); + } + + @Override + public void delete(Iterable ids, Class domainType) { + + String deleteByIdInSql = sql(domainType).getDeleteByIdIn(); + SqlParameterSource parameter = sqlParametersFactory.forQueryByIds(ids, domainType); + + operations.update(deleteByIdInSql, parameter); + } + + @Override + public void deleteWithVersion(Object id, Class domainType, Number 
previousVersion) { + + Assert.notNull(id, "Id must not be null"); + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(domainType); + + SqlIdentifierParameterSource parameterSource = sqlParametersFactory.forQueryById(id, domainType, ID_SQL_PARAMETER); + parameterSource.addValue(VERSION_SQL_PARAMETER, previousVersion); + int affectedRows = operations.update(sql(domainType).getDeleteByIdAndVersion(), parameterSource); + + if (affectedRows == 0) { + throw new OptimisticLockingFailureException( + String.format("Optimistic lock exception deleting entity of type %s", persistentEntity.getName())); + } + } + + @Override + public void delete(Object rootId, PersistentPropertyPath propertyPath) { + + RelationalPersistentEntity rootEntity = context.getRequiredPersistentEntity(getBaseType(propertyPath)); + + RelationalPersistentProperty referencingProperty = propertyPath.getLeafProperty(); + Assert.notNull(referencingProperty, "No property found matching the PropertyPath " + propertyPath); + + String delete = sql(rootEntity.getType()).createDeleteByPath(propertyPath); + + SqlIdentifierParameterSource parameters = sqlParametersFactory.forQueryById(rootId, rootEntity.getType(), + ROOT_ID_PARAMETER); + operations.update(delete, parameters); + } + + @Override + public void delete(Iterable rootIds, PersistentPropertyPath propertyPath) { + + RelationalPersistentEntity rootEntity = context.getRequiredPersistentEntity(getBaseType(propertyPath)); + + RelationalPersistentProperty referencingProperty = propertyPath.getLeafProperty(); + + Assert.notNull(referencingProperty, "No property found matching the PropertyPath " + propertyPath); + + String delete = sql(rootEntity.getType()).createDeleteInByPath(propertyPath); + + SqlIdentifierParameterSource parameters = sqlParametersFactory.forQueryByIds(rootIds, rootEntity.getType()); + operations.update(delete, parameters); + } + + @Override + public void deleteAll(Class domainType) { + 
operations.getJdbcOperations().update(sql(domainType).createDeleteAllSql(null)); + } + + @Override + public void deleteAll(PersistentPropertyPath propertyPath) { + + operations.getJdbcOperations().update(sql(getBaseType(propertyPath)).createDeleteAllSql(propertyPath)); + } + + @Override + public void acquireLockById(Object id, LockMode lockMode, Class domainType) { + + String acquireLockByIdSql = sql(domainType).getAcquireLockById(lockMode); + SqlIdentifierParameterSource parameter = sqlParametersFactory.forQueryById(id, domainType, ID_SQL_PARAMETER); + + operations.query(acquireLockByIdSql, parameter, ResultSet::next); + } + + @Override + public void acquireLockAll(LockMode lockMode, Class domainType) { + + String acquireLockAllSql = sql(domainType).getAcquireLockAll(lockMode); + operations.getJdbcOperations().query(acquireLockAllSql, ResultSet::next); + } + + @Override + public long count(Class domainType) { + + Long result = operations.getJdbcOperations().queryForObject(sql(domainType).getCount(), Long.class); + + Assert.notNull(result, "The result of a count query must not be null"); + + return result; + } + + @Override + public T findById(Object id, Class domainType) { + + String findOneSql = sql(domainType).getFindOne(); + SqlIdentifierParameterSource parameter = sqlParametersFactory.forQueryById(id, domainType, ID_SQL_PARAMETER); + + try { + return operations.queryForObject(findOneSql, parameter, getEntityRowMapper(domainType)); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public List findAll(Class domainType) { + return operations.query(sql(domainType).getFindAll(), getEntityRowMapper(domainType)); + } + + @Override + public Stream streamAll(Class domainType) { + return operations.queryForStream(sql(domainType).getFindAll(), new MapSqlParameterSource(), + getEntityRowMapper(domainType)); + } + + @Override + public List findAllById(Iterable ids, Class domainType) { + + if (!ids.iterator().hasNext()) { + return 
Collections.emptyList(); + } + + SqlParameterSource parameterSource = sqlParametersFactory.forQueryByIds(ids, domainType); + String findAllInListSql = sql(domainType).getFindAllInList(); + return operations.query(findAllInListSql, parameterSource, getEntityRowMapper(domainType)); + } + + @Override + public Stream streamAllByIds(Iterable ids, Class domainType) { + + if (!ids.iterator().hasNext()) { + return Stream.empty(); + } + + SqlParameterSource parameterSource = sqlParametersFactory.forQueryByIds(ids, domainType); + String findAllInListSql = sql(domainType).getFindAllInList(); + + return operations.queryForStream(findAllInListSql, parameterSource, getEntityRowMapper(domainType)); + } + + @Override + @SuppressWarnings("unchecked") + public List findAllByPath(Identifier identifier, + PersistentPropertyPath propertyPath) { + + Assert.notNull(identifier, "identifier must not be null"); + Assert.notNull(propertyPath, "propertyPath must not be null"); + + AggregatePath path = context.getAggregatePath(propertyPath); + Class actualType = path.getLeafEntity().getType(); + + String findAllByProperty = sql(actualType) // + .getFindAllByProperty(identifier, propertyPath); + + SqlParameterSource parameterSource = sqlParametersFactory.forQueryByIdentifier(identifier); + return operations.query(findAllByProperty, parameterSource, new RowMapper<>() { + + @Override + public Object mapRow(ResultSet rs, int rowNum) throws SQLException { + + if (path.isMap()) { + return getMapEntityRowMapper(path, identifier).mapRow(rs, rowNum); + } + + // Add row number as key for paths that do not define an identifier and that are contained in a collection. 
+ Identifier identifierToUse = identifier; + if (!path.hasIdProperty() && path.isQualified()) { + + TableInfo tableInfo = path.getTableInfo(); + identifierToUse = identifierToUse.withPart(tableInfo.qualifierColumnInfo().name(), rowNum, Object.class); + } + + return getEntityRowMapper(path, identifierToUse).mapRow(rs, rowNum); + } + }); + } + + @Override + public boolean existsById(Object id, Class domainType) { + + String existsSql = sql(domainType).getExists(); + SqlParameterSource parameter = sqlParametersFactory.forQueryById(id, domainType, ID_SQL_PARAMETER); + + Boolean result = operations.queryForObject(existsSql, parameter, Boolean.class); + Assert.state(result != null, "The result of an exists query must not be null"); + + return result; + } + + @Override + public List findAll(Class domainType, Sort sort) { + return operations.query(sql(domainType).getFindAll(sort), getEntityRowMapper(domainType)); + } + + @Override + public Stream streamAll(Class domainType, Sort sort) { + return operations.queryForStream(sql(domainType).getFindAll(sort), new MapSqlParameterSource(), + getEntityRowMapper(domainType)); + } + + @Override + public List findAll(Class domainType, Pageable pageable) { + return operations.query(sql(domainType).getFindAll(pageable), getEntityRowMapper(domainType)); + } + + @Override + public Optional findOne(Query query, Class domainType) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + String sqlQuery = sql(domainType).selectByQuery(query, parameterSource); + + try { + return Optional.ofNullable(operations.queryForObject(sqlQuery, parameterSource, getEntityRowMapper(domainType))); + } catch (EmptyResultDataAccessException e) { + return Optional.empty(); + } + } + + @Override + public List findAll(Query query, Class domainType) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + String sqlQuery = sql(domainType).selectByQuery(query, parameterSource); + + return operations.query(sqlQuery, 
parameterSource, getEntityRowMapper(domainType)); + } + + @Override + public Stream streamAll(Query query, Class domainType) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + String sqlQuery = sql(domainType).selectByQuery(query, parameterSource); + + return operations.queryForStream(sqlQuery, parameterSource, getEntityRowMapper(domainType)); + } + + @Override + public List findAll(Query query, Class domainType, Pageable pageable) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + String sqlQuery = sql(domainType).selectByQuery(query, parameterSource, pageable); + + return operations.query(sqlQuery, parameterSource, getEntityRowMapper(domainType)); + } + + @Override + public boolean exists(Query query, Class domainType) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + String sqlQuery = sql(domainType).existsByQuery(query, parameterSource); + + Boolean result = operations.queryForObject(sqlQuery, parameterSource, Boolean.class); + + Assert.state(result != null, "The result of an exists query must not be null"); + + return result; + } + + @Override + public long count(Query query, Class domainType) { + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + String sqlQuery = sql(domainType).countByQuery(query, parameterSource); + + Long result = operations.queryForObject(sqlQuery, parameterSource, Long.class); + + Assert.state(result != null, "The result of a count query must not be null."); + + return result; + } + + private EntityRowMapper getEntityRowMapper(Class domainType) { + return new EntityRowMapper<>(getRequiredPersistentEntity(domainType), converter); + } + + private EntityRowMapper getEntityRowMapper(AggregatePath path, Identifier identifier) { + return new EntityRowMapper<>(path, converter, identifier); + } + + private RowMapper getMapEntityRowMapper(AggregatePath path, Identifier identifier) { + + AggregatePath.ColumnInfo qualifierColumnInfo = 
path.getTableInfo().qualifierColumnInfo(); + Assert.notNull(qualifierColumnInfo, () -> "Qualifier column must not be null for " + path); + SqlIdentifier keyColumn = qualifierColumnInfo.name(); + + return new MapEntityRowMapper<>(path, converter, identifier, keyColumn); + } + + @SuppressWarnings("unchecked") + private RelationalPersistentEntity getRequiredPersistentEntity(Class domainType) { + return (RelationalPersistentEntity) context.getRequiredPersistentEntity(domainType); + } + + private SqlGenerator sql(Class domainType) { + return sqlGeneratorSource.getSqlGenerator(domainType); + } + + @Nullable + private SqlIdentifier getIdColumn(Class domainType) { + + return Optional.ofNullable(context.getRequiredPersistentEntity(domainType).getIdProperty()) + .map(RelationalPersistentProperty::getColumnName).orElse(null); + } + + private Class getBaseType(PersistentPropertyPath propertyPath) { + + RelationalPersistentProperty baseProperty = propertyPath.getBaseProperty(); + + Assert.notNull(baseProperty, "The base property must not be null"); + + return baseProperty.getOwner().getType(); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DefaultJdbcTypeFactory.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DefaultJdbcTypeFactory.java new file mode 100644 index 0000000000..040367b06a --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DefaultJdbcTypeFactory.java @@ -0,0 +1,96 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.Array; +import java.sql.SQLType; + +import org.springframework.jdbc.core.ConnectionCallback; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.util.Assert; + +/** + * A {@link JdbcTypeFactory} that performs the conversion by utilizing + * {@link JdbcOperations#execute(ConnectionCallback)}. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 1.1 + */ +public class DefaultJdbcTypeFactory implements JdbcTypeFactory { + + private final JdbcOperations operations; + private final org.springframework.data.jdbc.core.dialect.JdbcArrayColumns arrayColumns; + + /** + * Creates a new {@link DefaultJdbcTypeFactory}. + * + * @param operations must not be {@literal null}. + */ + public DefaultJdbcTypeFactory(JdbcOperations operations) { + this(operations, org.springframework.data.jdbc.core.dialect.JdbcArrayColumns.DefaultSupport.INSTANCE); + } + + /** + * Creates a new {@link DefaultJdbcTypeFactory}. + * + * @param operations must not be {@literal null}. + * @since 2.3 + * @deprecated use + * {@link #DefaultJdbcTypeFactory(JdbcOperations, org.springframework.data.jdbc.core.dialect.JdbcArrayColumns)} + * instead. 
+ */ + @Deprecated(forRemoval = true, since = "3.5") + public DefaultJdbcTypeFactory(JdbcOperations operations, JdbcArrayColumns arrayColumns) { + + Assert.notNull(operations, "JdbcOperations must not be null"); + Assert.notNull(arrayColumns, "JdbcArrayColumns must not be null"); + + this.operations = operations; + this.arrayColumns = arrayColumns; + } + + /** + * Creates a new {@link DefaultJdbcTypeFactory}. + * + * @param operations must not be {@literal null}. + * @since 3.5 + */ + public DefaultJdbcTypeFactory(JdbcOperations operations, + org.springframework.data.jdbc.core.dialect.JdbcArrayColumns arrayColumns) { + + Assert.notNull(operations, "JdbcOperations must not be null"); + Assert.notNull(arrayColumns, "JdbcArrayColumns must not be null"); + + this.operations = operations; + this.arrayColumns = arrayColumns; + } + + @Override + public Array createArray(Object[] value) { + + Assert.notNull(value, "Value must not be null"); + + Class componentType = arrayColumns.getArrayType(value.getClass()); + SQLType jdbcType = arrayColumns.getSqlType(componentType); + + Assert.notNull(jdbcType, () -> String.format("Couldn't determine SQLType for %s", componentType)); + String typeName = arrayColumns.getArrayTypeName(jdbcType); + + return operations.execute((ConnectionCallback) c -> c.createArrayOf(typeName, value)); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DelegatingDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DelegatingDataAccessStrategy.java new file mode 100644 index 0000000000..1bec8222f0 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/DelegatingDataAccessStrategy.java @@ -0,0 +1,225 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.util.Assert; + +/** + * Delegates all method calls to an instance set after construction. This is useful for {@link DataAccessStrategy}s with + * cyclic dependencies. 
+ * + * @author Jens Schauder + * @author Tyler Van Gorder + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @author Diego Krupitza + * @author Sergey Korotaev + * @since 1.1 + */ +public class DelegatingDataAccessStrategy implements DataAccessStrategy { + + private DataAccessStrategy delegate; + + public DelegatingDataAccessStrategy() {} + + public DelegatingDataAccessStrategy(DataAccessStrategy delegate) { + + Assert.notNull(delegate, "DataAccessStrategy must not be null"); + this.delegate = delegate; + } + + @Override + public Object insert(T instance, Class domainType, Identifier identifier, IdValueSource idValueSource) { + return delegate.insert(instance, domainType, identifier, idValueSource); + } + + @Override + public Object[] insert(List> insertSubjects, Class domainType, IdValueSource idValueSource) { + return delegate.insert(insertSubjects, domainType, idValueSource); + } + + @Override + public boolean update(S instance, Class domainType) { + return delegate.update(instance, domainType); + } + + @Override + public boolean updateWithVersion(S instance, Class domainType, Number nextVersion) { + return delegate.updateWithVersion(instance, domainType, nextVersion); + + } + + @Override + public void delete(Object rootId, PersistentPropertyPath propertyPath) { + delegate.delete(rootId, propertyPath); + } + + @Override + public void delete(Iterable rootIds, PersistentPropertyPath propertyPath) { + delegate.delete(rootIds, propertyPath); + } + + @Override + public void delete(Object id, Class domainType) { + delegate.delete(id, domainType); + } + + @Override + public void delete(Iterable ids, Class domainType) { + delegate.delete(ids, domainType); + } + + @Override + public void deleteWithVersion(Object id, Class domainType, Number previousVersion) { + delegate.deleteWithVersion(id, domainType, previousVersion); + } + + @Override + public void deleteAll(Class domainType) { + delegate.deleteAll(domainType); + } + + @Override + 
public void deleteAll(PersistentPropertyPath propertyPath) { + delegate.deleteAll(propertyPath); + } + + @Override + public void acquireLockById(Object id, LockMode lockMode, Class domainType) { + delegate.acquireLockById(id, lockMode, domainType); + } + + @Override + public void acquireLockAll(LockMode lockMode, Class domainType) { + delegate.acquireLockAll(lockMode, domainType); + } + + @Override + public long count(Class domainType) { + return delegate.count(domainType); + } + + @Override + public T findById(Object id, Class domainType) { + + Assert.notNull(delegate, "Delegate is null"); + + return delegate.findById(id, domainType); + } + + @Override + public Iterable findAll(Class domainType) { + return delegate.findAll(domainType); + } + + @Override + public Stream streamAll(Class domainType) { + return delegate.streamAll(domainType); + } + + @Override + public Iterable findAllById(Iterable ids, Class domainType) { + return delegate.findAllById(ids, domainType); + } + + @Override + public Stream streamAllByIds(Iterable ids, Class domainType) { + return delegate.streamAllByIds(ids, domainType); + } + + @Override + public Iterable findAllByPath(Identifier identifier, + PersistentPropertyPath path) { + return delegate.findAllByPath(identifier, path); + } + + @Override + public boolean existsById(Object id, Class domainType) { + return delegate.existsById(id, domainType); + } + + @Override + public Iterable findAll(Class domainType, Sort sort) { + return delegate.findAll(domainType, sort); + } + + @Override + public Stream streamAll(Class domainType, Sort sort) { + return delegate.streamAll(domainType, sort); + } + + @Override + public Iterable findAll(Class domainType, Pageable pageable) { + return delegate.findAll(domainType, pageable); + } + + @Override + public Optional findOne(Query query, Class domainType) { + return delegate.findOne(query, domainType); + } + + @Override + public Iterable findAll(Query query, Class domainType) { + return 
delegate.findAll(query, domainType); + } + + @Override + public Stream streamAll(Query query, Class domainType) { + return delegate.streamAll(query, domainType); + } + + @Override + public Iterable findAll(Query query, Class domainType, Pageable pageable) { + return delegate.findAll(query, domainType, pageable); + } + + @Override + public boolean exists(Query query, Class domainType) { + return delegate.exists(query, domainType); + } + + @Override + public long count(Query query, Class domainType) { + return delegate.count(query, domainType); + } + + /** + * Must be called exactly once before calling any of the other methods. + * + * @param delegate Must not be {@literal null} + * @deprecated since 3.0, use {@link #DelegatingDataAccessStrategy(DataAccessStrategy)} to avoid mutable state. + */ + @Deprecated(since = "3.0", forRemoval = true) + public void setDelegate(DataAccessStrategy delegate) { + + Assert.isNull(this.delegate, "The delegate must be set exactly once"); + Assert.notNull(delegate, "The delegate must not be set to null"); + + this.delegate = delegate; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/EntityRowMapper.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/EntityRowMapper.java new file mode 100644 index 0000000000..8ecaa161b3 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/EntityRowMapper.java @@ -0,0 +1,68 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; +import org.springframework.jdbc.core.RowMapper; + +/** + * Maps a {@link ResultSet} to an entity of type {@code T}, including entities referenced. This {@link RowMapper} might + * trigger additional SQL statements in order to load other members of the same aggregate. + * + * @author Jens Schauder + * @author Oliver Gierke + * @author Mark Paluch + * @author Maciej Walkowiak + * @author Bastian Wilhelm + * @since 1.1 + */ +public class EntityRowMapper implements RowMapper { + + private final TypeInformation typeInformation; + private final JdbcConverter converter; + private final Identifier identifier; + + private EntityRowMapper(TypeInformation typeInformation, JdbcConverter converter, Identifier identifier) { + + this.typeInformation = typeInformation; + this.converter = converter; + this.identifier = identifier; + } + + @SuppressWarnings("unchecked") + public EntityRowMapper(AggregatePath path, JdbcConverter converter, Identifier identifier) { + this(((RelationalPersistentEntity) path.getRequiredLeafEntity()).getTypeInformation(), converter, identifier); + } + + public EntityRowMapper(RelationalPersistentEntity entity, JdbcConverter converter) { + this(entity.getTypeInformation(), converter, Identifier.empty()); + } + + @Override + public T mapRow(ResultSet resultSet, int rowNumber) throws SQLException { + + RowDocument document = RowDocumentResultSetExtractor.toRowDocument(resultSet); + + return converter.readAndResolve(typeInformation, document, identifier); + } + 
+} diff --git a/src/main/java/org/springframework/data/jdbc/core/FunctionCollector.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/FunctionCollector.java similarity index 86% rename from src/main/java/org/springframework/data/jdbc/core/FunctionCollector.java rename to spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/FunctionCollector.java index 3d33b88850..d218e666c1 100644 --- a/src/main/java/org/springframework/data/jdbc/core/FunctionCollector.java +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/FunctionCollector.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.springframework.data.jdbc.core; +package org.springframework.data.jdbc.core.convert; import java.util.Collections; import java.util.LinkedList; @@ -35,7 +35,7 @@ * * @author Jens Schauder */ -class FunctionCollector implements Collector.ResultOrException, T> { +class FunctionCollector implements Collector, T> { private final Function method; @@ -44,12 +44,12 @@ class FunctionCollector implements Collector supplier() { + public Supplier> supplier() { return ResultOrException::new; } @Override - public BiConsumer accumulator() { + public BiConsumer, DataAccessStrategy> accumulator() { return (roe, das) -> { @@ -65,7 +65,7 @@ public BiConsumer accumulator() { } @Override - public BinaryOperator combiner() { + public BinaryOperator> combiner() { return (roe1, roe2) -> { throw new UnsupportedOperationException("Can't combine method calls"); @@ -73,7 +73,7 @@ public BinaryOperator combiner() { } @Override - public Function finisher() { + public Function, T> finisher() { return roe -> { @@ -94,7 +94,7 @@ public Set characteristics() { * Stores intermediate results. I.e. a list of exceptions caught so far, any actual result and the fact, if there * actually is an result. */ - class ResultOrException { + static class ResultOrException { private T result; private final List exceptions = new LinkedList<>(); diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingBatchInsertStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingBatchInsertStrategy.java new file mode 100644 index 0000000000..78337bf52c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingBatchInsertStrategy.java @@ -0,0 +1,102 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.IdGeneration; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.jdbc.support.GeneratedKeyHolder; +import org.springframework.lang.Nullable; + +/** + * A {@link BatchInsertStrategy} that expects ids to be generated from the batch insert. When the {@link Dialect} does + * not support id generation for batch operations, this implementation falls back to performing the inserts serially. 
+ * + * @author Chirag Tailor + * @author Kurt Niemi + * @author Jens Schauder + * @since 2.4 + */ +class IdGeneratingBatchInsertStrategy implements BatchInsertStrategy { + + private final InsertStrategy insertStrategy; + private final Dialect dialect; + private final NamedParameterJdbcOperations jdbcOperations; + private final SqlIdentifier idColumn; + + IdGeneratingBatchInsertStrategy(InsertStrategy insertStrategy, Dialect dialect, + NamedParameterJdbcOperations jdbcOperations, @Nullable SqlIdentifier idColumn) { + + this.insertStrategy = insertStrategy; + this.dialect = dialect; + this.jdbcOperations = jdbcOperations; + + this.idColumn = idColumn; + } + + @Override + public Object[] execute(String sql, SqlParameterSource[] sqlParameterSources) { + + if (!dialect.getIdGeneration().supportedForBatchOperations()) { + + return Arrays.stream(sqlParameterSources) + .map(sqlParameterSource -> insertStrategy.execute(sql, sqlParameterSource)).toArray(); + } + + GeneratedKeyHolder holder = new GeneratedKeyHolder(); + IdGeneration idGeneration = dialect.getIdGeneration(); + if (idGeneration.driverRequiresKeyColumnNames()) { + + String[] keyColumnNames = getKeyColumnNames(idGeneration); + if (keyColumnNames.length == 0) { + jdbcOperations.batchUpdate(sql, sqlParameterSources, holder); + } else { + jdbcOperations.batchUpdate(sql, sqlParameterSources, holder, keyColumnNames); + } + } else { + jdbcOperations.batchUpdate(sql, sqlParameterSources, holder); + } + Object[] ids = new Object[sqlParameterSources.length]; + List> keyList = holder.getKeyList(); + for (int i = 0; i < keyList.size(); i++) { + + Map keys = keyList.get(i); + if (keys.size() > 1) { + if (idColumn != null) { + ids[i] = keys.get(idColumn.getReference()); + } + } else { + ids[i] = keys.entrySet().stream().findFirst() // + .map(Map.Entry::getValue) // + .orElseThrow(() -> new IllegalStateException("KeyHolder contains an empty key list")); + } + } + return ids; + } + + private String[] 
getKeyColumnNames(IdGeneration idGeneration) { + + return Optional.ofNullable(idColumn).map(idColumn -> new String[] { idGeneration.getKeyColumnName(idColumn) }) + .orElse(new String[0]); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingEntityCallback.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingEntityCallback.java new file mode 100644 index 0000000000..38d0338ba8 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingEntityCallback.java @@ -0,0 +1,71 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.mapping.event.BeforeSaveCallback; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.util.Assert; + +/** + * Callback for generating identifier values through a database sequence. + * + * @author Mikhail Polivakha + * @author Mark Paluch + * @since 3.5 + */ +public class IdGeneratingEntityCallback implements BeforeSaveCallback { + + private final MappingContext, ? extends RelationalPersistentProperty> context; + private final SequenceEntityCallbackDelegate delegate; + + public IdGeneratingEntityCallback( + MappingContext, ? 
extends RelationalPersistentProperty> context, Dialect dialect, + NamedParameterJdbcOperations operations) { + + this.context = context; + this.delegate = new SequenceEntityCallbackDelegate(dialect, operations); + } + + @Override + public Object onBeforeSave(Object aggregate, MutableAggregateChange aggregateChange) { + + Assert.notNull(aggregate, "Aggregate must not be null"); + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(aggregate.getClass()); + + if (!entity.hasIdProperty()) { + return aggregate; + } + + RelationalPersistentProperty property = entity.getRequiredIdProperty(); + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(aggregate); + + if (!entity.isNew(aggregate) || delegate.hasValue(property, accessor) || !property.hasSequence()) { + return aggregate; + } + + delegate.generateSequenceValue(property, accessor); + + return accessor.getBean(); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingInsertStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingInsertStrategy.java new file mode 100644 index 0000000000..47b2f9d084 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IdGeneratingInsertStrategy.java @@ -0,0 +1,91 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Map; +import java.util.Optional; + +import org.springframework.dao.DataRetrievalFailureException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.IdGeneration; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.jdbc.support.GeneratedKeyHolder; +import org.springframework.jdbc.support.KeyHolder; +import org.springframework.lang.Nullable; + +/** + * An {@link InsertStrategy} that expects an id to be generated from the insert. + * + * @author Chirag Tailor + * @author Kurt Niemi + * @since 2.4 + */ +class IdGeneratingInsertStrategy implements InsertStrategy { + + private final Dialect dialect; + private final NamedParameterJdbcOperations jdbcOperations; + private final SqlIdentifier idColumn; + + IdGeneratingInsertStrategy(Dialect dialect, NamedParameterJdbcOperations jdbcOperations, + @Nullable SqlIdentifier idColumn) { + this.dialect = dialect; + this.jdbcOperations = jdbcOperations; + this.idColumn = idColumn; + } + + @Override + public Object execute(String sql, SqlParameterSource sqlParameterSource) { + + KeyHolder holder = new GeneratedKeyHolder(); + + IdGeneration idGeneration = dialect.getIdGeneration(); + + if (idGeneration.driverRequiresKeyColumnNames()) { + + String[] keyColumnNames = getKeyColumnNames(idGeneration); + if (keyColumnNames.length == 0) { + jdbcOperations.update(sql, sqlParameterSource, holder); + } else { + jdbcOperations.update(sql, sqlParameterSource, holder, keyColumnNames); + } + } else { + jdbcOperations.update(sql, sqlParameterSource, holder); + } + + try { + // MySQL just returns one value with a special name + return 
holder.getKey(); + } catch (DataRetrievalFailureException | InvalidDataAccessApiUsageException e) { + // Postgres returns a value for each column + // MS SQL Server returns a value that might be null. + + Map keys = holder.getKeys(); + if (keys == null || idColumn == null) { + return null; + } + + return keys.get(idColumn.getReference()); + } + } + + private String[] getKeyColumnNames(IdGeneration idGeneration) { + return Optional.ofNullable(idColumn).map(idColumn -> new String[] { idGeneration.getKeyColumnName(idColumn) }) + .orElse(new String[0]); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/Identifier.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/Identifier.java new file mode 100644 index 0000000000..5f9284a54b --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/Identifier.java @@ -0,0 +1,311 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * {@literal Identifier} represents a composite id of an entity that may be composed of one or many parts. Parts or all + * of the entity might not have a representation as a property in the entity but might only be derived from other + * entities referencing it. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 1.1 + */ +public final class Identifier { + + private static final Identifier EMPTY = new Identifier(Collections.emptyList()); + + private final List parts; + + private Identifier(List parts) { + this.parts = parts; + } + + /** + * Returns an empty {@link Identifier}. + * + * @return an empty {@link Identifier}. + */ + public static Identifier empty() { + return EMPTY; + } + + /** + * Creates an {@link Identifier} from {@code name}, {@code value}, and a {@link Class target type}. + * + * @param name must not be {@literal null}. + * @param value must not be {@literal null}. + * @param targetType must not be {@literal null}. + * @return the {@link Identifier} for {@code name}, {@code value}, and a {@link Class target type}. + */ + public static Identifier of(SqlIdentifier name, Object value, Class targetType) { + + Assert.notNull(name, "Name must not be null"); + Assert.notNull(value, "Value must not be null"); + Assert.notNull(targetType, "Target type must not be null"); + + return new Identifier(Collections.singletonList(new SingleIdentifierValue(name, value, targetType))); + } + + /** + * Creates an {@link Identifier} from a {@link Map} of name to value tuples. 
+ * + * @param map must not be {@literal null}. + * @return the {@link Identifier} from a {@link Map} of name to value tuples. + */ + public static Identifier from(Map map) { + + Assert.notNull(map, "Map must not be null"); + + if (map.isEmpty()) { + return empty(); + } + + List values = new ArrayList<>(); + + map.forEach((k, v) -> { + + Assert.notNull(v, "The source map for identifier must not contain null values"); + + values.add(new SingleIdentifierValue(k, v, ClassUtils.getUserClass(v))); + }); + + return new Identifier(Collections.unmodifiableList(values)); + } + + /** + * Creates a new {@link Identifier} from the current instance and sets the value for {@code key}. Existing key + * definitions for {@code name} are overwritten if they already exist. + * + * @param name must not be {@literal null} or empty. + * @param value + * @param targetType must not be {@literal null}. + * @return the {@link Identifier} containing all existing keys and the key part for {@code name}, {@code value}, and a + * {@link Class target type}. + */ + public Identifier withPart(SqlIdentifier name, Object value, Class targetType) { + + Assert.notNull(name, "Name must not be null"); + Assert.notNull(targetType, "Target type must not be null"); + + boolean overwritten = false; + List keys = new ArrayList<>(this.parts.size() + 1); + + for (SingleIdentifierValue singleValue : this.parts) { + + if (singleValue.getName().equals(name)) { + overwritten = true; + keys.add(new SingleIdentifierValue(singleValue.getName(), value, targetType)); + } else { + keys.add(singleValue); + } + } + + if (!overwritten) { + keys.add(new SingleIdentifierValue(name, value, targetType)); + } + + return new Identifier(Collections.unmodifiableList(keys)); + } + + /** + * Returns a {@link Map} containing the identifier name to value tuples. + * + * @return a {@link Map} containing the identifier name to value tuples. 
+ */ + public Map toMap() { + + Map result = new StringKeyedLinkedHashMap<>(getParts().size()); + forEach((name, value, type) -> result.put(name, value)); + return result; + } + + /** + * @return the {@link SingleIdentifierValue key parts}. + */ + public Collection getParts() { + return this.parts; + } + + /** + * Performs the given action for each element of the {@link Identifier} until all elements have been processed or the + * action throws an exception. Unless otherwise specified by the implementing class, actions are performed in the + * order of iteration (if an iteration order is specified). Exceptions thrown by the action are relayed to the caller. + * + * @param consumer the action, must not be {@literal null}. + */ + public void forEach(IdentifierConsumer consumer) { + + Assert.notNull(consumer, "IdentifierConsumer must not be null"); + + getParts().forEach(it -> consumer.accept(it.name, it.value, it.targetType)); + } + + /** + * Returns the number of key parts in this collection. + * + * @return the number of key parts in this collection. + */ + public int size() { + return this.parts.size(); + } + + @Nullable + public Object get(SqlIdentifier columnName) { + + for (SingleIdentifierValue part : parts) { + if (part.getName().equals(columnName)) { + return part.getValue(); + } + } + + return null; + } + + /** + * A single value of an Identifier consisting of the column name, the value and the target type which is to be used to + * store the element in the database. 
+ * + * @author Jens Schauder + */ + static final class SingleIdentifierValue { + + private final SqlIdentifier name; + private final Object value; + private final Class targetType; + + private SingleIdentifierValue(SqlIdentifier name, Object value, Class targetType) { + + Assert.notNull(name, "Name must not be null"); + Assert.notNull(value, "Name must not be null"); + Assert.notNull(targetType, "TargetType must not be null"); + + this.name = name; + this.value = value; + this.targetType = targetType; + } + + public SqlIdentifier getName() { + return this.name; + } + + public Object getValue() { + return this.value; + } + + public Class getTargetType() { + return this.targetType; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + SingleIdentifierValue that = (SingleIdentifierValue) o; + return name.equals(that.name) && value.equals(that.value) && targetType.equals(that.targetType); + } + + @Override + public int hashCode() { + return Objects.hash(name, value, targetType); + } + + @Override + public String toString() { + + return "SingleIdentifierValue{" + "name=" + name + ", value=" + value + ", targetType=" + targetType + '}'; + } + } + + /** + * Represents an operation that accepts identifier key parts (name, value and {@link Class target type}) defining a + * contract to consume {@link Identifier} values. + * + * @author Mark Paluch + */ + @FunctionalInterface + public interface IdentifierConsumer { + + /** + * Performs this operation on the given arguments. 
+ * + * @param name + * @param value + * @param targetType + */ + void accept(SqlIdentifier name, Object value, Class targetType); + } + + private static class StringKeyedLinkedHashMap extends LinkedHashMap { + + public StringKeyedLinkedHashMap(int initialCapacity) { + super(initialCapacity); + } + + @Override + public V get(Object key) { + + if (key instanceof String) { + + for (SqlIdentifier sqlIdentifier : keySet()) { + if (sqlIdentifier.getReference().equals(key)) { + return super.get(sqlIdentifier); + } + } + } + + return super.get(key); + } + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Identifier that = (Identifier) o; + return Objects.equals(parts, that.parts); + } + + @Override + public int hashCode() { + return Objects.hash(parts); + } + + @Override + public String toString() { + + return "Identifier{" + "parts=" + parts + '}'; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertStrategy.java new file mode 100644 index 0000000000..0c618e2466 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertStrategy.java @@ -0,0 +1,38 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.Nullable; + +/** + * Strategy for executing an insert. + * + * @author Chirag Tailor + * @since 2.4 + */ +interface InsertStrategy { + + /** + * @param sql the insert sql. Must not be {@code null}. + * @param sqlParameterSource the sql parameters for the record to be inserted. Must not be {@code null}. + * @return the id corresponding to the record that was inserted, if one was generated. If an id was not generated, + * this will be {@code null}. + * @since 2.4 + */ + @Nullable + Object execute(String sql, SqlParameterSource sqlParameterSource); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertStrategyFactory.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertStrategyFactory.java new file mode 100644 index 0000000000..a245e5235f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertStrategyFactory.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.Nullable; + +/** + * Factory which selects and builds the appropriate {@link InsertStrategy} or {@link BatchInsertStrategy} based on + * whether the insert is expected to generate ids. + * + * @author Chirag Tailor + * @author Jens Schauder + * @since 2.4 + */ +public class InsertStrategyFactory { + + private final NamedParameterJdbcOperations jdbcOperations; + private final Dialect dialect; + + public InsertStrategyFactory(NamedParameterJdbcOperations jdbcOperations, Dialect dialect) { + + this.jdbcOperations = jdbcOperations; + this.dialect = dialect; + } + + /** + * @param idValueSource the {@link IdValueSource} for the insert. + * @param idColumn the identifier for the id, if an id is expected to be generated. May be {@code null}. + * @return the {@link InsertStrategy} to be used for the insert. + * @since 2.4 + */ + InsertStrategy insertStrategy(IdValueSource idValueSource, @Nullable SqlIdentifier idColumn) { + + if (IdValueSource.GENERATED.equals(idValueSource)) { + return new IdGeneratingInsertStrategy(dialect, jdbcOperations, idColumn); + } + return new DefaultInsertStrategy(jdbcOperations); + } + + /** + * @param idValueSource the {@link IdValueSource} for the insert. + * @param idColumn the identifier for the id, if an ids are expected to be generated. May be {@code null}. + * @return the {@link BatchInsertStrategy} to be used for the batch insert. 
+ * @since 2.4 + */ + BatchInsertStrategy batchInsertStrategy(IdValueSource idValueSource, @Nullable SqlIdentifier idColumn) { + + if (IdValueSource.GENERATED.equals(idValueSource)) { + return new IdGeneratingBatchInsertStrategy(new IdGeneratingInsertStrategy(dialect, jdbcOperations, idColumn), + dialect, jdbcOperations, idColumn); + } + return new DefaultBatchInsertStrategy(jdbcOperations); + } + + private static class DefaultInsertStrategy implements InsertStrategy { + + private final NamedParameterJdbcOperations jdbcOperations; + + DefaultInsertStrategy(NamedParameterJdbcOperations jdbcOperations) { + this.jdbcOperations = jdbcOperations; + } + + @Override + public Object execute(String sql, SqlParameterSource sqlParameterSource) { + + jdbcOperations.update(sql, sqlParameterSource); + return null; + } + } + + private static class DefaultBatchInsertStrategy implements BatchInsertStrategy { + + private final NamedParameterJdbcOperations jdbcOperations; + + DefaultBatchInsertStrategy(NamedParameterJdbcOperations jdbcOperations) { + this.jdbcOperations = jdbcOperations; + } + + @Override + public Object[] execute(String sql, SqlParameterSource[] sqlParameterSources) { + + jdbcOperations.batchUpdate(sql, sqlParameterSources); + return new Object[sqlParameterSources.length]; + } + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertSubject.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertSubject.java new file mode 100644 index 0000000000..6db9fcf7fd --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/InsertSubject.java @@ -0,0 +1,74 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Objects; + +import org.springframework.lang.Nullable; + +/** + * The subject of an insert, described by the entity instance and its {@link Identifier}, where identifier contains + * information about data that needs to be considered for the insert but which is not part of the entity. Namely + * references back to a parent entity and key/index columns for entities that are stored in a {@link java.util.Map} or + * {@link java.util.List}. + * + * @author Chirag Tailor + * @since 2.4 + */ +public final class InsertSubject { + + private final T instance; + private final Identifier identifier; + + public static InsertSubject describedBy(T instance, Identifier identifier) { + return new InsertSubject<>(instance, identifier); + } + + private InsertSubject(T instance, Identifier identifier) { + + this.instance = instance; + this.identifier = identifier; + } + + public T getInstance() { + return instance; + } + + public Identifier getIdentifier() { + return identifier; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + InsertSubject that = (InsertSubject) o; + return Objects.equals(instance, that.instance) && Objects.equals(identifier, that.identifier); + } + + @Override + public int hashCode() { + return Objects.hash(instance, identifier); + } + + @Override + public String toString() { + return "InsertSubject{" + "instance=" + instance + ", identifier=" + identifier + 
'}'; + } +} diff --git a/src/main/java/org/springframework/data/jdbc/core/IterableOfEntryToMapConverter.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IterableOfEntryToMapConverter.java similarity index 69% rename from src/main/java/org/springframework/data/jdbc/core/IterableOfEntryToMapConverter.java rename to spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IterableOfEntryToMapConverter.java index d53a69574b..f5203db269 100644 --- a/src/main/java/org/springframework/data/jdbc/core/IterableOfEntryToMapConverter.java +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/IterableOfEntryToMapConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.jdbc.core; +package org.springframework.data.jdbc.core.convert; + +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.ConditionalConverter; @@ -21,31 +25,28 @@ import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; - /** * A converter for creating a {@link Map} from an {@link Iterable}. 
* * @author Jens Schauder */ -class IterableOfEntryToMapConverter implements ConditionalConverter, Converter { +class IterableOfEntryToMapConverter implements ConditionalConverter, Converter, Map> { + @SuppressWarnings("unchecked") @Nullable @Override - public Map convert(Iterable source) { + public Map convert(Iterable source) { Map result = new HashMap(); source.forEach(element -> { - if (!(element instanceof Entry)) { - throw new IllegalArgumentException(String.format("Cannot convert %s to Map.Entry", element.getClass())); + if (element instanceof Entry entry) { + result.put(entry.getKey(), entry.getValue()); + return; } - Entry entry = (Entry) element; - result.put(entry.getKey(), entry.getValue()); + throw new IllegalArgumentException(String.format("Cannot convert %s to Map.Entry", element.getClass())); }); return result; @@ -53,17 +54,17 @@ public Map convert(Iterable source) { /** * Tries to determine if the {@literal sourceType} can be converted to a {@link Map}. If this can not be determined, - * because the sourceTyppe does not contain information about the element type it returns {@literal true}. + * because the sourceType does not contain information about the element type it returns {@literal true}. * * @param sourceType {@link TypeDescriptor} to convert from. * @param targetType {@link TypeDescriptor} to convert to. - * @return + * @return if the sourceType can be converted to a Map. 
*/ @Override public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) { - Assert.notNull(sourceType, "Source type must not be null."); - Assert.notNull(targetType, "Target type must not be null."); + Assert.notNull(sourceType, "Source type must not be null"); + Assert.notNull(targetType, "Target type must not be null"); if (!sourceType.isAssignableTo(TypeDescriptor.valueOf(Iterable.class))) return false; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcArrayColumns.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcArrayColumns.java new file mode 100644 index 0000000000..5f68fbb735 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcArrayColumns.java @@ -0,0 +1,94 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.SQLType; + +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.relational.core.dialect.ArrayColumns; + +/** + * {@link org.springframework.data.relational.core.dialect.ArrayColumns} that offer JDBC-specific functionality. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 2.3 + * @deprecated since 3.5, replacement moved to {@link org.springframework.data.jdbc.core.dialect.JdbcArrayColumns}. 
+ */ +@Deprecated(forRemoval = true) +public interface JdbcArrayColumns extends org.springframework.data.jdbc.core.dialect.JdbcArrayColumns { + + @Override + default Class getArrayType(Class userType) { + return ArrayColumns.unwrapComponentType(userType); + } + + /** + * Determine the {@link SQLType} for a given {@link Class array component type}. + * + * @param componentType component type of the array. + * @return the dialect-supported array type. + * @since 3.1.3 + */ + default SQLType getSqlType(Class componentType) { + return JdbcUtil.targetSqlTypeFor(getArrayType(componentType)); + } + + /** + * The appropriate SQL type as a String which should be used to represent the given {@link SQLType} in an + * {@link java.sql.Array}. Defaults to the name of the argument. + * + * @param jdbcType the {@link SQLType} value representing the type that should be stored in the + * {@link java.sql.Array}. Must not be {@literal null}. + * @return the appropriate SQL type as a String which should be used to represent the given {@link SQLType} in an + * {@link java.sql.Array}. Guaranteed to be not {@literal null}. + */ + default String getArrayTypeName(SQLType jdbcType) { + return jdbcType.getName(); + } + + /** + * Default {@link ArrayColumns} implementation for dialects that do not support array-typed columns. + */ + enum Unsupported implements JdbcArrayColumns { + + INSTANCE; + + @Override + public boolean isSupported() { + return false; + } + + @Override + public String getArrayTypeName(SQLType jdbcType) { + throw new UnsupportedOperationException("Array types not supported"); + } + + } + + /** + * Default {@link ArrayColumns} implementation for dialects that do not support array-typed columns. 
+ */ + enum DefaultSupport implements JdbcArrayColumns { + + INSTANCE; + + @Override + public boolean isSupported() { + return true; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcBackReferencePropertyValueProvider.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcBackReferencePropertyValueProvider.java new file mode 100644 index 0000000000..34f9e88de5 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcBackReferencePropertyValueProvider.java @@ -0,0 +1,54 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.mapping.model.PropertyValueProvider; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * {@link PropertyValueProvider} obtaining values from a {@link ResultSetAccessor}. For a given id property it provides + * the value in the resultset under which other entities refer back to it. 
+ * + * @author Jens Schauder + * @author Kurt Niemi + * @author Mikhail Polivakha + * @since 2.0 + */ +class JdbcBackReferencePropertyValueProvider implements PropertyValueProvider { + + private final AggregatePath basePath; + private final ResultSetAccessor resultSet; + + /** + * @param basePath path from the aggregate root relative to which all properties get resolved. + * @param resultSet the {@link ResultSetAccessor} from which to obtain the actual values. + */ + JdbcBackReferencePropertyValueProvider(AggregatePath basePath, ResultSetAccessor resultSet) { + + this.resultSet = resultSet; + this.basePath = basePath; + } + + @Override + public T getPropertyValue(RelationalPersistentProperty property) { + return (T) resultSet.getObject(basePath.append(property).getTableInfo().reverseColumnInfo().alias().getReference()); + } + + public JdbcBackReferencePropertyValueProvider extendBy(RelationalPersistentProperty property) { + return new JdbcBackReferencePropertyValueProvider(basePath.append(property), resultSet); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcColumnTypes.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcColumnTypes.java new file mode 100644 index 0000000000..fa74b3b94f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcColumnTypes.java @@ -0,0 +1,62 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.time.ZonedDateTime; +import java.time.temporal.Temporal; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.util.ClassUtils; + +/** + * Utility that determines the necessary type conversions between Java types used in the domain model and types + * compatible with JDBC drivers. + * + * @author Jens Schauder + * @since 2.0 + */ +public enum JdbcColumnTypes { + + INSTANCE { + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public Class resolvePrimitiveType(Class type) { + + return javaToDbType.entrySet().stream() // + .filter(e -> e.getKey().isAssignableFrom(type)) // + .map(e -> (Class) e.getValue()) // + .findFirst() // + .orElseGet(() -> (Class) ClassUtils.resolvePrimitiveIfNecessary(type)); + } + }; + + private static final Map, Class> javaToDbType = new LinkedHashMap<>(); + + static { + + javaToDbType.put(Enum.class, String.class); + javaToDbType.put(ZonedDateTime.class, String.class); + javaToDbType.put(OffsetDateTime.class, OffsetDateTime.class); + javaToDbType.put(LocalDateTime.class, LocalDateTime.class); + javaToDbType.put(Temporal.class, Timestamp.class); + } + + public abstract Class resolvePrimitiveType(Class type); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcConverter.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcConverter.java new file mode 100644 index 0000000000..3e73a73cf7 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcConverter.java @@ -0,0 +1,131 @@ +/* +* Copyright 2019-2025 the original author or authors. 
+* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package org.springframework.data.jdbc.core.convert; + +import java.sql.SQLType; + +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.relational.core.conversion.RelationalConverter; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * A {@link JdbcConverter} is responsible for converting for values to the native relational representation and vice + * versa. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 1.1 + */ +public interface JdbcConverter extends RelationalConverter { + + /** + * Convert a property value into a {@link JdbcValue} that contains the converted value and information how to bind it + * to JDBC parameters. + * + * @param value a value as it is used in the object model. May be {@code null}. + * @param type {@literal Class} into which the value is to be converted. Must not be {@code null}. + * @param sqlType the {@link SQLType} to be used if non is specified by a converter. + * @return The converted value wrapped in a {@link JdbcValue}. Guaranteed to be not {@literal null}. 
+ * @since 2.4 + */ + default JdbcValue writeJdbcValue(@Nullable Object value, Class type, SQLType sqlType) { + return writeJdbcValue(value, TypeInformation.of(type), sqlType); + } + + /** + * Convert a property value into a {@link JdbcValue} that contains the converted value and information how to bind it + * to JDBC parameters. + * + * @param value a value as it is used in the object model. May be {@code null}. + * @param type {@link TypeInformation} into which the value is to be converted. Must not be {@code null}. + * @param sqlType the {@link SQLType} to be used if non is specified by a converter. + * @return The converted value wrapped in a {@link JdbcValue}. Guaranteed to be not {@literal null}. + * @since 3.2.6 + */ + JdbcValue writeJdbcValue(@Nullable Object value, TypeInformation type, SQLType sqlType); + + /** + * Read a {@link RowDocument} into the requested {@link Class aggregate type} and resolve references by looking these + * up from {@link RelationResolver}. + * + * @param type target aggregate type. + * @param source source {@link RowDocument}. + * @return the converted object. + * @param aggregate type. + * @since 3.2 + * @see #read(Class, RowDocument) + */ + default R readAndResolve(Class type, RowDocument source) { + return readAndResolve(type, source, Identifier.empty()); + } + + /** + * Read a {@link RowDocument} into the requested {@link Class aggregate type} and resolve references by looking these + * up from {@link RelationResolver}. + * + * @param type target aggregate type. + * @param source source {@link RowDocument}. + * @param identifier identifier chain. + * @return the converted object. + * @param aggregate type. 
+ * @since 3.2 + * @see #read(Class, RowDocument) + */ + default R readAndResolve(Class type, RowDocument source, Identifier identifier) { + return readAndResolve(TypeInformation.of(type), source, identifier); + } + + /** + * Read a {@link RowDocument} into the requested {@link TypeInformation aggregate type} and resolve references by + * looking these up from {@link RelationResolver}. + * + * @param type target aggregate type. + * @param source source {@link RowDocument}. + * @param identifier identifier chain. + * @return the converted object. + * @param aggregate type. + * @since 3.2.6 + * @see #read(Class, RowDocument) + */ + R readAndResolve(TypeInformation type, RowDocument source, Identifier identifier); + + /** + * The type to be used to store this property in the database. Multidimensional arrays are unwrapped to reflect a + * top-level array type (e.g. {@code String[][]} returns {@code String[]}). + * + * @return a {@link Class} that is suitable for usage with JDBC drivers. + * @see org.springframework.data.jdbc.support.JdbcUtil#targetSqlTypeFor(Class) + * @since 2.0 TODO: Introduce variant returning TypeInformation. + */ + Class getColumnType(RelationalPersistentProperty property); + + /** + * The SQL type constant used when using this property as a parameter for a SQL statement. + * + * @return Must not be {@code null}. + * @see java.sql.Types + * @since 2.0 + */ + SQLType getTargetSqlType(RelationalPersistentProperty property); + + @Override + RelationalMappingContext getMappingContext(); + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcCustomConversions.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcCustomConversions.java new file mode 100644 index 0000000000..81befa9abe --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcCustomConversions.java @@ -0,0 +1,121 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.springframework.core.convert.converter.GenericConverter.ConvertiblePair; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.jdbc.core.mapping.JdbcSimpleTypes; + +/** + * Value object to capture custom conversion. {@link JdbcCustomConversions} also act as factory for + * {@link org.springframework.data.mapping.model.SimpleTypeHolder} + * + * @author Mark Paluch + * @author Jens Schauder + * @author Christoph Strobl + * @see CustomConversions + * @see org.springframework.data.mapping.model.SimpleTypeHolder + * @see JdbcSimpleTypes + */ +public class JdbcCustomConversions extends CustomConversions { + + private static final Collection STORE_CONVERTERS = Collections + .unmodifiableCollection(Jsr310TimestampBasedConverters.getConvertersToRegister()); + + /** + * Creates an empty {@link JdbcCustomConversions} object. + */ + public JdbcCustomConversions() { + this(Collections.emptyList()); + } + + /** + * Create a new {@link JdbcCustomConversions} instance registering the given converters and the default store + * converters. + * + * @param converters must not be {@literal null}. 
+ */ + public JdbcCustomConversions(List converters) { + super(constructConverterConfiguration(converters)); + } + + /** + * Create a new {@link JdbcCustomConversions} instance registering the given converters and the default store + * converters. + * + * @since 2.3 + */ + public JdbcCustomConversions(StoreConversions storeConversions, List userConverters) { + + super(new ConverterConfiguration( // + storeConversions, // + userConverters, // + JdbcCustomConversions::excludeConversionsBetweenDateAndJsr310Types // + )); + } + + /** + * Create a new {@link JdbcCustomConversions} instance given + * {@link org.springframework.data.convert.CustomConversions.ConverterConfiguration}. + * + * @param converterConfiguration must not be {@literal null}. + * @since 2.2 + */ + public JdbcCustomConversions(ConverterConfiguration converterConfiguration) { + super(converterConfiguration); + } + + private static ConverterConfiguration constructConverterConfiguration(List converters) { + + return new ConverterConfiguration( // + StoreConversions.of(JdbcSimpleTypes.HOLDER, STORE_CONVERTERS), // + converters, // + JdbcCustomConversions::excludeConversionsBetweenDateAndJsr310Types // + ); + } + + + /** + * Obtain a read only copy of default store converters. + * + * @return never {@literal null}. 
+ * @since 2.3 + */ + public static Collection storeConverters() { + return STORE_CONVERTERS; + } + + private static boolean isDateTimeApiConversion(ConvertiblePair cp) { + + if (cp.getSourceType().equals(java.util.Date.class)) { + return cp.getTargetType().getTypeName().startsWith("java.time."); + } + + if (cp.getTargetType().equals(java.util.Date.class)) { + return cp.getSourceType().getTypeName().startsWith("java.time."); + } + + return false; + } + + private static boolean excludeConversionsBetweenDateAndJsr310Types(ConvertiblePair cp) { + return !isDateTimeApiConversion(cp); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcIdentifierBuilder.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcIdentifierBuilder.java new file mode 100644 index 0000000000..22944aaad2 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcIdentifierBuilder.java @@ -0,0 +1,74 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.util.Assert; + +/** + * Builder for {@link Identifier}. 
Mainly for internal use within the framework + * + * @author Jens Schauder + * @since 1.1 + */ +public class JdbcIdentifierBuilder { + + private Identifier identifier; + + private JdbcIdentifierBuilder(Identifier identifier) { + this.identifier = identifier; + } + + public static JdbcIdentifierBuilder empty() { + return new JdbcIdentifierBuilder(Identifier.empty()); + } + + /** + * Creates ParentKeys with backreference for the given path and value of the parents id. + */ + public static JdbcIdentifierBuilder forBackReferences(JdbcConverter converter, AggregatePath path, Object value) { + + Identifier identifier = Identifier.of( // + path.getTableInfo().reverseColumnInfo().name(), // + value, // + converter.getColumnType(path.getIdDefiningParentPath().getRequiredIdProperty()) // + ); + + return new JdbcIdentifierBuilder(identifier); + } + + /** + * Adds a qualifier to the identifier to build. A qualifier is a map key or a list index. + * + * @param path path to the map that gets qualified by {@code value}. Must not be {@literal null}. + * @param value map key or list index qualifying the map identified by {@code path}. Must not be {@literal null}. + * @return this builder. Guaranteed to be not {@literal null}. 
+ */ + public JdbcIdentifierBuilder withQualifier(AggregatePath path, Object value) { + + Assert.notNull(path, "Path must not be null"); + Assert.notNull(value, "Value must not be null"); + + identifier = identifier.withPart(path.getTableInfo().qualifierColumnInfo().name(), value, + path.getTableInfo().qualifierColumnType()); + + return this; + } + + public Identifier build() { + return identifier; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcPropertyValueProvider.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcPropertyValueProvider.java new file mode 100644 index 0000000000..4485ef28bc --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcPropertyValueProvider.java @@ -0,0 +1,67 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.mapping.model.PropertyValueProvider; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * {@link PropertyValueProvider} obtaining values from a {@link ResultSetAccessor}. 
+ * + * @author Jens Schauder + * @author Kurt Niemi + * @since 2.0 + */ +class JdbcPropertyValueProvider implements PropertyValueProvider { + + private final AggregatePath basePath; + private final ResultSetAccessor resultSet; + + /** + * @param basePath path from the aggregate root relative to which all properties get resolved. + * @param resultSet the {@link ResultSetAccessor} from which to obtain the actual values. + */ + JdbcPropertyValueProvider(AggregatePath basePath, ResultSetAccessor resultSet) { + + this.resultSet = resultSet; + this.basePath = basePath; + } + + @Override + public T getPropertyValue(RelationalPersistentProperty property) { + return (T) resultSet.getObject(getColumnName(property)); + } + + /** + * Returns whether the underlying source contains a data source for the given {@link RelationalPersistentProperty}. + * + * @param property + * @return + */ + public boolean hasProperty(RelationalPersistentProperty property) { + return resultSet.hasValue(getColumnName(property)); + } + + private String getColumnName(RelationalPersistentProperty property) { + AggregatePath.ColumnInfo columnInfo = basePath.append(property).getColumnInfo(); + return columnInfo.alias().getReference(); + } + + public JdbcPropertyValueProvider extendBy(RelationalPersistentProperty property) { + return new JdbcPropertyValueProvider(basePath.append(property), resultSet); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcTypeFactory.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcTypeFactory.java new file mode 100644 index 0000000000..f48b78c472 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/JdbcTypeFactory.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.Array; + +/** + * Allows the creation of instances of database dependent types, e.g. {@link Array}. + * + * @author Jens Schauder + * @since 1.1 + */ +public interface JdbcTypeFactory { + + /** + * An implementation used in places where a proper {@code JdbcTypeFactory} can not be provided but an instance needs + * to be provided anyway, mostly for providing backward compatibility. Calling it will result in an exception. The + * features normally supported by a {@link JdbcTypeFactory} will not work. + */ + static JdbcTypeFactory unsupported() { + + return value -> { + throw new UnsupportedOperationException("This JdbcTypeFactory does not support Array creation"); + }; + } + + /** + * Converts the provided value in a {@link Array} instance. + * + * @param value the value to be converted. Must not be {@literal null}. + * @return an {@link Array}. Guaranteed to be not {@literal null}. + */ + Array createArray(Object[] value); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/Jsr310TimestampBasedConverters.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/Jsr310TimestampBasedConverters.java new file mode 100644 index 0000000000..6f0346d347 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/Jsr310TimestampBasedConverters.java @@ -0,0 +1,164 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static java.time.LocalDateTime.*; +import static java.time.ZoneId.*; + +import java.sql.Timestamp; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.List; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.lang.NonNull; + +/** + * Helper class to register JSR-310 specific {@link Converter} implementations. These converters are based on + * {@link java.sql.Timestamp} instead of {@link Date} and therefore preserve nanosecond precision + * + * @see org.springframework.data.convert.Jsr310Converters + * @author Jens Schauder + * @since 2.2 + */ +public abstract class Jsr310TimestampBasedConverters { + + /** + * Returns the converters to be registered. + * + * Note that the {@link LocalDateTimeToTimestampConverter} is not included, since many database don't need that conversion. + * Databases that do need it, should include it in the conversions offered by their respective dialect. + * + * @return a collection of converters. Guaranteed to be not {@literal null}. 
+ */ + public static Collection> getConvertersToRegister() { + + List> converters = new ArrayList<>(8); + + converters.add(TimestampToLocalDateTimeConverter.INSTANCE); + converters.add(TimestampToLocalDateConverter.INSTANCE); + converters.add(LocalDateToTimestampConverter.INSTANCE); + converters.add(TimestampToLocalTimeConverter.INSTANCE); + converters.add(LocalTimeToTimestampConverter.INSTANCE); + converters.add(TimestampToInstantConverter.INSTANCE); + converters.add(InstantToTimestampConverter.INSTANCE); + + return converters; + } + + @ReadingConverter + public enum TimestampToLocalDateTimeConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public LocalDateTime convert(Timestamp source) { + return ofInstant(source.toInstant(), systemDefault()); + } + } + + @WritingConverter + public enum LocalDateTimeToTimestampConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public Timestamp convert(LocalDateTime source) { + return Timestamp.from(source.atZone(systemDefault()).toInstant()); + } + } + + @ReadingConverter + public enum TimestampToLocalDateConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public LocalDate convert(Timestamp source) { + return source.toLocalDateTime().toLocalDate(); + } + } + + @WritingConverter + public enum LocalDateToTimestampConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public Timestamp convert(LocalDate source) { + return Timestamp.from(source.atStartOfDay(systemDefault()).toInstant()); + } + } + + @ReadingConverter + public enum TimestampToLocalTimeConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public LocalTime convert(Timestamp source) { + return source.toLocalDateTime().toLocalTime(); + } + } + + @WritingConverter + public enum LocalTimeToTimestampConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public Timestamp convert(LocalTime source) { + return 
Timestamp.from(source.atDate(LocalDate.now()).atZone(systemDefault()).toInstant()); + } + } + + @ReadingConverter + public enum TimestampToInstantConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public Instant convert(Timestamp source) { + return source.toInstant(); + } + } + + @WritingConverter + public enum InstantToTimestampConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public Timestamp convert(Instant source) { + return Timestamp.from(source); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/MapEntityRowMapper.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/MapEntityRowMapper.java new file mode 100644 index 0000000000..ca38e9604d --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/MapEntityRowMapper.java @@ -0,0 +1,70 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; +import org.springframework.jdbc.core.RowMapper; + +/** + * A {@link RowMapper} that maps a row to a {@link Map.Entry} so an {@link Iterable} of those can be converted to a + * {@link Map} using an {@link IterableOfEntryToMapConverter}. Creation of the {@literal value} part of the resulting + * {@link Map.Entry} is delegated to a {@link RowMapper} provided in the constructor. + * + * @author Jens Schauder + * @author Mikhail Polivakha + */ +class MapEntityRowMapper implements RowMapper> { + + private final AggregatePath path; + private final JdbcConverter converter; + private final Identifier identifier; + private final SqlIdentifier keyColumn; + + MapEntityRowMapper(AggregatePath path, JdbcConverter converter, Identifier identifier, SqlIdentifier keyColumn) { + + this.path = path; + this.converter = converter; + this.identifier = identifier; + this.keyColumn = keyColumn; + } + + @Override + public Map.Entry mapRow(ResultSet rs, int rowNum) throws SQLException { + + RowDocument document = RowDocumentResultSetExtractor.toRowDocument(rs); + + Object key = document.get(keyColumn.getReference()); + Class qualifierColumnType = path.getRequiredLeafProperty().getQualifierColumnType(); + Object convertedKey = converter.readValue(key, TypeInformation.of(qualifierColumnType)); + + return new HashMap.SimpleEntry<>(convertedKey, mapEntity(document, key)); + } + + @SuppressWarnings("unchecked") + private T mapEntity(RowDocument document, Object key) { + + return (T) converter.readAndResolve(path.getRequiredLeafEntity().getTypeInformation(), document, + 
identifier.withPart(keyColumn, key, Object.class)); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/MappingJdbcConverter.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/MappingJdbcConverter.java new file mode 100644 index 0000000000..7460931dab --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/MappingJdbcConverter.java @@ -0,0 +1,557 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.Array; +import java.sql.JDBCType; +import java.sql.SQLException; +import java.sql.SQLType; +import java.util.Iterator; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.context.ApplicationContextAware; +import org.springframework.core.convert.ConverterNotFoundException; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.convert.converter.ConverterRegistry; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.relational.core.conversion.MappingRelationalConverter; +import org.springframework.data.relational.core.conversion.ObjectPath; +import org.springframework.data.relational.core.conversion.RelationalConverter; +import org.springframework.data.relational.core.conversion.RowDocumentAccessor; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * {@link RelationalConverter} that uses a {@link 
MappingContext} to apply conversion of relational values to property + * values. + *

+ * Conversion is configurable by providing a customized {@link CustomConversions}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Christoph Strobl + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @see MappingContext + * @see SimpleTypeHolder + * @see CustomConversions + * @since 3.2 + */ +public class MappingJdbcConverter extends MappingRelationalConverter implements JdbcConverter, ApplicationContextAware { + + private static final Log LOG = LogFactory.getLog(MappingJdbcConverter.class); + private static final Converter, Map> ITERABLE_OF_ENTRY_TO_MAP_CONVERTER = new IterableOfEntryToMapConverter(); + + private final JdbcTypeFactory typeFactory; + private final RelationResolver relationResolver; + + /** + * Creates a new {@link MappingJdbcConverter} given {@link MappingContext} and a {@link JdbcTypeFactory#unsupported() + * no-op type factory} throwing {@link UnsupportedOperationException} on type creation. Use + * {@link #MappingJdbcConverter(RelationalMappingContext, RelationResolver, CustomConversions, JdbcTypeFactory)} + * (MappingContext, RelationResolver, JdbcTypeFactory)} to convert arrays and large objects into JDBC-specific types. + * + * @param context must not be {@literal null}. + * @param relationResolver used to fetch additional relations from the database. Must not be {@literal null}. + */ + public MappingJdbcConverter(RelationalMappingContext context, RelationResolver relationResolver) { + + super(context, new JdbcCustomConversions()); + + Assert.notNull(relationResolver, "RelationResolver must not be null"); + + this.typeFactory = JdbcTypeFactory.unsupported(); + this.relationResolver = relationResolver; + + registerAggregateReferenceConverters(); + } + + /** + * Creates a new {@link MappingJdbcConverter} given {@link MappingContext}. + * + * @param context must not be {@literal null}. + * @param relationResolver used to fetch additional relations from the database. Must not be {@literal null}. 
+ * @param typeFactory must not be {@literal null} + */ + public MappingJdbcConverter(RelationalMappingContext context, RelationResolver relationResolver, + CustomConversions conversions, JdbcTypeFactory typeFactory) { + + super(context, conversions); + + Assert.notNull(typeFactory, "JdbcTypeFactory must not be null"); + Assert.notNull(relationResolver, "RelationResolver must not be null"); + + this.typeFactory = typeFactory; + this.relationResolver = relationResolver; + + registerAggregateReferenceConverters(); + } + + private void registerAggregateReferenceConverters() { + + ConverterRegistry registry = (ConverterRegistry) getConversionService(); + AggregateReferenceConverters.getConvertersToRegister(getConversionService()).forEach(registry::addConverter); + } + + @Nullable + private Class getEntityColumnType(TypeInformation type) { + + RelationalPersistentEntity persistentEntity = getMappingContext().getPersistentEntity(type); + + if (persistentEntity == null) { + return null; + } + + RelationalPersistentProperty idProperty = persistentEntity.getIdProperty(); + + if (idProperty == null) { + return null; + } + return getColumnType(idProperty); + } + + private Class getReferenceColumnType(RelationalPersistentProperty property) { + + Class componentType = property.getTypeInformation().getRequiredComponentType().getType(); + RelationalPersistentEntity referencedEntity = getMappingContext().getRequiredPersistentEntity(componentType); + + return getColumnType(referencedEntity.getRequiredIdProperty()); + } + + @Override + public SQLType getTargetSqlType(RelationalPersistentProperty property) { + return JdbcUtil.targetSqlTypeFor(getColumnType(property)); + } + + @Override + public Class getColumnType(RelationalPersistentProperty property) { + return doGetColumnType(property); + } + + private Class doGetColumnType(RelationalPersistentProperty property) { + + if (property.isAssociation()) { + return getReferenceColumnType(property); + } + + if (property.isEntity()) { + 
Class columnType = getEntityColumnType(property.getTypeInformation().getActualType()); + + if (columnType != null) { + return columnType; + } + } + + Class componentColumnType = JdbcColumnTypes.INSTANCE.resolvePrimitiveType(property.getActualType()); + + while (componentColumnType.isArray()) { + componentColumnType = componentColumnType.getComponentType(); + } + + if (property.isCollectionLike() && !property.isEntity()) { + return java.lang.reflect.Array.newInstance(componentColumnType, 0).getClass(); + } + + return componentColumnType; + } + + @Override + @Nullable + public Object readValue(@Nullable Object value, TypeInformation type) { + + if (value == null) { + return value; + } + + if (value instanceof Array array) { + try { + return super.readValue(array.getArray(), type); + } catch (SQLException | ConverterNotFoundException e) { + LOG.info("Failed to extract a value of type %s from an Array; Attempting to use standard conversions", e); + } + } + + return super.readValue(value, type); + } + + @Override + @Nullable + public Object writeValue(@Nullable Object value, TypeInformation type) { + + if (value == null) { + return null; + } + + return super.writeValue(value, type); + } + + private boolean canWriteAsJdbcValue(@Nullable Object value) { + + if (value == null) { + return true; + } + + if (value instanceof AggregateReference aggregateReference) { + return canWriteAsJdbcValue(aggregateReference.getId()); + } + + RelationalPersistentEntity persistentEntity = getMappingContext().getPersistentEntity(value.getClass()); + + if (persistentEntity != null) { + + Object id = persistentEntity.getIdentifierAccessor(value).getIdentifier(); + return canWriteAsJdbcValue(id); + } + + if (value instanceof JdbcValue) { + return true; + } + + Optional> customWriteTarget = getConversions().getCustomWriteTarget(value.getClass()); + return customWriteTarget.isPresent() && customWriteTarget.get().isAssignableFrom(JdbcValue.class); + } + + @Override + public JdbcValue 
writeJdbcValue(@Nullable Object value, TypeInformation columnType, SQLType sqlType) { + + TypeInformation targetType = canWriteAsJdbcValue(value) ? TypeInformation.of(JdbcValue.class) : columnType; + Object convertedValue = writeValue(value, targetType); + + if (convertedValue instanceof JdbcValue result) { + return result; + } + + if (convertedValue == null || !convertedValue.getClass().isArray()) { + return JdbcValue.of(convertedValue, sqlType); + } + + Class componentType = convertedValue.getClass().getComponentType(); + if (componentType != byte.class && componentType != Byte.class) { + + Object[] objectArray = requireObjectArray(convertedValue); + return JdbcValue.of(typeFactory.createArray(objectArray), JDBCType.ARRAY); + } + + if (componentType == Byte.class) { + convertedValue = ArrayUtils.toPrimitive((Byte[]) convertedValue); + } + + return JdbcValue.of(convertedValue, JDBCType.BINARY); + } + + @SuppressWarnings("unchecked") + @Override + public R readAndResolve(TypeInformation type, RowDocument source, Identifier identifier) { + + RelationalPersistentEntity entity = (RelationalPersistentEntity) getMappingContext() + .getRequiredPersistentEntity(type); + AggregatePath path = getMappingContext().getAggregatePath(entity); + Identifier identifierToUse = ResolvingRelationalPropertyValueProvider.potentiallyAppendIdentifier(identifier, + entity, it -> source.get(it.getColumnName().getReference())); + ResolvingConversionContext context = new ResolvingConversionContext(getConversionContext(ObjectPath.ROOT), path, + identifierToUse); + + return readAggregate(context, source, entity.getTypeInformation()); + } + + @Override + protected RelationalPropertyValueProvider newValueProvider(RowDocumentAccessor documentAccessor, + ValueExpressionEvaluator evaluator, ConversionContext context) { + + if (context instanceof ResolvingConversionContext rcc) { + + AggregatePathValueProvider delegate = (AggregatePathValueProvider) super.newValueProvider(documentAccessor, + 
evaluator, context); + + return new ResolvingRelationalPropertyValueProvider(delegate, documentAccessor, rcc, rcc.identifier()); + } + + return super.newValueProvider(documentAccessor, evaluator, context); + } + + /** + * {@link RelationalPropertyValueProvider} using a resolving context to look up relations. This is highly + * context-sensitive. Note that the identifier is held here because of a chicken and egg problem, while + * {@link ResolvingConversionContext} holds the {@link AggregatePath}. + */ + class ResolvingRelationalPropertyValueProvider implements RelationalPropertyValueProvider { + + private final AggregatePathValueProvider delegate; + + private final RowDocumentAccessor accessor; + + private final ResolvingConversionContext context; + + private final Identifier identifier; + + private ResolvingRelationalPropertyValueProvider(AggregatePathValueProvider delegate, RowDocumentAccessor accessor, + ResolvingConversionContext context, Identifier identifier) { + + AggregatePath path = context.aggregatePath(); + + this.delegate = delegate; + this.accessor = accessor; + this.context = context; + this.identifier = path.isEntity() + ? potentiallyAppendIdentifier(identifier, path.getRequiredLeafEntity(), + property -> delegate.getValue(path.append(property))) + : identifier; + } + + /** + * Conditionally append the identifier if the entity has an identifier property. 
+ */ + static Identifier potentiallyAppendIdentifier(Identifier base, RelationalPersistentEntity entity, + Function getter) { + + if (entity.hasIdProperty()) { + + RelationalPersistentProperty idProperty = entity.getRequiredIdProperty(); + Object propertyValue = getter.apply(idProperty); + + if (propertyValue != null) { + return base.withPart(idProperty.getColumnName(), propertyValue, idProperty.getType()); + } + } + + return base; + } + + @SuppressWarnings("unchecked") + @Nullable + @Override + public T getPropertyValue(RelationalPersistentProperty property) { + + AggregatePath aggregatePath = this.context.aggregatePath(); + + if (getConversions().isSimpleType(property.getActualType())) { + return (T) delegate.getValue(aggregatePath); + } + + if (property.isEntity()) { + + if (property.isCollectionLike() || property.isMap()) { + + Identifier identifierToUse = this.identifier; + AggregatePath idDefiningParentPath = aggregatePath.getIdDefiningParentPath(); + + // note that the idDefiningParentPath might not itself have an id property, but have a combination of back + // references and possibly keys, that form an id + if (idDefiningParentPath.hasIdProperty()) { + + RelationalPersistentProperty identifier = idDefiningParentPath.getRequiredIdProperty(); + AggregatePath idPath = idDefiningParentPath.append(identifier); + Object value = delegate.getValue(idPath); + + Assert.state(value != null, "Identifier value must not be null at this point"); + + identifierToUse = Identifier.of(aggregatePath.getTableInfo().reverseColumnInfo().name(), value, + identifier.getActualType()); + } + + Iterable allByPath = relationResolver.findAllByPath(identifierToUse, + aggregatePath.getRequiredPersistentPropertyPath()); + + if (property.isCollectionLike()) { + return (T) allByPath; + } + + if (property.isMap()) { + return (T) ITERABLE_OF_ENTRY_TO_MAP_CONVERTER.convert(allByPath); + } + + Iterator iterator = allByPath.iterator(); + if (iterator.hasNext()) { + return (T) iterator.next(); + 
} + + return null; + } + + return hasValue(property) ? (T) readAggregate(this.context, accessor, property.getTypeInformation()) : null; + } + + return (T) delegate.getValue(aggregatePath); + } + + @Override + public boolean hasValue(RelationalPersistentProperty property) { + + if ((property.isCollectionLike() && property.isEntity()) || property.isMap()) { + // attempt relation fetch + return true; + } + + AggregatePath aggregatePath = context.aggregatePath(); + + if (property.isEntity()) { + + RelationalPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(property); + if (entity.hasIdProperty()) { + + RelationalPersistentProperty referenceId = entity.getRequiredIdProperty(); + AggregatePath toUse = aggregatePath.append(referenceId); + return delegate.hasValue(toUse); + } + + return delegate.hasValue(aggregatePath.getTableInfo().reverseColumnInfo().alias()); + } + + return delegate.hasValue(aggregatePath); + } + + @Override + public boolean hasNonEmptyValue(RelationalPersistentProperty property) { + + if ((property.isCollectionLike() && property.isEntity()) || property.isMap()) { + // attempt relation fetch + return true; + } + + AggregatePath aggregatePath = context.aggregatePath(); + + if (property.isEntity()) { + + RelationalPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(property); + if (entity.hasIdProperty()) { + + RelationalPersistentProperty referenceId = entity.getRequiredIdProperty(); + AggregatePath toUse = aggregatePath.append(referenceId); + return delegate.hasValue(toUse); + } + + return delegate.hasValue(aggregatePath.getTableInfo().reverseColumnInfo().alias()); + } + + return delegate.hasNonEmptyValue(aggregatePath); + } + + @Override + public RelationalPropertyValueProvider withContext(ConversionContext context) { + + return context == this.context ? 
this + : new ResolvingRelationalPropertyValueProvider(delegate.withContext(context), accessor, + (ResolvingConversionContext) context, identifier); + } + } + + /** + * Marker object to indicate that the property value provider should resolve relations. + * + * @param delegate + * @param aggregatePath + * @param identifier + */ + private record ResolvingConversionContext(ConversionContext delegate, AggregatePath aggregatePath, + Identifier identifier) implements ConversionContext { + + @Override + public S convert(Object source, TypeInformation typeHint) { + return delegate.convert(source, typeHint); + } + + @Override + public S convert(Object source, TypeInformation typeHint, ConversionContext context) { + return delegate.convert(source, typeHint, context); + } + + @Override + public ResolvingConversionContext forProperty(String name) { + RelationalPersistentProperty property = aggregatePath.getRequiredLeafEntity().getRequiredPersistentProperty(name); + return forProperty(property); + } + + @Override + public ResolvingConversionContext forProperty(RelationalPersistentProperty property) { + ConversionContext nested = delegate.forProperty(property); + return new ResolvingConversionContext(nested, aggregatePath.append(property), identifier); + } + + @Override + public ResolvingConversionContext withPath(ObjectPath currentPath) { + return new ResolvingConversionContext(delegate.withPath(currentPath), aggregatePath, identifier); + } + + @Override + public ObjectPath getPath() { + return delegate.getPath(); + } + + @Override + public CustomConversions getCustomConversions() { + return delegate.getCustomConversions(); + } + + @Override + public RelationalConverter getSourceConverter() { + return delegate.getSourceConverter(); + } + } + + static Object[] requireObjectArray(Object source) { + + Assert.isTrue(source.getClass().isArray(), "Source object is not an array"); + + Class componentType = source.getClass().getComponentType(); + + if (componentType.isPrimitive()) { + 
if (componentType == boolean.class) { + return ArrayUtils.toObject((boolean[]) source); + } + if (componentType == byte.class) { + return ArrayUtils.toObject((byte[]) source); + } + if (componentType == char.class) { + return ArrayUtils.toObject((char[]) source); + } + if (componentType == double.class) { + return ArrayUtils.toObject((double[]) source); + } + if (componentType == float.class) { + return ArrayUtils.toObject((float[]) source); + } + if (componentType == int.class) { + return ArrayUtils.toObject((int[]) source); + } + if (componentType == long.class) { + return ArrayUtils.toObject((long[]) source); + } + if (componentType == short.class) { + return ArrayUtils.toObject((short[]) source); + } + + throw new IllegalArgumentException("Unsupported component type: " + componentType); + } + return (Object[]) source; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/PathToColumnMapping.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/PathToColumnMapping.java new file mode 100644 index 0000000000..64fd535337 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/PathToColumnMapping.java @@ -0,0 +1,35 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.AggregatePath; + +/** + * A mapping between {@link PersistentPropertyPath} and column names of a query. Column names are intentionally + * represented by {@link String} values, since this is what a {@link java.sql.ResultSet} uses, and since all the query + * columns should be aliases there is no need for quoting or similar as provided by + * {@link org.springframework.data.relational.core.sql.SqlIdentifier}. + * + * @author Jens Schauder + * @since 3.2 + */ +interface PathToColumnMapping { + + String column(AggregatePath path); + + String keyColumn(AggregatePath path); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/QueryMapper.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/QueryMapper.java new file mode 100644 index 0000000000..1d3ce3095e --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/QueryMapper.java @@ -0,0 +1,813 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.JDBCType; +import java.sql.SQLType; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.CriteriaDefinition; +import org.springframework.data.relational.core.query.CriteriaDefinition.Comparator; +import org.springframework.data.relational.core.query.ValueFunction; +import org.springframework.data.relational.core.sql.*; +import org.springframework.data.relational.domain.SqlSort; +import org.springframework.data.util.Pair; +import org.springframework.data.util.TypeInformation; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * Maps {@link CriteriaDefinition} and {@link Sort} objects considering mapping metadata and dialect-specific + * conversion. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @author Yan Qiang + * @author Mikhail Fedorov + * @since 3.0 + */ +public class QueryMapper { + + private final JdbcConverter converter; + private final MappingContext, RelationalPersistentProperty> mappingContext; + + /** + * Creates a new {@link QueryMapper} with the given {@link JdbcConverter}. + * + * @param converter must not be {@literal null}. + */ + public QueryMapper(JdbcConverter converter) { + + Assert.notNull(converter, "JdbcConverter must not be null"); + + this.converter = converter; + this.mappingContext = converter.getMappingContext(); + } + + /** + * Map the {@link Sort} object to apply field name mapping using {@link RelationalPersistentEntity the type to read}. + * + * @param sort must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return a List of {@link OrderByField} objects guaranteed to be not {@literal null}. + */ + public List getMappedSort(Table table, Sort sort, @Nullable RelationalPersistentEntity entity) { + + List mappedOrder = new ArrayList<>(); + + for (Sort.Order order : sort) { + + SqlSort.validate(order); + + OrderByField simpleOrderByField = createSimpleOrderByField(table, entity, order); + OrderByField orderBy = simpleOrderByField.withNullHandling(order.getNullHandling()); + mappedOrder.add(order.isAscending() ? 
orderBy.asc() : orderBy.desc()); + } + + return mappedOrder; + + } + + private OrderByField createSimpleOrderByField(Table table, RelationalPersistentEntity entity, Sort.Order order) { + + if (order instanceof SqlSort.SqlOrder sqlOrder && sqlOrder.isUnsafe()) { + return OrderByField.from(Expressions.just(sqlOrder.getProperty())); + } + + Field field = createPropertyField(entity, SqlIdentifier.unquoted(order.getProperty()), this.mappingContext); + return OrderByField.from(table.column(field.getMappedColumnName())); + } + + /** + * Map the {@link Expression} object to apply field name mapping using {@link RelationalPersistentEntity the type to + * read}. + * + * @param expression must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return the mapped {@link Expression}. Guaranteed to be not {@literal null}. + */ + Expression getMappedObject(Expression expression, @Nullable RelationalPersistentEntity entity) { + + if (entity == null || expression instanceof AsteriskFromTable) { + return expression; + } + + if (expression instanceof Column column) { + + Field field = createPropertyField(entity, column.getName()); + TableLike table = column.getTable(); + + Assert.state(table != null, String.format("The column %s must have a table set", column)); + + Column columnFromTable = table.column(field.getMappedColumnName()); + return column instanceof Aliased aliased ? columnFromTable.as(aliased.getAlias()) : columnFromTable; + } + + if (expression instanceof SimpleFunction function) { + + List arguments = function.getExpressions(); + List mappedArguments = new ArrayList<>(arguments.size()); + + for (Expression argument : arguments) { + mappedArguments.add(getMappedObject(argument, entity)); + } + + SimpleFunction mappedFunction = SimpleFunction.create(function.getFunctionName(), mappedArguments); + + return function instanceof Aliased aliased ? 
mappedFunction.as(aliased.getAlias()) : mappedFunction; + } + + throw new IllegalArgumentException(String.format("Cannot map %s", expression)); + } + + /** + * Map a {@link CriteriaDefinition} object into {@link Condition} and consider value/{@code NULL} bindings. + * + * @param parameterSource bind parameterSource object, must not be {@literal null}. + * @param criteria criteria definition to map, must not be {@literal null}. + * @param table must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return the mapped {@link Condition}. + */ + public Condition getMappedObject(MapSqlParameterSource parameterSource, CriteriaDefinition criteria, Table table, + @Nullable RelationalPersistentEntity entity) { + + Assert.notNull(parameterSource, "MapSqlParameterSource must not be null"); + Assert.notNull(criteria, "CriteriaDefinition must not be null"); + Assert.notNull(table, "Table must not be null"); + + if (criteria.isEmpty()) { + throw new IllegalArgumentException("Cannot map empty Criteria"); + } + + return unroll(criteria, table, entity, parameterSource); + } + + private Condition unroll(CriteriaDefinition criteria, Table table, @Nullable RelationalPersistentEntity entity, + MapSqlParameterSource parameterSource) { + + CriteriaDefinition current = criteria; + + // reverse unroll criteria chain + Map forwardChain = new HashMap<>(); + + while (current.hasPrevious()) { + forwardChain.put(current.getPrevious(), current); + current = current.getPrevious(); + } + + // perform the actual mapping + Condition mapped = getCondition(current, parameterSource, table, entity); + while (forwardChain.containsKey(current)) { + + CriteriaDefinition criterion = forwardChain.get(current); + Condition result = null; + + Condition condition = getCondition(criterion, parameterSource, table, entity); + if (condition != null) { + result = combine(mapped, criterion.getCombinator(), condition); + } + + if (result != null) { + 
mapped = result; + } + current = criterion; + } + + if (mapped == null) { + throw new IllegalStateException("Cannot map empty Criteria"); + } + + return mapped; + } + + @Nullable + private Condition unrollGroup(List criteria, Table table, + CriteriaDefinition.Combinator combinator, @Nullable RelationalPersistentEntity entity, + MapSqlParameterSource parameterSource) { + + Condition mapped = null; + for (CriteriaDefinition criterion : criteria) { + + if (criterion.isEmpty()) { + continue; + } + + Condition condition = unroll(criterion, table, entity, parameterSource); + + mapped = combine(mapped, combinator, condition); + } + + return mapped; + } + + @Nullable + private Condition getCondition(CriteriaDefinition criteria, MapSqlParameterSource parameterSource, Table table, + @Nullable RelationalPersistentEntity entity) { + + if (criteria.isEmpty()) { + return null; + } + + if (criteria.isGroup()) { + + Condition condition = unrollGroup(criteria.getGroup(), table, criteria.getCombinator(), entity, parameterSource); + + return condition == null ? 
null : Conditions.nest(condition); + } + + return mapCondition(criteria, parameterSource, table, entity); + } + + private Condition combine(@Nullable Condition currentCondition, CriteriaDefinition.Combinator combinator, + Condition nextCondition) { + + if (currentCondition == null) { + currentCondition = nextCondition; + } else if (combinator == CriteriaDefinition.Combinator.INITIAL) { + currentCondition = currentCondition.and(Conditions.nest(nextCondition)); + } else if (combinator == CriteriaDefinition.Combinator.AND) { + currentCondition = currentCondition.and(nextCondition); + } else if (combinator == CriteriaDefinition.Combinator.OR) { + currentCondition = currentCondition.or(nextCondition); + } else { + throw new IllegalStateException("Combinator " + combinator + " not supported"); + } + + return currentCondition; + } + + private Condition mapCondition(CriteriaDefinition criteria, MapSqlParameterSource parameterSource, Table table, + @Nullable RelationalPersistentEntity entity) { + + Field propertyField = createPropertyField(entity, criteria.getColumn(), this.mappingContext); + + // Single embedded entity + if (propertyField.isEmbedded()) { + return mapEmbeddedObjectCondition(criteria, parameterSource, table, + ((MetadataBackedField) propertyField).getPath().getLeafProperty()); + } + + TypeInformation actualType = propertyField.getTypeHint().getRequiredActualType(); + Column column = table.column(propertyField.getMappedColumnName()); + Object mappedValue; + SQLType sqlType; + Comparator comparator = criteria.getComparator(); + + if (criteria.getValue() instanceof JdbcValue settableValue) { + + mappedValue = convertValue(comparator, settableValue.getValue(), propertyField.getTypeHint()); + sqlType = getTypeHint(mappedValue, actualType.getType(), settableValue); + } else if (criteria.getValue() instanceof ValueFunction valueFunction) { + + mappedValue = valueFunction.map(v -> convertValue(comparator, v, propertyField.getTypeHint())); + sqlType = 
propertyField.getSqlType(); + + } else if (propertyField instanceof MetadataBackedField metadataBackedField // + && metadataBackedField.property != null // + && (criteria.getValue() == null || !criteria.getValue().getClass().isArray())) { + + RelationalPersistentProperty property = metadataBackedField.property; + JdbcValue jdbcValue = convertToJdbcValue(property, criteria.getValue()); + mappedValue = jdbcValue.getValue(); + sqlType = jdbcValue.getJdbcType() != null ? jdbcValue.getJdbcType() : propertyField.getSqlType(); + + } else { + + mappedValue = convertValue(comparator, criteria.getValue(), propertyField.getTypeHint()); + sqlType = propertyField.getSqlType(); + } + + return createCondition(column, mappedValue, sqlType, parameterSource, comparator, criteria.isIgnoreCase()); + } + + /** + * Converts values while taking specific value types like arrays, {@link Iterable}, or {@link Pair}. + * + * @param property the property to which the value relates. It determines the type to convert to. Must not be + * {@literal null}. + * @param value the value to be converted. + * @return a non null {@link JdbcValue} holding the converted value and the appropriate JDBC type information. 
+ */ + private JdbcValue convertToJdbcValue(RelationalPersistentProperty property, @Nullable Object value) { + + if (value == null) { + return JdbcValue.of(null, JDBCType.NULL); + } + + if (value instanceof Pair) { + + JdbcValue first = getWriteValue(property, ((Pair) value).getFirst()); + JdbcValue second = getWriteValue(property, ((Pair) value).getSecond()); + return JdbcValue.of(Pair.of(first.getValue(), second.getValue()), first.getJdbcType()); + } + + if (value instanceof Iterable) { + + List mapped = new ArrayList<>(); + SQLType jdbcType = null; + + for (Object o : (Iterable) value) { + + JdbcValue jdbcValue = getWriteValue(property, o); + if (jdbcType == null) { + jdbcType = jdbcValue.getJdbcType(); + } + + mapped.add(jdbcValue.getValue()); + } + + return JdbcValue.of(mapped, jdbcType); + } + + if (value.getClass().isArray()) { + + Object[] valueAsArray = (Object[]) value; + Object[] mappedValueArray = new Object[valueAsArray.length]; + SQLType jdbcType = null; + + for (int i = 0; i < valueAsArray.length; i++) { + + JdbcValue jdbcValue = getWriteValue(property, valueAsArray[i]); + if (jdbcType == null) { + jdbcType = jdbcValue.getJdbcType(); + } + + mappedValueArray[i] = jdbcValue.getValue(); + } + + return JdbcValue.of(mappedValueArray, jdbcType); + } + + return getWriteValue(property, value); + } + + /** + * Converts values to a {@link JdbcValue}. + * + * @param property the property to which the value relates. It determines the type to convert to. Must not be + * {@literal null}. + * @param value the value to be converted. + * @return a non null {@link JdbcValue} holding the converted value and the appropriate JDBC type information. 
+ */ + private JdbcValue getWriteValue(RelationalPersistentProperty property, Object value) { + + return converter.writeJdbcValue( // + value, // + converter.getColumnType(property), // + converter.getTargetSqlType(property) // + ); + } + + private Condition mapEmbeddedObjectCondition(CriteriaDefinition criteria, MapSqlParameterSource parameterSource, + Table table, RelationalPersistentProperty embeddedProperty) { + + RelationalPersistentEntity persistentEntity = this.mappingContext.getRequiredPersistentEntity(embeddedProperty); + + Assert.isInstanceOf(persistentEntity.getType(), criteria.getValue(), + () -> "Value must be of type " + persistentEntity.getType().getName() + " for embedded entity matching"); + + PersistentPropertyAccessor embeddedAccessor = persistentEntity.getPropertyAccessor(criteria.getValue()); + + Condition condition = null; + for (RelationalPersistentProperty nestedProperty : persistentEntity) { + + SqlIdentifier sqlIdentifier = nestedProperty.getColumnName(); + Object mappedNestedValue = convertValue(embeddedAccessor.getProperty(nestedProperty), + nestedProperty.getTypeInformation()); + SQLType sqlType = converter.getTargetSqlType(nestedProperty); + + Condition mappedCondition = createCondition(table.column(sqlIdentifier), mappedNestedValue, sqlType, + parameterSource, criteria.getComparator(), criteria.isIgnoreCase()); + + if (condition != null) { + condition = condition.and(mappedCondition); + } else { + condition = mappedCondition; + } + } + + return Conditions.nest(condition); + } + + @Nullable + private Object convertValue(Comparator comparator, @Nullable Object value, TypeInformation typeHint) { + + if ((Comparator.IN.equals(comparator) || Comparator.NOT_IN.equals(comparator)) + && value instanceof Collection collection && !collection.isEmpty()) { + + Collection mapped = new ArrayList<>(collection.size()); + + for (Object o : collection) { + mapped.add(convertValue(o, typeHint)); + } + + return mapped; + } + + return convertValue(value, 
typeHint); + } + + @Nullable + protected Object convertValue(@Nullable Object value, TypeInformation typeInformation) { + + if (value == null) { + return null; + } + + if (value instanceof Pair) { + + Pair pair = (Pair) value; + + Object first = convertValue(pair.getFirst(), typeInformation.getActualType() != null // + ? typeInformation.getRequiredActualType() + : TypeInformation.OBJECT); + + Object second = convertValue(pair.getSecond(), typeInformation.getActualType() != null // + ? typeInformation.getRequiredActualType() + : TypeInformation.OBJECT); + + return Pair.of(first, second); + } + + if (value.getClass().isArray() + && (TypeInformation.OBJECT.equals(typeInformation) || typeInformation.isCollectionLike())) { + return value; + } + + return this.converter.writeValue(value, typeInformation); + } + + protected MappingContext, RelationalPersistentProperty> getMappingContext() { + return this.mappingContext; + } + + private Condition createCondition(Column column, @Nullable Object mappedValue, SQLType sqlType, + MapSqlParameterSource parameterSource, Comparator comparator, boolean ignoreCase) { + + if (comparator.equals(Comparator.IS_NULL)) { + return column.isNull(); + } + + if (comparator.equals(Comparator.IS_NOT_NULL)) { + return column.isNotNull(); + } + + if (comparator == Comparator.IS_TRUE) { + + Expression bind = bindBoolean(column, parameterSource, + mappedValue instanceof Boolean ? (Boolean) mappedValue : true); + return column.isEqualTo(bind); + } + + if (comparator == Comparator.IS_FALSE) { + + Expression bind = bindBoolean(column, parameterSource, + mappedValue instanceof Boolean ? 
(Boolean) mappedValue : false); + return column.isEqualTo(bind); + } + + Expression columnExpression = column; + if (ignoreCase && (sqlType == JDBCType.VARCHAR || sqlType == JDBCType.NVARCHAR)) { + columnExpression = Functions.upper(column); + } + + if (comparator == Comparator.NOT_IN || comparator == Comparator.IN) { + + Condition condition; + + if (mappedValue instanceof Iterable) { + + List expressions = new ArrayList<>( + mappedValue instanceof Collection ? ((Collection) mappedValue).size() : 10); + + for (Object o : (Iterable) mappedValue) { + + expressions.add(bind(o, sqlType, parameterSource, column.getName().getReference())); + } + + condition = Conditions.in(columnExpression, expressions.toArray(new Expression[0])); + + } else { + + Expression expression = bind(mappedValue, sqlType, parameterSource, column.getName().getReference()); + + condition = Conditions.in(columnExpression, expression); + } + + if (comparator == Comparator.NOT_IN) { + condition = condition.not(); + } + + return condition; + } + + if (comparator == Comparator.BETWEEN || comparator == Comparator.NOT_BETWEEN) { + + Pair pair = (Pair) mappedValue; + + Expression begin = bind(pair.getFirst(), sqlType, parameterSource, column.getName().getReference(), ignoreCase); + Expression end = bind(pair.getSecond(), sqlType, parameterSource, column.getName().getReference(), ignoreCase); + + return comparator == Comparator.BETWEEN ? 
Conditions.between(columnExpression, begin, end) + : Conditions.notBetween(columnExpression, begin, end); + } + + String refName = column.getName().getReference(); + + switch (comparator) { + case EQ -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName, ignoreCase); + return Conditions.isEqual(columnExpression, expression); + } + case NEQ -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName, ignoreCase); + return Conditions.isEqual(columnExpression, expression).not(); + } + case LT -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName); + return column.isLess(expression); + } + case LTE -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName); + return column.isLessOrEqualTo(expression); + } + case GT -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName); + return column.isGreater(expression); + } + case GTE -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName); + return column.isGreaterOrEqualTo(expression); + } + case LIKE -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName, ignoreCase); + return Conditions.like(columnExpression, expression); + } + case NOT_LIKE -> { + Expression expression = bind(mappedValue, sqlType, parameterSource, refName, ignoreCase); + return Conditions.notLike(columnExpression, expression); + } + default -> throw new UnsupportedOperationException("Comparator " + comparator + " not supported"); + } + } + + private Expression bindBoolean(Column column, MapSqlParameterSource parameterSource, boolean value) { + + Object converted = converter.writeValue(value, TypeInformation.OBJECT); + return bind(converted, JDBCType.BIT, parameterSource, column.getName().getReference()); + } + + Field createPropertyField(@Nullable RelationalPersistentEntity entity, SqlIdentifier key) { + return entity == null ? 
new Field(key) : new MetadataBackedField(key, entity, mappingContext, converter); + } + + Field createPropertyField(@Nullable RelationalPersistentEntity entity, SqlIdentifier key, + MappingContext, RelationalPersistentProperty> mappingContext) { + return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext, converter); + } + + SQLType getTypeHint(@Nullable Object mappedValue, Class propertyType, JdbcValue settableValue) { + + if (mappedValue == null || propertyType.equals(Object.class)) { + return JdbcUtil.TYPE_UNKNOWN; + } + + if (mappedValue.getClass().equals(settableValue.getValue().getClass())) { + return JdbcUtil.TYPE_UNKNOWN; + } + + return settableValue.getJdbcType(); + } + + private Expression bind(@Nullable Object mappedValue, SQLType sqlType, MapSqlParameterSource parameterSource, + String name) { + return bind(mappedValue, sqlType, parameterSource, name, false); + } + + private Expression bind(@Nullable Object mappedValue, SQLType sqlType, MapSqlParameterSource parameterSource, + String name, boolean ignoreCase) { + + String uniqueName = getUniqueName(parameterSource, name); + + parameterSource.addValue(uniqueName, mappedValue, sqlType.getVendorTypeNumber()); + + return ignoreCase ? Functions.upper(SQL.bindMarker(":" + uniqueName)) : SQL.bindMarker(":" + uniqueName); + } + + private static String getUniqueName(MapSqlParameterSource parameterSource, String name) { + + Map values = parameterSource.getValues(); + + if (!values.containsKey(name)) { + return name; + } + + int counter = values.size(); + String uniqueName; + + do { + uniqueName = name + (counter++); + } while (values.containsKey(uniqueName)); + + return uniqueName; + } + + /** + * Value object to represent a field and its meta-information. + */ + protected static class Field { + + protected final SqlIdentifier name; + + /** + * Creates a new {@link Field} without meta-information but the given name. + * + * @param name must not be {@literal null} or empty. 
+ */ + Field(SqlIdentifier name) { + + Assert.notNull(name, "Name must not be null"); + this.name = name; + } + + public boolean isEmbedded() { + return false; + } + + /** + * Returns the key to be used in the mapped document eventually. + * + * @return the key to be used in the mapped document eventually. + */ + public SqlIdentifier getMappedColumnName() { + return this.name; + } + + public TypeInformation getTypeHint() { + return TypeInformation.OBJECT; + } + + public SQLType getSqlType() { + return JdbcUtil.TYPE_UNKNOWN; + } + } + + /** + * Extension of {@link Field} to be backed with mapping metadata. + */ + protected static class MetadataBackedField extends Field { + + private final RelationalPersistentEntity entity; + private final MappingContext, RelationalPersistentProperty> mappingContext; + private final RelationalPersistentProperty property; + private final @Nullable PersistentPropertyPath path; + private final boolean embedded; + private final SQLType sqlType; + + /** + * Creates a new {@link MetadataBackedField} with the given name, {@link RelationalPersistentEntity} and + * {@link MappingContext}. + * + * @param name must not be {@literal null} or empty. + * @param entity must not be {@literal null}. + * @param context must not be {@literal null}. + * @param converter must not be {@literal null}. + */ + protected MetadataBackedField(SqlIdentifier name, RelationalPersistentEntity entity, + MappingContext, RelationalPersistentProperty> context, + JdbcConverter converter) { + this(name, entity, context, null, converter); + } + + /** + * Creates a new {@link MetadataBackedField} with the given name, {@link RelationalPersistentEntity} and + * {@link MappingContext} with the given {@link RelationalPersistentProperty}. + * + * @param name must not be {@literal null} or empty. + * @param entity must not be {@literal null}. + * @param context must not be {@literal null}. + * @param property may be {@literal null}. + * @param converter may be {@literal null}. 
+ */ + protected MetadataBackedField(SqlIdentifier name, RelationalPersistentEntity entity, + MappingContext, RelationalPersistentProperty> context, + @Nullable RelationalPersistentProperty property, JdbcConverter converter) { + + super(name); + + Assert.notNull(entity, "MongoPersistentEntity must not be null"); + + this.entity = entity; + this.mappingContext = context; + + this.path = getPath(name.getReference()); + this.property = this.path == null ? property : this.path.getLeafProperty(); + this.sqlType = this.property != null ? converter.getTargetSqlType(this.property) : JdbcUtil.TYPE_UNKNOWN; + + if (this.property != null) { + this.embedded = this.property.isEmbedded(); + } else { + this.embedded = false; + } + } + + @Override + public SqlIdentifier getMappedColumnName() { + + if (isEmbedded()) { + throw new IllegalStateException("Cannot obtain a single column name for embedded property"); + } + + return this.path == null || this.path.getLeafProperty() == null ? super.getMappedColumnName() + : this.path.getLeafProperty().getColumnName(); + } + + /** + * Returns the {@link PersistentPropertyPath} for the given {@code pathExpression}. 
+ */ + @Nullable + private PersistentPropertyPath getPath(String pathExpression) { + + try { + + PropertyPath path = PropertyPath.from(pathExpression, this.entity.getTypeInformation()); + + if (isPathToJavaLangClassProperty(path)) { + return null; + } + + return this.mappingContext.getPersistentPropertyPath(path); + } catch (PropertyReferenceException | InvalidPersistentPropertyPath e) { + return null; + } + } + + private boolean isPathToJavaLangClassProperty(PropertyPath path) { + return path.getType().equals(Class.class) && path.getLeafProperty().getOwningType().getType().equals(Class.class); + } + + @Nullable + public PersistentPropertyPath getPath() { + return path; + } + + @Override + public boolean isEmbedded() { + return this.embedded; + } + + @Override + public TypeInformation getTypeHint() { + + if (this.property == null) { + return super.getTypeHint(); + } + + if (this.property.getType().isPrimitive()) { + return TypeInformation.of(ClassUtils.resolvePrimitiveIfNecessary(this.property.getType())); + } + + if (this.property.getType().isArray()) { + return this.property.getTypeInformation(); + } + + return this.property.getTypeInformation(); + } + + @Override + public SQLType getSqlType() { + return this.sqlType; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ReadingDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ReadingDataAccessStrategy.java new file mode 100644 index 0000000000..5b00f99dd5 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ReadingDataAccessStrategy.java @@ -0,0 +1,165 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core.convert; + +import java.util.Optional; +import java.util.stream.Stream; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.relational.core.query.Query; +import org.springframework.lang.Nullable; + +/** + * The finding methods of a {@link DataAccessStrategy}. + * + * @author Jens Schauder + * @author Sergey Korotaev + * @since 3.2 + */ +interface ReadingDataAccessStrategy { + + /** + * Loads a single entity identified by type and id. + * + * @param id the id of the entity to load. Must not be {@code null}. + * @param domainType the domain type of the entity. Must not be {@code null}. + * @param the type of the entity. + * @return Might return {@code null}. + */ + @Nullable + T findById(Object id, Class domainType); + + /** + * Loads all entities of the given type. + * + * @param domainType the type of entities to load. Must not be {@code null}. + * @param the type of entities to load. + * @return Guaranteed to be not {@code null}. + */ + Iterable findAll(Class domainType); + + /** + * Loads all entities of the given type to a {@link Stream}. + * + * @param domainType the type of entities to load. Must not be {@code null}. + * @param the type of entities to load. + * @return Guaranteed to be not {@code null}. + */ + Stream streamAll(Class domainType); + + /** + * Loads all entities that match one of the ids passed as an argument. 
It is not guaranteed that the number of ids + * passed in matches the number of entities returned. + * + * @param ids the Ids of the entities to load. Must not be {@code null}. + * @param domainType the type of entities to load. Must not be {@code null}. + * @param type of entities to load. + * @return the loaded entities. Guaranteed to be not {@code null}. + */ + Iterable findAllById(Iterable ids, Class domainType); + + /** + * Loads all entities that match one of the ids passed as an argument to a {@link Stream}. + * It is not guaranteed that the number of ids passed in matches the number of entities returned. + * + * @param ids the Ids of the entities to load. Must not be {@code null}. + * @param domainType the type of entities to load. Must not be {@code null}. + * @param type of entities to load. + * @return the loaded entities. Guaranteed to be not {@code null}. + */ + Stream streamAllByIds(Iterable ids, Class domainType); + + /** + * Loads all entities of the given type, sorted. + * + * @param domainType the type of entities to load. Must not be {@code null}. + * @param the type of entities to load. + * @param sort the sorting information. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + * @since 2.0 + */ + Iterable findAll(Class domainType, Sort sort); + + /** + * Loads all entities of the given type to a {@link Stream}, sorted. + * + * @param domainType the type of entities to load. Must not be {@code null}. + * @param the type of entities to load. + * @param sort the sorting information. Must not be {@code null}. + * @return Guaranteed to be not {@code null}. + * @since 2.0 + */ + Stream streamAll(Class domainType, Sort sort); + + /** + * Loads all entities of the given type, paged and sorted. + * + * @param domainType the type of entities to load. Must not be {@code null}. + * @param the type of entities to load. + * @param pageable the pagination information. Must not be {@code null}. 
+ * @return Guaranteed to be not {@code null}. + * @since 2.0 + */ + Iterable findAll(Class domainType, Pageable pageable); + + /** + * Execute a {@code SELECT} query and convert the resulting item to an entity ensuring exactly one result. + * + * @param query must not be {@literal null}. + * @param domainType the type of entities. Must not be {@code null}. + * @return exactly one result or {@link Optional#empty()} if no match found. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + Optional findOne(Query query, Class domainType); + + /** + * Execute a {@code SELECT} query and convert the resulting items to a {@link Iterable}. + * + * @param query must not be {@literal null}. + * @param domainType the type of entities. Must not be {@code null}. + * @return a non-null list with all the matching results. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + Iterable findAll(Query query, Class domainType); + + /** + * Execute a {@code SELECT} query and convert the resulting items to a {@link Stream}. + * + * @param query must not be {@literal null}. + * @param domainType the type of entities. Must not be {@code null}. + * @return a non-null list with all the matching results. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + Stream streamAll(Query query, Class domainType); + + /** + * Execute a {@code SELECT} query and convert the resulting items to a {@link Iterable}. Applies the {@link Pageable} + * to the result. + * + * @param query must not be {@literal null}. + * @param domainType the type of entities. Must not be {@literal null}. + * @param pageable the pagination that should be applied. Must not be {@literal null}. + * @return a non-null list with all the matching results. 
+ * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + * @since 3.0 + */ + Iterable findAll(Query query, Class domainType, Pageable pageable); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RelationResolver.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RelationResolver.java new file mode 100644 index 0000000000..fc3c0669b0 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RelationResolver.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Resolves relations within an aggregate. + * + * @author Jens Schauder + * @since 1.1 + */ +public interface RelationResolver { + + /** + * Finds all entities reachable via {@literal path}. + * + * @param identifier the combination of Id, map keys and list indexes that identify the parent of the entity to be + * loaded. Must not be {@literal null}. + * @param path the path from the aggregate root to the entities to be resolved. Must not be {@literal null}. + * @return guaranteed to be not {@literal null}. 
+ */ + Iterable findAllByPath(Identifier identifier, + PersistentPropertyPath path); +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ResultSetAccessor.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ResultSetAccessor.java new file mode 100644 index 0000000000..5cb10291fb --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ResultSetAccessor.java @@ -0,0 +1,112 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.data.mapping.MappingException; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.LinkedCaseInsensitiveMap; + +/** + * Wrapper value object for a {@link java.sql.ResultSet} to be able to access raw values by + * {@link org.springframework.data.relational.core.mapping.RelationalPersistentProperty} references. Provides fast + * lookup of columns by name, including for absent columns. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @since 2.0 + */ +class ResultSetAccessor { + + private static final Log LOG = LogFactory.getLog(ResultSetAccessor.class); + + private final ResultSet resultSet; + + private final Map indexLookUp; + + ResultSetAccessor(ResultSet resultSet) { + + this.resultSet = resultSet; + this.indexLookUp = indexColumns(resultSet); + } + + private static Map indexColumns(ResultSet resultSet) { + + try { + + ResultSetMetaData metaData = resultSet.getMetaData(); + int columnCount = metaData.getColumnCount(); + + Map index = new LinkedCaseInsensitiveMap<>(columnCount); + + for (int i = 1; i <= columnCount; i++) { + + String label = metaData.getColumnLabel(i); + + if (index.containsKey(label)) { + LOG.warn(String.format("ResultSet contains %s multiple times", label)); + continue; + } + + index.put(label, i); + } + + return index; + } catch (SQLException se) { + throw new MappingException("Cannot obtain result metadata", se); + } + } + + /** + * Returns the value if the result set contains the {@code columnName}. + * + * @param columnName the column name (label). + * @return + * @see ResultSet#getObject(int) + */ + @Nullable + public Object getObject(String columnName) { + + try { + + int index = findColumnIndex(columnName); + return index > 0 ? JdbcUtils.getResultSetValue(resultSet, index) : null; + } catch (SQLException o_O) { + throw new MappingException(String.format("Could not read value %s from result set", columnName), o_O); + } + } + + private int findColumnIndex(String columnName) { + return indexLookUp.getOrDefault(columnName, -1); + } + + /** + * Returns {@literal true} if the result set contains the {@code columnName}. + * + * @param columnName the column name (label). 
+ * @return + */ + public boolean hasValue(String columnName) { + return indexLookUp.containsKey(columnName); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ResultSetAccessorPropertyAccessor.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ResultSetAccessorPropertyAccessor.java new file mode 100644 index 0000000000..dc5576a11e --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/ResultSetAccessorPropertyAccessor.java @@ -0,0 +1,69 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.PropertyAccessor; +import org.springframework.expression.TypedValue; +import org.springframework.lang.Nullable; + +/** + * {@link PropertyAccessor} to access a column from a {@link ResultSetAccessor}. 
+ * + * @author Mark Paluch + * @since 2.1 + */ +class ResultSetAccessorPropertyAccessor implements PropertyAccessor { + + static final PropertyAccessor INSTANCE = new ResultSetAccessorPropertyAccessor(); + + @Override + public Class[] getSpecificTargetClasses() { + return new Class[] { ResultSetAccessor.class }; + } + + @Override + public boolean canRead(EvaluationContext context, @Nullable Object target, String name) { + return target instanceof ResultSetAccessor resultSetAccessor && resultSetAccessor.hasValue(name); + } + + @Override + public TypedValue read(EvaluationContext context, @Nullable Object target, String name) { + + if (target == null) { + return TypedValue.NULL; + } + + Object value = ((ResultSetAccessor) target).getObject(name); + + if (value == null) { + return TypedValue.NULL; + } + + return new TypedValue(value); + } + + @Override + public boolean canWrite(EvaluationContext context, @Nullable Object target, String name) { + return false; + } + + @Override + public void write(EvaluationContext context, @Nullable Object target, String name, @Nullable Object newValue) { + throw new UnsupportedOperationException(); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentExtractorSupport.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentExtractorSupport.java new file mode 100644 index 0000000000..2d27a453ac --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentExtractorSupport.java @@ -0,0 +1,516 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.lang.Nullable; + +/** + * Support class for {@code ResultSet}-driven extractor implementations extracting {@link RowDocument documents} from + * flexible input streams. + * + * @author Mark Paluch + * @since 3.2 + */ +abstract class RowDocumentExtractorSupport { + + /** + * Result adapter to obtain values and column metadata. + * + * @param + */ + interface TabularResultAdapter { + + /** + * Read a value from the row input at {@code index}. + * + * @param row the row to read from. + * @param index the column index. + * @return the column value. Can be {@code null}. + */ + @Nullable + Object getObject(RS row, int index); + + /** + * Retrieve a column name to column index map for access by column name. + * + * @param result the result set to read from. + * @return column name to column index map. 
+ */ + Map getColumnMap(RS result); + } + + /** + * Reading context encapsulating value reading and column handling. + * + * @param + */ + protected static class AggregateContext { + + private final TabularResultAdapter adapter; + private final RelationalMappingContext context; + private final PathToColumnMapping propertyToColumn; + private final Map columnMap; + + protected AggregateContext(TabularResultAdapter adapter, RelationalMappingContext context, + PathToColumnMapping propertyToColumn, Map columnMap) { + + this.adapter = adapter; + this.context = context; + this.propertyToColumn = propertyToColumn; + this.columnMap = columnMap; + } + + public RelationalPersistentEntity getRequiredPersistentEntity(RelationalPersistentProperty property) { + return context.getRequiredPersistentEntity(property); + } + + public String getColumnName(AggregatePath path) { + return propertyToColumn.column(path); + } + + public String getKeyColumnName(AggregatePath path) { + return propertyToColumn.keyColumn(path); + } + + public boolean containsColumn(String columnName) { + return columnMap.containsKey(columnName); + } + + @Nullable + public Object getObject(RS row, String columnName) { + return adapter.getObject(row, columnMap.get(columnName)); + } + + /** + * Collect the value for {@link AggregatePath} from the {@code row} and add it under {@link SqlIdentifier} to the + * {@link RowDocument}. + */ + void collectValue(RS row, AggregatePath source, RowDocument document, SqlIdentifier targetName) { + + String columnLabel = propertyToColumn.column(source); + Integer index = columnMap.get(columnLabel); + + if (index == null) { + return; + } + + Object resultSetValue = adapter.getObject(row, index); + if (resultSetValue == null) { + return; + } + + document.put(targetName.getReference(), resultSetValue); + } + + } + + /** + * Sink abstraction for tabular result sets that represent an aggregate including all of its nested entities. 
Reading + * is driven by the results and readers receive a feed of rows to extract the data they are looking for. + *

+ * Sinks aim to produce a {@link #getResult() result}. Based on the inputs, results may be {@link #hasResult() + * present} or absent. + */ + protected abstract static class TabularSink { + + /** + * Accept a row of data and process their results to form potentially a {@link #getResult() result}. + * + * @param row the row to read from. + */ + abstract void accept(RS row); + + /** + * @return {@code true} if the sink has produced a result. + */ + abstract boolean hasResult(); + + /** + * Retrieve the sink result if present. + * + * @return the sink result. + */ + @Nullable + abstract Object getResult(); + + /** + * Reset the sink to prepare for the next result. + */ + abstract void reset(); + } + + /** + * Entity-driven sink to form a {@link RowDocument document} representing the underlying entities properties. + * + * @param + */ + protected static class RowDocumentSink extends TabularSink { + + private final AggregateContext aggregateContext; + private final RelationalPersistentEntity entity; + private final AggregatePath basePath; + private RowDocument result; + + private String keyColumnName; + + private @Nullable Object key; + private final Map> readerState = new LinkedHashMap<>(); + + public RowDocumentSink(AggregateContext aggregateContext, RelationalPersistentEntity entity, + AggregatePath basePath) { + + this.aggregateContext = aggregateContext; + this.entity = entity; + this.basePath = basePath; + + String keyColumnName; + if (entity.hasIdProperty()) { + keyColumnName = aggregateContext.getColumnName(basePath.append(entity.getRequiredIdProperty())); + } else { + keyColumnName = aggregateContext.getColumnName(basePath); + } + + this.keyColumnName = keyColumnName; + } + + @Override + void accept(RS row) { + + boolean first = result == null; + + if (first) { + RowDocument document = new RowDocument(); + readFirstRow(row, document); + this.result = document; + } + + for (TabularSink reader : readerState.values()) { + reader.accept(row); + } + } + + /** + * 
First row contains the root aggregate and all headers for nested collections/maps/entities. + */ + private void readFirstRow(RS row, RowDocument document) { + + // key marker + if (aggregateContext.containsColumn(keyColumnName)) { + key = aggregateContext.getObject(row, keyColumnName); + } + + readEntity(row, document, basePath, entity); + } + + private void readEntity(RS row, RowDocument document, AggregatePath basePath, + RelationalPersistentEntity entity) { + + for (RelationalPersistentProperty property : entity) { + + AggregatePath path = basePath.append(property); + + if (property.isEntity() && !property.isEmbedded() && (property.isCollectionLike() || property.isQualified())) { + + readerState.put(property, new ContainerSink<>(aggregateContext, property, path)); + continue; + } + + if (property.isEmbedded()) { + + RelationalPersistentEntity embeddedEntity = aggregateContext.getRequiredPersistentEntity(property); + readEntity(row, document, path, embeddedEntity); + continue; + } + + if (property.isEntity()) { + readerState.put(property, + new RowDocumentSink<>(aggregateContext, aggregateContext.getRequiredPersistentEntity(property), path)); + continue; + } + + aggregateContext.collectValue(row, path, document, property.getColumnName()); + } + } + + /** + * Read properties of embedded from the result set and store them under their column names + */ + private void collectEmbeddedValues(RS row, RowDocument document, RelationalPersistentProperty property, + AggregatePath path) { + + RelationalPersistentEntity embeddedHolder = aggregateContext.getRequiredPersistentEntity(property); + for (RelationalPersistentProperty embeddedProperty : embeddedHolder) { + + if (embeddedProperty.isQualified() || embeddedProperty.isCollectionLike() || embeddedProperty.isEntity()) { + // hell, no! 
+ throw new UnsupportedOperationException("Reading maps and collections into embeddable isn't supported yet"); + } + + AggregatePath nested = path.append(embeddedProperty); + aggregateContext.collectValue(row, nested, document, nested.getColumnInfo().name()); + } + } + + @Override + boolean hasResult() { + + if (result == null) { + return false; + } + + for (TabularSink value : readerState.values()) { + if (value.hasResult()) { + return true; + } + } + + return !(result.isEmpty() && key == null); + } + + @Override + RowDocument getResult() { + + readerState.forEach((property, reader) -> { + + if (reader.hasResult()) { + result.put(property.getColumnName().getReference(), reader.getResult()); + } + }); + + return result; + } + + @Override + void reset() { + + result = null; + readerState.clear(); + } + } + + /** + * Sink using a single column to retrieve values from. + * + * @param + */ + private static class SingleColumnSink extends TabularSink { + + private final AggregateContext aggregateContext; + private final String columnName; + + private @Nullable Object value; + + public SingleColumnSink(AggregateContext aggregateContext, AggregatePath path) { + + this.aggregateContext = aggregateContext; + this.columnName = path.getColumnInfo().name().getReference(); + } + + @Override + void accept(RS row) { + + if (aggregateContext.containsColumn(columnName)) { + value = aggregateContext.getObject(row, columnName); + } else { + value = null; + } + } + + @Override + boolean hasResult() { + return value != null; + } + + @Override + Object getResult() { + return getValue(); + } + + @Nullable + public Object getValue() { + return value; + } + + @Override + void reset() { + value = null; + } + } + + /** + * A sink that aggregates multiple values in a {@link CollectionContainer container} such as List or Map. Inner values + * are determined by the value type while the key type is expected to be a simple type such a string or a number. 
+ * + * @param + */ + private static class ContainerSink extends TabularSink { + + private final String keyColumn; + private final AggregateContext aggregateContext; + + private Object key; + private boolean hasResult = false; + + private final TabularSink componentReader; + private final CollectionContainer container; + + public ContainerSink(AggregateContext aggregateContext, RelationalPersistentProperty property, + AggregatePath path) { + + this.aggregateContext = aggregateContext; + this.keyColumn = aggregateContext.getKeyColumnName(path); + this.componentReader = property.isEntity() + ? new RowDocumentSink<>(aggregateContext, aggregateContext.getRequiredPersistentEntity(property), path) + : new SingleColumnSink<>(aggregateContext, path); + + this.container = property.isMap() ? new MapContainer() : new ListContainer(); + } + + @Override + void accept(RS row) { + + if (!aggregateContext.containsColumn(keyColumn)) { + return; + } + + Object key = aggregateContext.getObject(row, keyColumn); + if (key == null && !hasResult) { + return; + } + + boolean keyChange = key != null && !key.equals(this.key); + + if (!hasResult) { + hasResult = true; + } + + if (keyChange) { + if (componentReader.hasResult()) { + container.add(this.key, componentReader.getResult()); + componentReader.reset(); + } + } + + if (key != null) { + this.key = key; + } + + this.componentReader.accept(row); + } + + @Override + public boolean hasResult() { + return hasResult; + } + + @Override + public Object getResult() { + + if (componentReader.hasResult()) { + + container.add(this.key, componentReader.getResult()); + componentReader.reset(); + } + + return container.get(); + } + + @Override + void reset() { + hasResult = false; + } + } + + /** + * Base class defining method signatures to add values to a container that can hold multiple values, such as a List or + * Map. + */ + private abstract static class CollectionContainer { + + /** + * Append the value. + * + * @param key the entry key/index. 
+ * @param value the entry value, can be {@literal null}. + */ + abstract void add(Object key, @Nullable Object value); + + /** + * Return the container holding the values that were previously added. + * + * @return the container holding the values that were previously added. + */ + abstract Object get(); + } + + // TODO: Are we 0 or 1 based? + private static class ListContainer extends CollectionContainer { + + private final Map list = new TreeMap<>(Comparator.comparing(Number::longValue)); + + @Override + public void add(Object key, @Nullable Object value) { + list.put(((Number) key).intValue() - 1, value); + } + + @Override + public List get() { + + List result = new ArrayList<>(list.size()); + + // TODO: How do we go about padding? Should we insert null values? + list.forEach((index, o) -> { + + while (result.size() < index.intValue()) { + result.add(null); + } + + result.add(o); + }); + + return result; + } + } + + private static class MapContainer extends CollectionContainer { + + private final Map map = new LinkedHashMap<>(); + + @Override + public void add(Object key, @Nullable Object value) { + map.put(key, value); + } + + @Override + public Map get() { + return new LinkedHashMap<>(map); + } + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractor.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractor.java new file mode 100644 index 0000000000..f4e52538a8 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractor.java @@ -0,0 +1,284 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.dao.DataRetrievalFailureException; +import org.springframework.data.jdbc.core.convert.RowDocumentExtractorSupport.AggregateContext; +import org.springframework.data.jdbc.core.convert.RowDocumentExtractorSupport.RowDocumentSink; +import org.springframework.data.jdbc.core.convert.RowDocumentExtractorSupport.TabularResultAdapter; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.LinkedCaseInsensitiveMap; + +/** + * {@link ResultSet}-driven extractor to extract {@link RowDocument documents}. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 3.2 + */ +class RowDocumentResultSetExtractor { + + private static final Log log = LogFactory.getLog(RowDocumentResultSetExtractor.class); + public static final String DUPLICATE_COLUMN_WARNING = "ResultSet contains column \"%s\" multiple times. 
Later column index is %s"; + + private final RelationalMappingContext context; + private final PathToColumnMapping propertyToColumn; + + RowDocumentResultSetExtractor(RelationalMappingContext context, PathToColumnMapping propertyToColumn) { + + this.context = context; + this.propertyToColumn = propertyToColumn; + } + + /** + * Create a {@link RowDocument} from the current {@link ResultSet} row. + * + * @param resultSet must not be {@literal null}. + * @return + * @throws SQLException + */ + static RowDocument toRowDocument(ResultSet resultSet) throws SQLException { + + ResultSetMetaData md = resultSet.getMetaData(); + int columnCount = md.getColumnCount(); + RowDocument document = new RowDocument(columnCount); + + for (int i = 0; i < columnCount; i++) { + + Object rsv = JdbcUtils.getResultSetValue(resultSet, i + 1); + String columnName = JdbcUtils.lookupColumnName(md, i+1); + Object old = document.putIfAbsent(columnName, rsv instanceof Array a ? a.getArray() : rsv); + if (old != null) { + log.warn(DUPLICATE_COLUMN_WARNING.formatted(columnName, i)); + } + } + + return document; + } + + /** + * Adapter to extract values and column metadata from a {@link ResultSet}. 
+ */ + enum ResultSetAdapter implements TabularResultAdapter { + + INSTANCE; + + @Override + public Object getObject(ResultSet row, int index) { + + try { + + Object resultSetValue = JdbcUtils.getResultSetValue(row, index); + + if (resultSetValue instanceof Array a) { + return a.getArray(); + } + + return resultSetValue; + } catch (SQLException e) { + throw new DataRetrievalFailureException("Cannot retrieve column " + index + " from ResultSet", e); + } + } + + @Override + public Map getColumnMap(ResultSet result) { + + try { + + ResultSetMetaData metaData = result.getMetaData(); + Map columns = new LinkedCaseInsensitiveMap<>(metaData.getColumnCount()); + + for (int i = 0; i < metaData.getColumnCount(); i++) { + + String columnLabel = metaData.getColumnLabel(i + 1); + Object old = columns.put(columnLabel, i + 1); + if (old != null) { + log.warn(DUPLICATE_COLUMN_WARNING.formatted( columnLabel, i)); + } + } + return columns; + } catch (SQLException e) { + throw new DataRetrievalFailureException("Cannot retrieve ColumnMap from ResultSet", e); + } + } + } + + /** + * Reads the next {@link RowDocument} from the {@link ResultSet}. The result set can be pristine (i.e. + * {@link ResultSet#isBeforeFirst()}) or pointing already at a row. + * + * @param entity entity defining the document structure. + * @param resultSet the result set to consume. + * @return a {@link RowDocument}. + * @throws SQLException if thrown by the JDBC API. + * @throws IllegalStateException if the {@link ResultSet#isAfterLast() fully consumed}. + */ + public RowDocument extractNextDocument(Class entity, ResultSet resultSet) throws SQLException { + return extractNextDocument(context.getRequiredPersistentEntity(entity), resultSet); + } + + /** + * Reads the next {@link RowDocument} from the {@link ResultSet}. The result set can be pristine (i.e. + * {@link ResultSet#isBeforeFirst()}) or pointing already at a row. + * + * @param entity entity defining the document structure. 
+ * @param resultSet the result set to consume. + * @return a {@link RowDocument}. + * @throws SQLException if thrown by the JDBC API. + * @throws IllegalStateException if the {@link ResultSet#isAfterLast() fully consumed}. + */ + public RowDocument extractNextDocument(RelationalPersistentEntity entity, ResultSet resultSet) + throws SQLException { + + Iterator iterator = iterate(entity, resultSet); + + if (!iterator.hasNext()) { + throw new IllegalStateException("ResultSet is fully consumed"); + } + + return iterator.next(); + } + + /** + * Obtain a {@link Iterator} to retrieve {@link RowDocument documents} from a {@link ResultSet}. + * + * @param entity the entity to determine the document structure. + * @param rs the input result set. + * @return an iterator to consume the {@link ResultSet} as RowDocuments. + * @throws SQLException if thrown by the JDBC API. + */ + public Iterator iterate(RelationalPersistentEntity entity, ResultSet rs) throws SQLException { + return new RowDocumentIterator(entity, rs); + } + + /** + * Iterator implementation that advances through the {@link ResultSet} and feeds its input into a + * {@link org.springframework.data.jdbc.core.convert.RowDocumentExtractorSupport.RowDocumentSink}. + */ + private class RowDocumentIterator implements Iterator { + + private final ResultSet resultSet; + private final AggregatePath rootPath; + private final RelationalPersistentEntity rootEntity; + private final Integer identifierIndex; + private final AggregateContext aggregateContext; + + /** + * Answers the question if the internal {@link ResultSet} points at an actual row. 
+ */ + private boolean hasNext; + + RowDocumentIterator(RelationalPersistentEntity entity, ResultSet resultSet) { + + ResultSetAdapter adapter = ResultSetAdapter.INSTANCE; + + this.rootPath = context.getAggregatePath(entity); + this.rootEntity = entity; + + String idColumn = propertyToColumn.column(rootPath.append(entity.getRequiredIdProperty())); + Map columns = adapter.getColumnMap(resultSet); + this.aggregateContext = new AggregateContext<>(adapter, context, propertyToColumn, columns); + + this.resultSet = resultSet; + this.identifierIndex = columns.get(idColumn); + this.hasNext = hasRow(resultSet); + } + + private static boolean hasRow(ResultSet resultSet) { + + // If we are before the first row we need to advance to the first row. + try { + if (resultSet.isBeforeFirst()) { + return resultSet.next(); + } + } catch (SQLException e) { + // seems that isBeforeFirst is not implemented + } + + // if we are after the last row we are done and not pointing a valid row and also can't advance to one. + try { + if (resultSet.isAfterLast()) { + return false; + } + } catch (SQLException e) { + // seems that isAfterLast is not implemented + } + + // if we arrived here we know almost nothing. + // maybe isBeforeFirst or isBeforeLast aren't implemented + // or the ResultSet is empty. + + try { + resultSet.getObject(1); + // we can see actual data, so we are looking at a current row. + return true; + } catch (SQLException ignored) {} + + try { + return resultSet.next(); + } catch (SQLException e) { + // we aren't looking at a row, but we can't advance either. 
+ // so it seems we are facing an empty ResultSet + return false; + } + } + + @Override + public boolean hasNext() { + return hasNext; + } + + @Override + @Nullable + public RowDocument next() { + + RowDocumentSink reader = new RowDocumentSink<>(aggregateContext, rootEntity, rootPath); + Object key = ResultSetAdapter.INSTANCE.getObject(resultSet, identifierIndex); + + try { + + do { + Object nextKey = ResultSetAdapter.INSTANCE.getObject(resultSet, identifierIndex); + + if (nextKey != null && !nextKey.equals(key)) { + break; + } + + reader.accept(resultSet); + hasNext = resultSet.next(); + } while (hasNext); + } catch (SQLException e) { + throw new DataRetrievalFailureException("Cannot advance ResultSet", e); + } + + return reader.getResult(); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SequenceEntityCallbackDelegate.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SequenceEntityCallbackDelegate.java new file mode 100644 index 0000000000..00efd7fcff --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SequenceEntityCallbackDelegate.java @@ -0,0 +1,102 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.NumberUtils; + +/** + * Support class for generating identifier values through a database sequence. + * + * @author Mikhail Polivakha + * @author Mark Paluch + * @since 3.5 + * @see org.springframework.data.relational.core.mapping.Sequence + */ +class SequenceEntityCallbackDelegate { + + private static final Log LOG = LogFactory.getLog(SequenceEntityCallbackDelegate.class); + private final static MapSqlParameterSource EMPTY_PARAMETERS = new MapSqlParameterSource(); + + private final Dialect dialect; + private final NamedParameterJdbcOperations operations; + + public SequenceEntityCallbackDelegate(Dialect dialect, NamedParameterJdbcOperations operations) { + this.dialect = dialect; + this.operations = operations; + } + + @SuppressWarnings("unchecked") + protected void generateSequenceValue(RelationalPersistentProperty property, + PersistentPropertyAccessor accessor) { + + Object sequenceValue = getSequenceValue(property); + + if (sequenceValue == null) { + return; + } + + Class targetType = ClassUtils.resolvePrimitiveIfNecessary(property.getType()); + if (sequenceValue instanceof Number && Number.class.isAssignableFrom(targetType)) { + sequenceValue = 
NumberUtils.convertNumberToTargetClass((Number) sequenceValue, + (Class) targetType); + } + + accessor.setProperty(property, sequenceValue); + } + + protected boolean hasValue(PersistentProperty property, PersistentPropertyAccessor propertyAccessor) { + + Object identifier = propertyAccessor.getProperty(property); + + if (property.getType().isPrimitive()) { + + Object primitiveDefault = ReflectionUtils.getPrimitiveDefault(property.getType()); + return !primitiveDefault.equals(identifier); + } + + return identifier != null; + } + + private @Nullable Object getSequenceValue(RelationalPersistentProperty property) { + + SqlIdentifier sequence = property.getSequence(); + + if (sequence != null && !dialect.getIdGeneration().sequencesSupported()) { + LOG.warn(""" + Aggregate type '%s' is marked for sequence usage but configured dialect '%s' + does not support sequences. Falling back to identity columns. + """.formatted(property.getOwner().getType(), ClassUtils.getQualifiedName(dialect.getClass()))); + return null; + } + + String sql = dialect.getIdGeneration().createSequenceQuery(sequence); + return operations.queryForObject(sql, EMPTY_PARAMETERS, (rs, rowNum) -> rs.getObject(1)); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SingleQueryDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SingleQueryDataAccessStrategy.java new file mode 100644 index 0000000000..d367c9c0c0 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SingleQueryDataAccessStrategy.java @@ -0,0 +1,115 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core.convert; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.query.Query; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; + +/** + * A {@link ReadingDataAccessStrategy} that uses an {@link AggregateReader} to load entities with a single query. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Sergey Korotaev + * @since 3.2 + */ +class SingleQueryDataAccessStrategy implements ReadingDataAccessStrategy { + + private final RelationalMappingContext mappingContext; + private final AggregateReader aggregateReader; + + public SingleQueryDataAccessStrategy(Dialect dialect, JdbcConverter converter, + NamedParameterJdbcOperations jdbcTemplate) { + + this.mappingContext = converter.getMappingContext(); + this.aggregateReader = new AggregateReader(dialect, converter, jdbcTemplate); + } + + @Override + public T findById(Object id, Class domainType) { + return aggregateReader.findById(id, getPersistentEntity(domainType)); + } + + @Override + public List findAll(Class domainType) { + return aggregateReader.findAll(getPersistentEntity(domainType)); + } + + @Override + public Stream streamAll(Class domainType) { + throw new UnsupportedOperationException(); + } + + @Override + public List findAllById(Iterable ids, Class domainType) { + return aggregateReader.findAllById(ids, getPersistentEntity(domainType)); + } + + @Override + public Stream streamAllByIds(Iterable ids, Class domainType) { + throw new UnsupportedOperationException(); + } + + @Override + public List findAll(Class domainType, Sort sort) { + throw new UnsupportedOperationException(); + } + + @Override + public Stream streamAll(Class domainType, Sort sort) { + throw new UnsupportedOperationException(); + } + + @Override + public List findAll(Class domainType, Pageable pageable) { + throw new UnsupportedOperationException(); + } + + @Override + public Optional findOne(Query query, Class domainType) { + return Optional.ofNullable(aggregateReader.findOne(query, getPersistentEntity(domainType))); + } + + @Override + public List findAll(Query query, Class domainType) { + return aggregateReader.findAll(query, getPersistentEntity(domainType)); + } + + @Override + public Stream streamAll(Query query, Class domainType) { + throw new 
UnsupportedOperationException(); + } + + @Override + public List findAll(Query query, Class domainType, Pageable pageable) { + throw new UnsupportedOperationException(); + } + + @SuppressWarnings("unchecked") + private RelationalPersistentEntity getPersistentEntity(Class domainType) { + return (RelationalPersistentEntity) mappingContext.getRequiredPersistentEntity(domainType); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SingleQueryFallbackDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SingleQueryFallbackDataAccessStrategy.java new file mode 100644 index 0000000000..962e19831c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SingleQueryFallbackDataAccessStrategy.java @@ -0,0 +1,148 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Collections; +import java.util.Optional; + +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Query; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.util.Assert; + +/** + * {@link DelegatingDataAccessStrategy} applying Single Query Loading if the underlying aggregate type allows Single + * Query Loading. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 3.2 + */ +class SingleQueryFallbackDataAccessStrategy extends DelegatingDataAccessStrategy { + + private final SqlGeneratorSource sqlGeneratorSource; + private final SingleQueryDataAccessStrategy singleSelectDelegate; + private final JdbcConverter converter; + + public SingleQueryFallbackDataAccessStrategy(SqlGeneratorSource sqlGeneratorSource, JdbcConverter converter, + NamedParameterJdbcOperations operations, DataAccessStrategy fallback) { + + super(fallback); + + Assert.notNull(sqlGeneratorSource, "SqlGeneratorSource must not be null"); + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + + this.sqlGeneratorSource = sqlGeneratorSource; + this.converter = converter; + + this.singleSelectDelegate = new SingleQueryDataAccessStrategy(sqlGeneratorSource.getDialect(), converter, + operations); + } + + @Override + public T findById(Object id, Class domainType) { + + if (isSingleSelectQuerySupported(domainType)) { + return singleSelectDelegate.findById(id, domainType); + } + + return super.findById(id, domainType); + } + + @Override + public Iterable findAll(Class domainType) { + + if (isSingleSelectQuerySupported(domainType)) { + return singleSelectDelegate.findAll(domainType); + } + + return super.findAll(domainType); + } + + 
@Override + public Iterable findAllById(Iterable ids, Class domainType) { + + if (!ids.iterator().hasNext()) { + return Collections.emptyList(); + } + + if (isSingleSelectQuerySupported(domainType)) { + return singleSelectDelegate.findAllById(ids, domainType); + } + + return super.findAllById(ids, domainType); + } + + @Override + public Optional findOne(Query query, Class domainType) { + + if (isSingleSelectQuerySupported(domainType) && isSingleSelectQuerySupported(query)) { + return singleSelectDelegate.findOne(query, domainType); + } + + return super.findOne(query, domainType); + } + + @Override + public Iterable findAll(Query query, Class domainType) { + + if (isSingleSelectQuerySupported(domainType) && isSingleSelectQuerySupported(query)) { + return singleSelectDelegate.findAll(query, domainType); + } + + return super.findAll(query, domainType); + } + + private static boolean isSingleSelectQuerySupported(Query query) { + return !query.isSorted() && !query.isLimited(); + } + + private boolean isSingleSelectQuerySupported(Class entityType) { + + return converter.getMappingContext().isSingleQueryLoadingEnabled() + && sqlGeneratorSource.getDialect().supportsSingleQueryLoading()// + && entityQualifiesForSingleQueryLoading(entityType); + } + + private boolean entityQualifiesForSingleQueryLoading(Class entityType) { + + for (PersistentPropertyPath path : converter.getMappingContext() + .findPersistentPropertyPaths(entityType, __ -> true)) { + RelationalPersistentProperty property = path.getLeafProperty(); + if (property.isEntity()) { + + // single references are currently not supported + if (!(property.isMap() || property.isCollectionLike())) { + return false; + } + + // embedded entities are currently not supported + if (property.isEmbedded()) { + return false; + } + + // nested references are currently not supported + if (path.getLength() > 1) { + return false; + } + } + } + return true; + + } +} diff --git 
a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlContext.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlContext.java new file mode 100644 index 0000000000..7663e6cd4f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlContext.java @@ -0,0 +1,70 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.Table; + +/** + * Utility to get from path to SQL DSL elements. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Tyler Van Gorder + * @since 1.1 + */ +class SqlContext { + + private final RelationalPersistentEntity entity; + private final Table table; + + SqlContext(RelationalPersistentEntity entity) { + + this.entity = entity; + this.table = Table.create(entity.getQualifiedTableName()); + } + + Column getIdColumn() { + return table.column(entity.getIdColumn()); + } + + Column getVersionColumn() { + return table.column(entity.getRequiredVersionProperty().getColumnName()); + } + + Table getTable() { + return table; + } + + Table getTable(AggregatePath path) { + + SqlIdentifier tableAlias = path.getTableInfo().tableAlias(); + Table table = Table.create(path.getTableInfo().qualifiedTableName()); + return tableAlias == null ? table : table.as(tableAlias); + } + + Column getColumn(AggregatePath path) { + AggregatePath.ColumnInfo columnInfo = path.getColumnInfo(); + return getTable(path).column(columnInfo.name()).as(columnInfo.alias()); + } + + Column getReverseColumn(AggregatePath path) { + return getTable(path).column(path.getTableInfo().reverseColumnInfo().name()).as(path.getTableInfo().reverseColumnInfo().alias()); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlGenerator.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlGenerator.java new file mode 100644 index 0000000000..7ac637e8c3 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlGenerator.java @@ -0,0 +1,1208 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.repository.support.SimpleJdbcRepository; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.CriteriaDefinition; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.core.sql.*; +import org.springframework.data.relational.core.sql.render.RenderContext; +import org.springframework.data.relational.core.sql.render.SqlRenderer; +import org.springframework.data.util.Lazy; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Generates SQL statements to be used by {@link SimpleJdbcRepository} + * + * @author 
Jens Schauder + * @author Yoichi Imai + * @author Bastian Wilhelm + * @author Oleksandr Kucher + * @author Mark Paluch + * @author Tom Hombergs + * @author Tyler Van Gorder + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Mikhail Polivakha + * @author Chirag Tailor + * @author Diego Krupitza + * @author Hari Ohm Prasath + * @author Viktor Ardelean + * @author Kurt Niemi + */ +class SqlGenerator { + + static final SqlIdentifier VERSION_SQL_PARAMETER = SqlIdentifier.unquoted("___oldOptimisticLockingVersion"); + static final SqlIdentifier ID_SQL_PARAMETER = SqlIdentifier.unquoted("id"); + static final SqlIdentifier IDS_SQL_PARAMETER = SqlIdentifier.unquoted("ids"); + static final SqlIdentifier ROOT_ID_PARAMETER = SqlIdentifier.unquoted("rootId"); + + /** + * Length of an aggregate path that is one longer then the root path. + */ + private static final int FIRST_NON_ROOT_LENGTH = 2; + + private final RelationalPersistentEntity entity; + private final RelationalMappingContext mappingContext; + private final RenderContext renderContext; + + private final SqlContext sqlContext; + private final SqlRenderer sqlRenderer; + private final Columns columns; + + private final Lazy findOneSql = Lazy.of(this::createFindOneSql); + private final Lazy findAllSql = Lazy.of(this::createFindAllSql); + private final Lazy findAllInListSql = Lazy.of(this::createFindAllInListSql); + + private final Lazy existsSql = Lazy.of(this::createExistsSql); + private final Lazy countSql = Lazy.of(this::createCountSql); + + private final Lazy updateSql = Lazy.of(this::createUpdateSql); + private final Lazy updateWithVersionSql = Lazy.of(this::createUpdateWithVersionSql); + + private final Lazy deleteByIdSql = Lazy.of(this::createDeleteByIdSql); + private final Lazy deleteByIdInSql = Lazy.of(this::createDeleteByIdInSql); + private final Lazy deleteByIdAndVersionSql = Lazy.of(this::createDeleteByIdAndVersionSql); + private final Lazy deleteByListSql = Lazy.of(this::createDeleteByListSql); 
+ private final QueryMapper queryMapper; + private final Dialect dialect; + + /** + * Create a new {@link SqlGenerator} given {@link RelationalMappingContext} and {@link RelationalPersistentEntity}. + * + * @param mappingContext must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param entity must not be {@literal null}. + * @param dialect must not be {@literal null}. + */ + SqlGenerator(RelationalMappingContext mappingContext, JdbcConverter converter, RelationalPersistentEntity entity, + Dialect dialect) { + + this.mappingContext = mappingContext; + this.entity = entity; + this.sqlContext = new SqlContext(entity); + this.renderContext = new RenderContextFactory(dialect).createRenderContext(); + this.sqlRenderer = SqlRenderer.create(renderContext); + this.columns = new Columns(entity, mappingContext, converter); + this.queryMapper = new QueryMapper(converter); + this.dialect = dialect; + } + + /** + * When deleting entities there is a fundamental difference between deleting + *
    + *
+ * <ol>
+ * <li>the aggregate root.</li>
+ * <li>a first level entity which still references the root id directly</li>
+ * <li>and all other entities which have to use a subselect to navigate from the id of the aggregate root to something
+ * referenced by the table in question.</li>
+ * </ol>
+ * For paths of the second kind this method returns {@literal true}. + * + * @param path the path to analyze. + * @return If the given path is considered deeply nested. + */ + private static boolean isFirstNonRoot(AggregatePath path) { + return path.getLength() == FIRST_NON_ROOT_LENGTH; + } + + /** + * When deleting entities there is a fundamental difference between deleting + *
    + *
+ * <ol>
+ * <li>the aggregate root.</li>
+ * <li>a first level entity which still references the root id directly</li>
+ * <li>and all other entities which have to use a subselect to navigate from the id of the aggregate root to something
+ * referenced by the table in question.</li>
+ * </ol>
+ * For paths of the third kind this method returns {@literal true}. + * + * @param path the path to analyze. + * @return If the given path is considered deeply nested. + */ + private static boolean isDeeplyNested(AggregatePath path) { + return path.getLength() > FIRST_NON_ROOT_LENGTH; + } + + /** + * Construct an IN-condition based on a {@link Select Sub-Select} which selects the ids (or stand-ins for ids) of the + * given {@literal path} to those that reference the root entities specified by the {@literal rootCondition}. + * + * @param path specifies the table and id to select + * @param rootCondition the condition on the root of the path determining what to select + * @param filterColumn the column to apply the IN-condition to. + * @return the IN condition + */ + private Condition getSubselectCondition(AggregatePath path, Function rootCondition, + Column filterColumn) { + + AggregatePath parentPath = path.getParentPath(); + + if (!parentPath.hasIdProperty()) { + if (isDeeplyNested(parentPath)) { + return getSubselectCondition(parentPath, rootCondition, filterColumn); + } + return rootCondition.apply(filterColumn); + } + + Table subSelectTable = Table.create(parentPath.getTableInfo().qualifiedTableName()); + Column idColumn = subSelectTable.column(parentPath.getTableInfo().idColumnName()); + Column selectFilterColumn = subSelectTable.column(parentPath.getTableInfo().effectiveIdColumnName()); + + Condition innerCondition; + + if (isFirstNonRoot(parentPath)) { // if the parent is the root of the path + + // apply the rootCondition + innerCondition = rootCondition.apply(selectFilterColumn); + } else { + + // otherwise, we need another layer of subselect + innerCondition = getSubselectCondition(parentPath, rootCondition, selectFilterColumn); + } + + Select select = Select.builder() // + .select(idColumn) // + .from(subSelectTable) // + .where(innerCondition).build(); + + return filterColumn.in(select); + } + + private BindMarker getBindMarker(SqlIdentifier 
columnName) { + return SQL.bindMarker(":" + BindParameterNameSanitizer.sanitize(renderReference(columnName))); + } + + /** + * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships. + * Results are filtered using an {@code IN}-clause on the id column. + * + * @return a SQL statement. Guaranteed to be not {@code null}. + */ + String getFindAllInList() { + return findAllInListSql.get(); + } + + /** + * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships. + * + * @return a SQL statement. Guaranteed to be not {@code null}. + */ + String getFindAll() { + return findAllSql.get(); + } + + /** + * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships, + * sorted by the given parameter. + * + * @return a SQL statement. Guaranteed to be not {@code null}. + */ + String getFindAll(Sort sort) { + return render(selectBuilder(Collections.emptyList(), sort, Pageable.unpaged()).build()); + } + + /** + * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships, + * paged and sorted by the given parameter. + * + * @return a SQL statement. Guaranteed to be not {@code null}. + */ + String getFindAll(Pageable pageable) { + return render(selectBuilder(Collections.emptyList(), pageable.getSort(), pageable).build()); + } + + /** + * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships. + * Results are limited to those rows referencing some parent entity. This is used to select values for a complex + * property ({@link Set}, {@link Map} ...) based on a referencing entity. + * + * @param parentIdentifier name of the column of the FK back to the referencing entity. + * @param propertyPath used to determine if the property is ordered and if there is a key column. + * @return a SQL String. 
+ * @since 3.0 + */ + String getFindAllByProperty(Identifier parentIdentifier, + PersistentPropertyPath propertyPath) { + + Assert.notNull(parentIdentifier, "identifier must not be null"); + Assert.notNull(propertyPath, "propertyPath must not be null"); + + AggregatePath path = mappingContext.getAggregatePath(propertyPath); + + return getFindAllByProperty(parentIdentifier, path.getTableInfo().qualifierColumnInfo(), path.isOrdered()); + } + + /** + * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships. + * Results are limited to those rows referencing some other entity using the column specified by + * {@literal columnName}. This is used to select values for a complex property ({@link Set}, {@link Map} ...) based on + * a referencing entity. + * + * @param parentIdentifier name of the column of the FK back to the referencing entity. + * @param keyColumn if the property is of type {@link Map} this column contains the map key. + * @param ordered whether the SQL statement should include an ORDER BY for the keyColumn. If this is {@code true}, the + * keyColumn must not be {@code null}. + * @return a SQL String. + */ + String getFindAllByProperty(Identifier parentIdentifier, @Nullable AggregatePath.ColumnInfo keyColumn, + boolean ordered) { + + Assert.isTrue(keyColumn != null || !ordered, + "If the SQL statement should be ordered a keyColumn to order by must be provided"); + + Table table = getTable(); + + SelectBuilder.SelectWhere builder = selectBuilder( // + keyColumn == null // + ? Collections.emptyList() // + : Collections.singleton(keyColumn.name()) // + ); + + Condition condition = buildConditionForBackReference(parentIdentifier, table); + SelectBuilder.SelectWhereAndOr withWhereClause = builder.where(condition); + + Select select = ordered // + ? 
withWhereClause.orderBy(table.column(keyColumn.name()).as(keyColumn.alias())).build() // + : withWhereClause.build(); + + return render(select); + } + + private Condition buildConditionForBackReference(Identifier parentIdentifier, Table table) { + + Condition condition = null; + for (SqlIdentifier backReferenceColumn : parentIdentifier.toMap().keySet()) { + + Assert.isTrue(!SqlIdentifier.EMPTY.equals(backReferenceColumn), + "An empty SqlIdentifier can't be used in condition. Make sure that all composite primary keys are defined in the query"); + + Condition newCondition = table.column(backReferenceColumn).isEqualTo(getBindMarker(backReferenceColumn)); + condition = condition == null ? newCondition : condition.and(newCondition); + } + + Assert.state(condition != null, "We need at least one condition"); + + return condition; + } + + /** + * Create a {@code SELECT COUNT(id) FROM … WHERE :id = …} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getExists() { + return existsSql.get(); + } + + /** + * Create a {@code SELECT … FROM … WHERE :id = …} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getFindOne() { + return findOneSql.get(); + } + + /** + * Create a {@code SELECT count(id) FROM … WHERE :id = … (LOCK CLAUSE)} statement. + * + * @param lockMode Lock clause mode. + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getAcquireLockById(LockMode lockMode) { + return this.createAcquireLockById(lockMode); + } + + /** + * Create a {@code SELECT count(id) FROM … (LOCK CLAUSE)} statement. + * + * @param lockMode Lock clause mode. + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getAcquireLockAll(LockMode lockMode) { + return this.createAcquireLockAll(lockMode); + } + + /** + * Create a {@code INSERT INTO … (…) VALUES(…)} statement. 
+ * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getInsert(Set additionalColumns) { + return createInsertSql(additionalColumns); + } + + /** + * Create a {@code UPDATE … SET …} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getUpdate() { + return updateSql.get(); + } + + /** + * Create a {@code UPDATE … SET … WHERE ID = :id and VERSION_COLUMN = :___oldOptimisticLockingVersion } statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getUpdateWithVersion() { + return updateWithVersionSql.get(); + } + + /** + * Create a {@code SELECT COUNT(*) FROM …} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getCount() { + return countSql.get(); + } + + /** + * Create a {@code DELETE FROM … WHERE :id = …} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getDeleteById() { + return deleteByIdSql.get(); + } + + /** + * Create a {@code DELETE FROM … WHERE :id IN …} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getDeleteByIdIn() { + return deleteByIdInSql.get(); + } + + /** + * Create a {@code DELETE FROM … WHERE :id = … and :___oldOptimisticLockingVersion = ...} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getDeleteByIdAndVersion() { + return deleteByIdAndVersionSql.get(); + } + + /** + * Create a {@code DELETE FROM … WHERE :ids in (…)} statement. + * + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String getDeleteByList() { + return deleteByListSql.get(); + } + + /** + * Create a {@code DELETE} query and optionally filter by {@link PersistentPropertyPath}. + * + * @param path can be {@literal null}. 
+ * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String createDeleteAllSql(@Nullable PersistentPropertyPath path) { + + Table table = getTable(); + + DeleteBuilder.DeleteWhere deleteAll = Delete.builder().from(table); + + if (path == null) { + return render(deleteAll.build()); + } + + return createDeleteByPathAndCriteria(mappingContext.getAggregatePath(path), Column::isNotNull); + } + + /** + * Create a {@code DELETE} query and filter by {@link PersistentPropertyPath} using {@code WHERE} with the {@code =} + * operator. + * + * @param path must not be {@literal null}. + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + String createDeleteByPath(PersistentPropertyPath path) { + return createDeleteByPathAndCriteria(mappingContext.getAggregatePath(path), + filterColumn -> filterColumn.isEqualTo(getBindMarker(ROOT_ID_PARAMETER))); + } + + /** + * Create a {@code DELETE} query and filter by {@link PersistentPropertyPath} using {@code WHERE} with the {@code IN} + * operator. + * + * @param path must not be {@literal null}. + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. 
+ */ + String createDeleteInByPath(PersistentPropertyPath path) { + + return createDeleteByPathAndCriteria(mappingContext.getAggregatePath(path), + filterColumn -> filterColumn.in(getBindMarker(IDS_SQL_PARAMETER))); + } + + private String createFindOneSql() { + + Select select = selectBuilder().where(getIdColumn().isEqualTo(getBindMarker(ID_SQL_PARAMETER))) // + .build(); + + return render(select); + } + + private String createAcquireLockById(LockMode lockMode) { + + Table table = this.getTable(); + + Select select = StatementBuilder // + .select(getIdColumn()) // + .from(table) // + .where(getIdColumn().isEqualTo(getBindMarker(ID_SQL_PARAMETER))) // + .lock(lockMode) // + .build(); + + return render(select); + } + + private String createAcquireLockAll(LockMode lockMode) { + + Table table = this.getTable(); + + Select select = StatementBuilder // + .select(getIdColumn()) // + .from(table) // + .lock(lockMode) // + .build(); + + return render(select); + } + + private String createFindAllSql() { + return render(selectBuilder().build()); + } + + private SelectBuilder.SelectWhere selectBuilder() { + return selectBuilder(Collections.emptyList()); + } + + private SelectBuilder.SelectWhere selectBuilder(Collection keyColumns) { + + Table table = getTable(); + + Set columnExpressions = new LinkedHashSet<>(); + + List joinTables = new ArrayList<>(); + for (PersistentPropertyPath path : mappingContext + .findPersistentPropertyPaths(entity.getType(), p -> true)) { + + AggregatePath extPath = mappingContext.getAggregatePath(path); + + // add a join if necessary + Join join = getJoin(extPath); + if (join != null) { + joinTables.add(join); + } + + Column column = getColumn(extPath); + if (column != null) { + columnExpressions.add(column); + } + } + + for (SqlIdentifier keyColumn : keyColumns) { + columnExpressions.add(table.column(keyColumn).as(keyColumn)); + } + + SelectBuilder.SelectAndFrom selectBuilder = StatementBuilder.select(columnExpressions); + SelectBuilder.SelectJoin 
baseSelect = selectBuilder.from(table); + + for (Join join : joinTables) { + baseSelect = baseSelect.leftOuterJoin(join.joinTable).on(join.joinColumn).equals(join.parentId); + } + + return (SelectBuilder.SelectWhere) baseSelect; + } + + private SelectBuilder.SelectOrdered selectBuilder(Collection keyColumns, Sort sort, + Pageable pageable) { + + SelectBuilder.SelectOrdered sortable = this.selectBuilder(keyColumns); + sortable = applyPagination(pageable, sortable); + return sortable.orderBy(extractOrderByFields(sort)); + + } + + private SelectBuilder.SelectOrdered applyPagination(Pageable pageable, SelectBuilder.SelectOrdered select) { + + if (!pageable.isPaged()) { + return select; + } + + Assert.isTrue(select instanceof SelectBuilder.SelectLimitOffset, + () -> String.format("Can't apply limit clause to statement of type %s", select.getClass())); + + SelectBuilder.SelectLimitOffset limitable = (SelectBuilder.SelectLimitOffset) select; + SelectBuilder.SelectLimitOffset limitResult = limitable.limitOffset(pageable.getPageSize(), pageable.getOffset()); + + Assert.state(limitResult instanceof SelectBuilder.SelectOrdered, String.format( + "The result of applying the limit-clause must be of type SelectOrdered in order to apply the order-by-clause but is of type %s", + select.getClass())); + + return (SelectBuilder.SelectOrdered) limitResult; + } + + /** + * Create a {@link Column} for {@link AggregatePath}. + * + * @param path the path to the column in question. + * @return the statement as a {@link String}. Guaranteed to be not {@literal null}. + */ + @Nullable + Column getColumn(AggregatePath path) { + + // an embedded itself doesn't give a column, its members will though. 
+ // if there is a collection or map on the path it won't get selected at all, but it will get loaded with a separate + // select + // only the parent path is considered in order to handle arrays that get stored as BINARY properly + if (path.isEmbedded() || path.getParentPath().isMultiValued()) { + return null; + } + + if (path.isEntity()) { + + // Simple entities without id include there backreference as a synthetic id in order to distinguish null entities + // from entities with only null values. + + if (path.isQualified() // + || path.isCollectionLike() // + || path.hasIdProperty() // + ) { + return null; + } + + return sqlContext.getReverseColumn(path); + } + + return sqlContext.getColumn(path); + } + + @Nullable + Join getJoin(AggregatePath path) { + + if (!path.isEntity() || path.isEmbedded() || path.isMultiValued()) { + return null; + } + + Table currentTable = sqlContext.getTable(path); + + AggregatePath idDefiningParentPath = path.getIdDefiningParentPath(); + Table parentTable = sqlContext.getTable(idDefiningParentPath); + + return new Join( // + currentTable, // + currentTable.column(path.getTableInfo().reverseColumnInfo().name()), // + parentTable.column(idDefiningParentPath.getTableInfo().idColumnName()) // + ); + } + + private String createFindAllInListSql() { + + Select select = selectBuilder().where(getIdColumn().in(getBindMarker(IDS_SQL_PARAMETER))).build(); + + return render(select); + } + + private String createExistsSql() { + + Table table = getTable(); + + Select select = StatementBuilder // + .select(Functions.count(getIdColumn())) // + .from(table) // + .where(getIdColumn().isEqualTo(getBindMarker(ID_SQL_PARAMETER))) // + .build(); + + return render(select); + } + + private String createCountSql() { + + Table table = getTable(); + + Select select = StatementBuilder // + .select(Functions.count(Expressions.asterisk())) // + .from(table) // + .build(); + + return render(select); + } + + private String createInsertSql(Set additionalColumns) { + + 
Table table = getTable(); + + Set columnNamesForInsert = new TreeSet<>(Comparator.comparing(SqlIdentifier::getReference)); + columnNamesForInsert.addAll(columns.getInsertableColumns()); + columnNamesForInsert.addAll(additionalColumns); + + InsertBuilder.InsertIntoColumnsAndValuesWithBuild insert = Insert.builder().into(table); + + for (SqlIdentifier cn : columnNamesForInsert) { + insert = insert.column(table.column(cn)); + } + + if (columnNamesForInsert.isEmpty()) { + return render(insert.build()); + } + + InsertBuilder.InsertValuesWithBuild insertWithValues = null; + for (SqlIdentifier cn : columnNamesForInsert) { + insertWithValues = (insertWithValues == null ? insert : insertWithValues).values(getBindMarker(cn)); + } + + return render(insertWithValues.build()); + } + + private String createUpdateSql() { + return render(createBaseUpdate().build()); + } + + private String createUpdateWithVersionSql() { + + Update update = createBaseUpdate() // + .and(getVersionColumn().isEqualTo(getBindMarker(VERSION_SQL_PARAMETER))) // + .build(); + + return render(update); + } + + private UpdateBuilder.UpdateWhereAndOr createBaseUpdate() { + + Table table = getTable(); + + List assignments = columns.getUpdatableColumns() // + .stream() // + .map(columnName -> Assignments.value( // + table.column(columnName), // + getBindMarker(columnName))) // + .collect(Collectors.toList()); + + return Update.builder() // + .table(table) // + .set(assignments) // + .where(getIdColumn().isEqualTo(getBindMarker(entity.getIdColumn()))); + } + + private String createDeleteByIdSql() { + return render(createBaseDeleteById(getTable()).build()); + } + + private String createDeleteByIdInSql() { + return render(createBaseDeleteByIdIn(getTable()).build()); + } + + private String createDeleteByIdAndVersionSql() { + + Delete delete = createBaseDeleteById(getTable()) // + .and(getVersionColumn().isEqualTo(getBindMarker(VERSION_SQL_PARAMETER))) // + .build(); + + return render(delete); + } + + private 
DeleteBuilder.DeleteWhereAndOr createBaseDeleteById(Table table) { + + return Delete.builder().from(table) // + .where(getIdColumn().isEqualTo(getBindMarker(ID_SQL_PARAMETER))); + } + + private DeleteBuilder.DeleteWhereAndOr createBaseDeleteByIdIn(Table table) { + + return Delete.builder().from(table) // + .where(getIdColumn().in(getBindMarker(IDS_SQL_PARAMETER))); + } + + private String createDeleteByPathAndCriteria(AggregatePath path, Function rootCondition) { + + Table table = Table.create(path.getTableInfo().qualifiedTableName()); + + DeleteBuilder.DeleteWhere builder = Delete.builder() // + .from(table); + Delete delete; + + Column filterColumn = table.column(path.getTableInfo().reverseColumnInfo().name()); + + if (isFirstNonRoot(path)) { + + delete = builder // + .where(rootCondition.apply(filterColumn)) // + .build(); + } else { + + Condition condition = getSubselectCondition(path, rootCondition, filterColumn); + delete = builder.where(condition).build(); + } + + return render(delete); + } + + private String createDeleteByListSql() { + + Table table = getTable(); + + Delete delete = Delete.builder() // + .from(table) // + .where(getIdColumn().in(getBindMarker(IDS_SQL_PARAMETER))) // + .build(); + + return render(delete); + } + + private String render(Select select) { + return this.sqlRenderer.render(select); + } + + private String render(Insert insert) { + return this.sqlRenderer.render(insert); + } + + private String render(Update update) { + return this.sqlRenderer.render(update); + } + + private String render(Delete delete) { + return this.sqlRenderer.render(delete); + } + + private Table getTable() { + return sqlContext.getTable(); + } + + private Column getIdColumn() { + return sqlContext.getIdColumn(); + } + + private Column getVersionColumn() { + return sqlContext.getVersionColumn(); + } + + private String renderReference(SqlIdentifier identifier) { + return identifier.getReference(); + } + + private List extractOrderByFields(Sort sort) { + + return 
sort.stream() // + .map(this::orderToOrderByField) // + .collect(Collectors.toList()); + } + + private OrderByField orderToOrderByField(Sort.Order order) { + + SqlIdentifier columnName = getColumnNameToSortBy(order); + Column column = Column.create(columnName, this.getTable()); + return OrderByField.from(column, order.getDirection()).withNullHandling(order.getNullHandling()); + } + + private SqlIdentifier getColumnNameToSortBy(Sort.Order order) { + + RelationalPersistentProperty propertyToSortBy = entity.getPersistentProperty(order.getProperty()); + if (propertyToSortBy != null) { + return propertyToSortBy.getColumnName(); + } + + PersistentPropertyPath persistentPropertyPath = mappingContext + .getPersistentPropertyPath(order.getProperty(), entity.getTypeInformation()); + + propertyToSortBy = persistentPropertyPath.getBaseProperty(); + + Assert.state(propertyToSortBy != null && propertyToSortBy.isEmbedded(), () -> String.format( // + "Specified sorting property '%s' is expected to " + // + "be the property, named '%s', of embedded entity '%s', but field '%s' is " + // + "not marked with @Embedded", // + order.getProperty(), // + extractFieldNameFromEmbeddedProperty(order), // + extractEmbeddedPropertyName(order), // + extractEmbeddedPropertyName(order) // + )); + + RelationalPersistentEntity embeddedEntity = mappingContext + .getRequiredPersistentEntity(propertyToSortBy.getType()); + return embeddedEntity.getRequiredPersistentProperty(extractFieldNameFromEmbeddedProperty(order)).getColumnName(); + } + + public String extractEmbeddedPropertyName(Sort.Order order) { + return order.getProperty().substring(0, order.getProperty().indexOf(".")); + } + + public String extractFieldNameFromEmbeddedProperty(Sort.Order order) { + return order.getProperty().substring(order.getProperty().indexOf(".") + 1); + } + + /** + * Constructs a single sql query that performs select based on the provided query. 
Additional the bindings for the + * where clause are stored after execution into the parameterSource + * + * @param query the query to base the select on. Must not be null + * @param parameterSource the source for holding the bindings + * @return a non null query string. + */ + public String selectByQuery(Query query, MapSqlParameterSource parameterSource) { + + Assert.notNull(parameterSource, "parameterSource must not be null"); + + SelectBuilder.SelectWhere selectBuilder = selectBuilder(); + + Select select = applyQueryOnSelect(query, parameterSource, selectBuilder) // + .build(); + + return render(select); + } + + /** + * Constructs a single sql query that performs select based on the provided query and pagination information. + * Additional the bindings for the where clause are stored after execution into the parameterSource + * + * @param query the query to base the select on. Must not be null. + * @param pageable the pageable to perform on the select. + * @param parameterSource the source for holding the bindings. + * @return a non null query string. + */ + public String selectByQuery(Query query, MapSqlParameterSource parameterSource, Pageable pageable) { + + Assert.notNull(parameterSource, "parameterSource must not be null"); + + SelectBuilder.SelectWhere selectBuilder = selectBuilder(); + + // first apply query and then pagination. This means possible query sorting and limiting might be overwritten by the + // pagination. This is desired. + SelectBuilder.SelectOrdered selectOrdered = applyQueryOnSelect(query, parameterSource, selectBuilder); + selectOrdered = applyPagination(pageable, selectOrdered); + selectOrdered = selectOrdered.orderBy(extractOrderByFields(pageable.getSort())); + + Select select = selectOrdered.build(); + return render(select); + } + + /** + * Constructs a single sql query that performs select count based on the provided query for checking existence. 
+ * Additional the bindings for the where clause are stored after execution into the parameterSource + * + * @param query the query to base the select on. Must not be null + * @param parameterSource the source for holding the bindings + * @return a non null query string. + */ + public String existsByQuery(Query query, MapSqlParameterSource parameterSource) { + + SelectBuilder.SelectJoin baseSelect = getExistsSelect(); + + Select select = applyQueryOnSelect(query, parameterSource, (SelectBuilder.SelectWhere) baseSelect) // + .build(); + + return render(select); + } + + /** + * Constructs a single sql query that performs select count based on the provided query. Additional the bindings for + * the where clause are stored after execution into the parameterSource + * + * @param query the query to base the select on. Must not be null + * @param parameterSource the source for holding the bindings + * @return a non null query string. + */ + public String countByQuery(Query query, MapSqlParameterSource parameterSource) { + + Expression countExpression = Expressions.just("1"); + SelectBuilder.SelectJoin baseSelect = getSelectCountWithExpression(countExpression); + + Select select = applyQueryOnSelect(query, parameterSource, (SelectBuilder.SelectWhere) baseSelect) // + .build(); + + return render(select); + } + + /** + * Generates a {@link org.springframework.data.relational.core.sql.SelectBuilder.SelectJoin} with a + * COUNT(...) where the countExpressions are the parameters of the count. + * + * @return a non-null {@link org.springframework.data.relational.core.sql.SelectBuilder.SelectJoin} that joins all the + * columns and has only a count in the projection of the select. 
+ */ + private SelectBuilder.SelectJoin getExistsSelect() { + + Table table = getTable(); + + SelectBuilder.SelectJoin baseSelect = StatementBuilder // + .select(dialect.getExistsFunction()) // + .from(table); + + // add possible joins + for (PersistentPropertyPath path : mappingContext + .findPersistentPropertyPaths(entity.getType(), p -> true)) { + + AggregatePath aggregatePath = mappingContext.getAggregatePath(path); + + // add a join if necessary + Join join = getJoin(aggregatePath); + if (join != null) { + baseSelect = baseSelect.leftOuterJoin(join.joinTable).on(join.joinColumn).equals(join.parentId); + } + } + return baseSelect; + } + + /** + * Generates a {@link org.springframework.data.relational.core.sql.SelectBuilder.SelectJoin} with a + * COUNT(...) where the countExpressions are the parameters of the count. + * + * @param countExpressions the expression to use as count parameter. + * @return a non-null {@link org.springframework.data.relational.core.sql.SelectBuilder.SelectJoin} that joins all the + * columns and has only a count in the projection of the select. + */ + private SelectBuilder.SelectJoin getSelectCountWithExpression(Expression... 
countExpressions) { + + Assert.notNull(countExpressions, "countExpressions must not be null"); + Assert.state(countExpressions.length >= 1, "countExpressions must contain at least one expression"); + + Table table = getTable(); + + SelectBuilder.SelectJoin baseSelect = StatementBuilder // + .select(Functions.count(countExpressions)) // + .from(table); + + // add possible joins + for (PersistentPropertyPath path : mappingContext + .findPersistentPropertyPaths(entity.getType(), p -> true)) { + + AggregatePath extPath = mappingContext.getAggregatePath(path); + + // add a join if necessary + Join join = getJoin(extPath); + if (join != null) { + baseSelect = baseSelect.leftOuterJoin(join.joinTable).on(join.joinColumn).equals(join.parentId); + } + } + return baseSelect; + } + + private SelectBuilder.SelectOrdered applyQueryOnSelect(Query query, MapSqlParameterSource parameterSource, + SelectBuilder.SelectWhere selectBuilder) { + + Table table = Table.create(this.entity.getQualifiedTableName()); + + SelectBuilder.SelectOrdered selectOrdered = query // + .getCriteria() // + .map(item -> this.applyCriteria(item, selectBuilder, parameterSource, table)) // + .orElse(selectBuilder); + + if (query.isSorted()) { + List sort = this.queryMapper.getMappedSort(table, query.getSort(), entity); + selectOrdered = selectOrdered.orderBy(sort); + } + + SelectBuilder.SelectLimitOffset limitable = (SelectBuilder.SelectLimitOffset) selectOrdered; + + if (query.getLimit() > 0) { + limitable = limitable.limit(query.getLimit()); + } + + if (query.getOffset() > 0) { + limitable = limitable.offset(query.getOffset()); + } + return (SelectBuilder.SelectOrdered) limitable; + } + + SelectBuilder.SelectOrdered applyCriteria(@Nullable CriteriaDefinition criteria, + SelectBuilder.SelectWhere whereBuilder, MapSqlParameterSource parameterSource, Table table) { + + return criteria == null || criteria.isEmpty() // Check for null and empty criteria + ? 
whereBuilder // + : whereBuilder.where(queryMapper.getMappedObject(parameterSource, criteria, table, entity)); + } + + /** + * Value object representing a {@code JOIN} association. + */ + static final class Join { + + private final Table joinTable; + private final Column joinColumn; + private final Column parentId; + + Join(Table joinTable, Column joinColumn, Column parentId) { + + Assert.notNull(joinTable, "JoinTable must not be null"); + Assert.notNull(joinColumn, "JoinColumn must not be null"); + Assert.notNull(parentId, "ParentId must not be null"); + + this.joinTable = joinTable; + this.joinColumn = joinColumn; + this.parentId = parentId; + } + + Table getJoinTable() { + return this.joinTable; + } + + Column getJoinColumn() { + return this.joinColumn; + } + + Column getParentId() { + return this.parentId; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Join join = (Join) o; + return joinTable.equals(join.joinTable) && joinColumn.equals(join.joinColumn) && parentId.equals(join.parentId); + } + + @Override + public int hashCode() { + return Objects.hash(joinTable, joinColumn, parentId); + } + + @Override + public String toString() { + + return "Join{" + // + "joinTable=" + joinTable + // + ", joinColumn=" + joinColumn + // + ", parentId=" + parentId + // + '}'; + } + } + + /** + * Value object encapsulating column name caches. 
+ * + * @author Mark Paluch + * @author Jens Schauder + */ + static class Columns { + + private final MappingContext, RelationalPersistentProperty> mappingContext; + private final JdbcConverter converter; + + private final List columnNames = new ArrayList<>(); + private final List idColumnNames = new ArrayList<>(); + private final List nonIdColumnNames = new ArrayList<>(); + private final Set readOnlyColumnNames = new HashSet<>(); + private final Set insertOnlyColumnNames = new HashSet<>(); + private final Set insertableColumns; + private final Set updatableColumns; + + Columns(RelationalPersistentEntity entity, + MappingContext, RelationalPersistentProperty> mappingContext, + JdbcConverter converter) { + + this.mappingContext = mappingContext; + this.converter = converter; + + populateColumnNameCache(entity, ""); + + Set insertable = new LinkedHashSet<>(nonIdColumnNames); + insertable.removeAll(readOnlyColumnNames); + + this.insertableColumns = Collections.unmodifiableSet(insertable); + + Set updatable = new LinkedHashSet<>(columnNames); + + updatable.removeAll(idColumnNames); + updatable.removeAll(readOnlyColumnNames); + updatable.removeAll(insertOnlyColumnNames); + + this.updatableColumns = Collections.unmodifiableSet(updatable); + } + + private void populateColumnNameCache(RelationalPersistentEntity entity, String prefix) { + + entity.doWithAll(property -> { + + // the referencing column of referenced entity is expected to be on the other side of the relation + if (!property.isEntity()) { + initSimpleColumnName(property, prefix); + } else if (property.isEmbedded()) { + initEmbeddedColumnNames(property, prefix); + } + }); + } + + private void initSimpleColumnName(RelationalPersistentProperty property, String prefix) { + + SqlIdentifier columnName = property.getColumnName().transform(prefix::concat); + + columnNames.add(columnName); + + if (!property.getOwner().isIdProperty(property)) { + nonIdColumnNames.add(columnName); + } else { + 
idColumnNames.add(columnName); + } + + if (!property.isWritable()) { + readOnlyColumnNames.add(columnName); + } + if (property.isInsertOnly()) { + insertOnlyColumnNames.add(columnName); + } + } + + private void initEmbeddedColumnNames(RelationalPersistentProperty property, String prefix) { + + String embeddedPrefix = property.getEmbeddedPrefix(); + + RelationalPersistentEntity embeddedEntity = mappingContext + .getRequiredPersistentEntity(converter.getColumnType(property)); + + populateColumnNameCache(embeddedEntity, prefix + embeddedPrefix); + } + + /** + * @return Column names that can be used for {@code INSERT}. + */ + Set getInsertableColumns() { + return insertableColumns; + } + + /** + * @return Column names that can be used for {@code UPDATE}. + */ + Set getUpdatableColumns() { + return updatableColumns; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlGeneratorSource.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlGeneratorSource.java new file mode 100644 index 0000000000..0a217dce63 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlGeneratorSource.java @@ -0,0 +1,64 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Map; + +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.util.Assert; +import org.springframework.util.ConcurrentReferenceHashMap; + +/** + * Provides {@link SqlGenerator}s per domain type. Instances get cached, so when asked multiple times for the same + * domain type, the same generator will get returned. + * + * @author Jens Schauder + * @author Mark Paluch + * @author Milan Milanov + */ +public class SqlGeneratorSource { + + private final Map, SqlGenerator> CACHE = new ConcurrentReferenceHashMap<>(); + private final RelationalMappingContext context; + private final JdbcConverter converter; + private final Dialect dialect; + + public SqlGeneratorSource(RelationalMappingContext context, JdbcConverter converter, Dialect dialect) { + + Assert.notNull(context, "Context must not be null"); + Assert.notNull(converter, "Converter must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + + this.context = context; + this.converter = converter; + this.dialect = dialect; + } + + /** + * @return the {@link Dialect} used by the created {@link SqlGenerator} instances. Guaranteed to be not + * {@literal null}. 
+ */ + public Dialect getDialect() { + return dialect; + } + + SqlGenerator getSqlGenerator(Class domainType) { + + return CACHE.computeIfAbsent(domainType, + t -> new SqlGenerator(context, converter, context.getRequiredPersistentEntity(t), dialect)); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlIdentifierParameterSource.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlIdentifierParameterSource.java new file mode 100644 index 0000000000..78ff82deb2 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlIdentifierParameterSource.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.namedparam.AbstractSqlParameterSource; + +/** + * Implementation of the {@link org.springframework.jdbc.core.namedparam.SqlParameterSource} interface based on + * {@link SqlIdentifier} instead of {@link String} for names. 
+ * + * @author Jens Schauder + * @author Kurt Niemi + * @author Mikhail Polivakha + * @since 2.0 + */ +class SqlIdentifierParameterSource extends AbstractSqlParameterSource { + + private final Set identifiers = new HashSet<>(); + private final Map namesToValues = new HashMap<>(); + + @Override + public boolean hasValue(String paramName) { + return namesToValues.containsKey(paramName); + } + + @Override + public Object getValue(String paramName) throws IllegalArgumentException { + return namesToValues.get(paramName); + } + + @Override + public String[] getParameterNames() { + return namesToValues.keySet().toArray(new String[0]); + } + + Set getIdentifiers() { + return Collections.unmodifiableSet(identifiers); + } + + void addValue(SqlIdentifier name, Object value) { + addValue(name, value, Integer.MIN_VALUE); + } + + void addValue(SqlIdentifier identifier, Object value, int sqlType) { + + identifiers.add(identifier); + String name = BindParameterNameSanitizer.sanitize(identifier.getReference()); + namesToValues.put(name, value); + registerSqlType(name, sqlType); + } + + void addAll(SqlIdentifierParameterSource others) { + + for (SqlIdentifier identifier : others.getIdentifiers()) { + + String name = BindParameterNameSanitizer.sanitize( identifier.getReference()); + addValue(identifier, others.getValue(name), others.getSqlType(name)); + } + } + + int size() { + return namesToValues.size(); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlParametersFactory.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlParametersFactory.java new file mode 100644 index 0000000000..8bf9bb869f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/SqlParametersFactory.java @@ -0,0 +1,294 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import java.sql.SQLType; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; + +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Creates the {@link SqlIdentifierParameterSource} for various SQL operations, dialect identifier processing rules and + * applicable converters. 
+ * + * @author Jens Schauder + * @author Chirag Tailor + * @author Mikhail Polivakha + * @since 2.4 + */ +public class SqlParametersFactory { + private final RelationalMappingContext context; + private final JdbcConverter converter; + + /** + * @since 3.1 + */ + public SqlParametersFactory(RelationalMappingContext context, JdbcConverter converter) { + this.context = context; + this.converter = converter; + } + + /** + * Creates the parameters for a SQL insert operation. + * + * @param instance the entity to be inserted. Must not be {@code null}. + * @param domainType the type of the instance. Must not be {@code null}. + * @param identifier information about data that needs to be considered for the insert but which is not part of the + * entity. Namely references back to a parent entity and key/index columns for entities that are stored in a + * {@link Map} or {@link List}. + * @param idValueSource the {@link IdValueSource} for the insert. + * @return the {@link SqlIdentifierParameterSource} for the insert. Guaranteed to not be {@code null}. + * @since 2.4 + */ + SqlIdentifierParameterSource forInsert(T instance, Class domainType, Identifier identifier, + IdValueSource idValueSource) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(domainType); + SqlIdentifierParameterSource parameterSource = getParameterSource(instance, persistentEntity, "", + PersistentProperty::isIdProperty); + + identifier.forEach((name, value, type) -> addConvertedPropertyValue(parameterSource, name, value, type)); + + if (IdValueSource.PROVIDED.equals(idValueSource)) { + + RelationalPersistentProperty idProperty = persistentEntity.getRequiredIdProperty(); + Object idValue = persistentEntity.getIdentifierAccessor(instance).getRequiredIdentifier(); + addConvertedPropertyValue(parameterSource, idProperty, idValue, idProperty.getColumnName()); + } + return parameterSource; + } + + /** + * Creates the parameters for a SQL update operation. 
+ * + * @param instance the entity to be updated. Must not be {@code null}. + * @param domainType the type of the instance. Must not be {@code null}. + * @return the {@link SqlIdentifierParameterSource} for the update. Guaranteed to not be {@code null}. + * @since 2.4 + */ + SqlIdentifierParameterSource forUpdate(T instance, Class domainType) { + + return getParameterSource(instance, getRequiredPersistentEntity(domainType), "", + RelationalPersistentProperty::isInsertOnly); + } + + /** + * Creates the parameters for a SQL query by id. + * + * @param id the entity id. Must not be {@code null}. + * @param domainType the type of the instance. Must not be {@code null}. + * @param name the name to be used for the id parameter. + * @return the {@link SqlIdentifierParameterSource} for the query. Guaranteed to not be {@code null}. + * @since 2.4 + */ + SqlIdentifierParameterSource forQueryById(Object id, Class domainType, SqlIdentifier name) { + + SqlIdentifierParameterSource parameterSource = new SqlIdentifierParameterSource(); + + addConvertedPropertyValue( // + parameterSource, // + getRequiredPersistentEntity(domainType).getRequiredIdProperty(), // + id, // + name // + ); + return parameterSource; + } + + /** + * Creates the parameters for a SQL query by ids. + * + * @param ids the entity ids. Must not be {@code null}. + * @param domainType the type of the instance. Must not be {@code null}. + * @return the {@link SqlIdentifierParameterSource} for the query. Guaranteed to not be {@code null}. + * @since 2.4 + */ + SqlIdentifierParameterSource forQueryByIds(Iterable ids, Class domainType) { + + SqlIdentifierParameterSource parameterSource = new SqlIdentifierParameterSource(); + + addConvertedPropertyValuesAsList(parameterSource, getRequiredPersistentEntity(domainType).getRequiredIdProperty(), + ids); + + return parameterSource; + } + + /** + * Creates the parameters for a SQL query of related entities. + * + * @param identifier the identifier describing the relation. 
Must not be {@code null}. + * @return the {@link SqlIdentifierParameterSource} for the query. Guaranteed to not be {@code null}. + * @since 2.4 + */ + SqlIdentifierParameterSource forQueryByIdentifier(Identifier identifier) { + + SqlIdentifierParameterSource parameterSource = new SqlIdentifierParameterSource(); + + identifier.toMap() + .forEach((name, value) -> addConvertedPropertyValue(parameterSource, name, value, value.getClass())); + + return parameterSource; + } + + /** + * Utility to create {@link Predicate}s. + */ + static class Predicates { + + /** + * Include all {@link Predicate} returning {@literal false} to never skip a property. + * + * @return the include all {@link Predicate}. + */ + static Predicate includeAll() { + return it -> false; + } + } + + private void addConvertedPropertyValue(SqlIdentifierParameterSource parameterSource, + RelationalPersistentProperty property, @Nullable Object value, SqlIdentifier name) { + + addConvertedValue(parameterSource, value, name, converter.getColumnType(property), + converter.getTargetSqlType(property)); + } + + private void addConvertedPropertyValue(SqlIdentifierParameterSource parameterSource, SqlIdentifier name, Object value, + Class javaType) { + + addConvertedValue(parameterSource, value, name, javaType, JdbcUtil.targetSqlTypeFor(javaType)); + } + + private void addConvertedValue(SqlIdentifierParameterSource parameterSource, @Nullable Object value, + SqlIdentifier paramName, Class javaType, SQLType sqlType) { + + JdbcValue jdbcValue = converter.writeJdbcValue( // + value, // + javaType, // + sqlType // + ); + + parameterSource.addValue( // + paramName, // + jdbcValue.getValue(), // + jdbcValue.getJdbcType().getVendorTypeNumber()); + } + + private void addConvertedPropertyValuesAsList(SqlIdentifierParameterSource parameterSource, + RelationalPersistentProperty property, Iterable values) { + + List convertedIds = new ArrayList<>(); + JdbcValue jdbcValue = null; + for (Object id : values) { + + Class 
columnType = converter.getColumnType(property); + SQLType sqlType = converter.getTargetSqlType(property); + + jdbcValue = converter.writeJdbcValue(id, columnType, sqlType); + convertedIds.add(jdbcValue.getValue()); + } + + Assert.state(jdbcValue != null, "JdbcValue must be not null at this point; Please report this as a bug"); + + SQLType jdbcType = jdbcValue.getJdbcType(); + int typeNumber = jdbcType == null ? JdbcUtils.TYPE_UNKNOWN : jdbcType.getVendorTypeNumber(); + + parameterSource.addValue(SqlGenerator.IDS_SQL_PARAMETER, convertedIds, typeNumber); + } + + @SuppressWarnings("unchecked") + private RelationalPersistentEntity getRequiredPersistentEntity(Class domainType) { + return (RelationalPersistentEntity) context.getRequiredPersistentEntity(domainType); + } + + private SqlIdentifierParameterSource getParameterSource(@Nullable S instance, + RelationalPersistentEntity persistentEntity, String prefix, + Predicate skipProperty) { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + + PersistentPropertyAccessor propertyAccessor = instance != null ? 
persistentEntity.getPropertyAccessor(instance) + : NoValuePropertyAccessor.instance(); + + persistentEntity.doWithAll(property -> { + + if (skipProperty.test(property) || !property.isWritable()) { + return; + } + if (property.isEntity() && !property.isEmbedded()) { + return; + } + + if (property.isEmbedded()) { + + Object value = propertyAccessor.getProperty(property); + RelationalPersistentEntity embeddedEntity = context.getPersistentEntity(property.getTypeInformation()); + SqlIdentifierParameterSource additionalParameters = getParameterSource((T) value, + (RelationalPersistentEntity) embeddedEntity, prefix + property.getEmbeddedPrefix(), skipProperty); + parameters.addAll(additionalParameters); + } else { + + Object value = propertyAccessor.getProperty(property); + SqlIdentifier paramName = property.getColumnName().transform(prefix::concat); + + addConvertedPropertyValue(parameters, property, value, paramName); + } + }); + + return parameters; + } + + /** + * A {@link PersistentPropertyAccessor} implementation always returning null + * + * @param + */ + static class NoValuePropertyAccessor implements PersistentPropertyAccessor { + + private static final NoValuePropertyAccessor INSTANCE = new NoValuePropertyAccessor(); + + static NoValuePropertyAccessor instance() { + return INSTANCE; + } + + @Override + public void setProperty(PersistentProperty property, @Nullable Object value) { + throw new UnsupportedOperationException("Cannot set value on 'null' target object"); + } + + @Override + public Object getProperty(PersistentProperty property) { + return null; + } + + @Override + public T getBean() { + return null; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/package-info.java new file mode 100644 index 0000000000..43ca52cb9d --- /dev/null +++ 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/package-info.java @@ -0,0 +1,7 @@ +/** + * JDBC-specific conversion classes. + */ +@NonNullApi +package org.springframework.data.jdbc.core.convert; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/DialectResolver.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/DialectResolver.java new file mode 100644 index 0000000000..3ec2c9b107 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/DialectResolver.java @@ -0,0 +1,272 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.dialect; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.SQLException; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.io.support.SpringFactoriesLoader; +import org.springframework.dao.NonTransientDataAccessException; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.data.relational.core.dialect.IdGeneration; +import org.springframework.data.relational.core.dialect.InsertRenderContext; +import org.springframework.data.relational.core.dialect.LimitClause; +import org.springframework.data.relational.core.dialect.LockClause; +import org.springframework.data.relational.core.dialect.OrderByNullPrecedence; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.SimpleFunction; +import org.springframework.data.relational.core.sql.render.SelectRenderContext; +import org.springframework.data.util.Optionals; +import org.springframework.jdbc.core.ConnectionCallback; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Resolves a {@link Dialect}. Resolution typically uses {@link JdbcOperations} to obtain and inspect a + * {@link Connection}. Dialect resolution uses Spring's {@link SpringFactoriesLoader spring.factories} to determine + * available {@link JdbcDialectProvider extensions}. 
+ * + * @author Jens Schauder + * @author Mikhail Polivakha + * @since 3.5 + * @see Dialect + * @see SpringFactoriesLoader + */ +public class DialectResolver { + + private static final Log LOG = LogFactory.getLog(DialectResolver.class); + + private static final List DETECTORS = SpringFactoriesLoader + .loadFactories(JdbcDialectProvider.class, DialectResolver.class.getClassLoader()); + + private static final List LEGACY_DETECTORS = SpringFactoriesLoader + .loadFactories(org.springframework.data.jdbc.repository.config.DialectResolver.JdbcDialectProvider.class, + DialectResolver.class.getClassLoader()); + + // utility constructor. + private DialectResolver() {} + + /** + * Retrieve a {@link Dialect} by inspecting a {@link Connection}. + * + * @param operations must not be {@literal null}. + * @return the resolved {@link Dialect} {@link NoDialectException} if the database type cannot be determined from + * {@link DataSource}. + * @throws NoDialectException if no {@link Dialect} can be found. + */ + public static JdbcDialect getDialect(JdbcOperations operations) { + + return Stream.concat(LEGACY_DETECTORS.stream(), DETECTORS.stream()) // + .map(it -> it.getDialect(operations)) // + .flatMap(Optionals::toStream) // + .map(it -> it instanceof JdbcDialect jd ? jd : new JdbcDialectAdapter(it)).findFirst() // + .orElseThrow(() -> new NoDialectException( + String.format("Cannot determine a dialect for %s; Please provide a Dialect", operations))); + } + + /** + * SPI to extend Spring's default JDBC Dialect discovery mechanism. Implementations of this interface are discovered + * through Spring's {@link SpringFactoriesLoader} mechanism. + * + * @author Jens Schauder + * @see SpringFactoriesLoader + */ + public interface JdbcDialectProvider { + + /** + * Returns a {@link Dialect} for a {@link DataSource}. + * + * @param operations the {@link JdbcOperations} to be used with the {@link Dialect}. 
+ * @return {@link Optional} containing the {@link Dialect} if the {@link JdbcDialectProvider} can provide a dialect + * object, otherwise {@link Optional#empty()}. + */ + Optional getDialect(JdbcOperations operations); + } + + public static class DefaultDialectProvider implements JdbcDialectProvider { + + @Override + public Optional getDialect(JdbcOperations operations) { + return Optional.ofNullable(operations.execute((ConnectionCallback) DefaultDialectProvider::getDialect)); + } + + @Nullable + private static JdbcDialect getDialect(Connection connection) throws SQLException { + + DatabaseMetaData metaData = connection.getMetaData(); + + String name = metaData.getDatabaseProductName().toLowerCase(Locale.ENGLISH); + + if (name.contains("hsql")) { + return JdbcHsqlDbDialect.INSTANCE; + } + if (name.contains("h2")) { + return JdbcH2Dialect.INSTANCE; + } + if (name.contains("mysql")) { + return new JdbcMySqlDialect(getIdentifierProcessing(metaData)); + } + if (name.contains("mariadb")) { + return new JdbcMariaDbDialect(getIdentifierProcessing(metaData)); + } + if (name.contains("postgresql")) { + return JdbcPostgresDialect.INSTANCE; + } + if (name.contains("microsoft")) { + return JdbcSqlServerDialect.INSTANCE; + } + if (name.contains("db2")) { + return JdbcDb2Dialect.INSTANCE; + } + if (name.contains("oracle")) { + return JdbcOracleDialect.INSTANCE; + } + + LOG.info(String.format("Couldn't determine Dialect for \"%s\"", name)); + return null; + } + + private static IdentifierProcessing getIdentifierProcessing(DatabaseMetaData metaData) throws SQLException { + + // getIdentifierQuoteString() returns a space " " if identifier quoting is not + // supported. + String quoteString = metaData.getIdentifierQuoteString(); + IdentifierProcessing.Quoting quoting = StringUtils.hasText(quoteString) + ? 
new IdentifierProcessing.Quoting(quoteString) + : IdentifierProcessing.Quoting.NONE; + + IdentifierProcessing.LetterCasing letterCasing; + // IdentifierProcessing tries to mimic the behavior of unquoted identifiers for their quoted variants. + if (metaData.supportsMixedCaseIdentifiers()) { + letterCasing = IdentifierProcessing.LetterCasing.AS_IS; + } else if (metaData.storesUpperCaseIdentifiers()) { + letterCasing = IdentifierProcessing.LetterCasing.UPPER_CASE; + } else if (metaData.storesLowerCaseIdentifiers()) { + letterCasing = IdentifierProcessing.LetterCasing.LOWER_CASE; + } else { // this shouldn't happen since one of the previous cases should be true. + // But if it does happen, we go with the ANSI default. + letterCasing = IdentifierProcessing.LetterCasing.UPPER_CASE; + } + + return IdentifierProcessing.create(quoting, letterCasing); + } + } + + /** + * Exception thrown when {@link DialectResolver} cannot resolve a {@link Dialect}. + */ + public static class NoDialectException extends NonTransientDataAccessException { + + /** + * Constructor for NoDialectFoundException. 
+ * + * @param msg the detail message + */ + protected NoDialectException(String msg) { + super(msg); + } + } + + private static class JdbcDialectAdapter implements JdbcDialect { + + private final Dialect delegate; + private final JdbcArrayColumnsAdapter arrayColumns; + + public JdbcDialectAdapter(Dialect delegate) { + this.delegate = delegate; + this.arrayColumns = new JdbcArrayColumnsAdapter(delegate.getArraySupport()); + } + + @Override + public LimitClause limit() { + return delegate.limit(); + } + + @Override + public LockClause lock() { + return delegate.lock(); + } + + @Override + public JdbcArrayColumns getArraySupport() { + return arrayColumns; + } + + @Override + public SelectRenderContext getSelectContext() { + return delegate.getSelectContext(); + } + + @Override + public IdentifierProcessing getIdentifierProcessing() { + return delegate.getIdentifierProcessing(); + } + + @Override + public Escaper getLikeEscaper() { + return delegate.getLikeEscaper(); + } + + @Override + public IdGeneration getIdGeneration() { + return delegate.getIdGeneration(); + } + + @Override + public Collection getConverters() { + return delegate.getConverters(); + } + + @Override + public Set> simpleTypes() { + return delegate.simpleTypes(); + } + + @Override + public InsertRenderContext getInsertRenderContext() { + return delegate.getInsertRenderContext(); + } + + @Override + public OrderByNullPrecedence orderByNullHandling() { + return delegate.orderByNullHandling(); + } + + @Override + public SimpleFunction getExistsFunction() { + return delegate.getExistsFunction(); + } + + @Override + public boolean supportsSingleQueryLoading() { + return delegate.supportsSingleQueryLoading(); + } + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcArrayColumns.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcArrayColumns.java new file mode 100644 index 0000000000..60568a7ee0 --- /dev/null +++ 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcArrayColumns.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import java.sql.SQLType; + +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.relational.core.dialect.ArrayColumns; + +/** + * {@link ArrayColumns} that offer JDBC-specific functionality. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 3.5 + */ +public interface JdbcArrayColumns extends ArrayColumns { + + @Override + default Class getArrayType(Class userType) { + return ArrayColumns.unwrapComponentType(userType); + } + + /** + * Determine the {@link SQLType} for a given {@link Class array component type}. + * + * @param componentType component type of the array. + * @return the dialect-supported array type. + * @since 3.1.3 + */ + default SQLType getSqlType(Class componentType) { + return JdbcUtil.targetSqlTypeFor(getArrayType(componentType)); + } + + /** + * The appropriate SQL type as a String which should be used to represent the given {@link SQLType} in an + * {@link java.sql.Array}. Defaults to the name of the argument. + * + * @param jdbcType the {@link SQLType} value representing the type that should be stored in the + * {@link java.sql.Array}. Must not be {@literal null}. 
+ * @return the appropriate SQL type as a String which should be used to represent the given {@link SQLType} in an + * {@link java.sql.Array}. Guaranteed to be not {@literal null}. + */ + default String getArrayTypeName(SQLType jdbcType) { + return jdbcType.getName(); + } + + /** + * Default {@link ArrayColumns} implementation for dialects that do not support array-typed columns. + */ + enum Unsupported implements JdbcArrayColumns { + + INSTANCE; + + @Override + public boolean isSupported() { + return false; + } + + @Override + public String getArrayTypeName(SQLType jdbcType) { + throw new UnsupportedOperationException("Array types not supported"); + } + + } + + /** + * Default {@link ArrayColumns} implementation for dialects that do not support array-typed columns. + */ + enum DefaultSupport implements JdbcArrayColumns { + + INSTANCE; + + @Override + public boolean isSupported() { + return true; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcArrayColumnsAdapter.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcArrayColumnsAdapter.java new file mode 100644 index 0000000000..6a117a2d5f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcArrayColumnsAdapter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.data.relational.core.dialect.ArrayColumns; + +/** + * Adapter for {@link ArrayColumns} to be exported as {@link JdbcArrayColumns}. + * + * @author Mark Paluch + * @since 3.5 + */ +record JdbcArrayColumnsAdapter(ArrayColumns arrayColumns) implements JdbcArrayColumns { + + @Override + public boolean isSupported() { + return arrayColumns.isSupported(); + } + + @Override + public Class getArrayType(Class userType) { + return arrayColumns.getArrayType(userType); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcDb2Dialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcDb2Dialect.java new file mode 100644 index 0000000000..2288a44c18 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcDb2Dialect.java @@ -0,0 +1,69 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.dialect; + +import java.sql.Timestamp; +import java.time.OffsetDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.jdbc.core.convert.Jsr310TimestampBasedConverters; +import org.springframework.data.relational.core.dialect.Db2Dialect; + +/** + * {@link Db2Dialect} that registers JDBC specific converters. + * + * @author Jens Schauder + * @author Christoph Strobl + * @since 2.3 + */ +public class JdbcDb2Dialect extends Db2Dialect implements JdbcDialect { + + public static final JdbcDb2Dialect INSTANCE = new JdbcDb2Dialect(); + + protected JdbcDb2Dialect() {} + + @Override + public Collection getConverters() { + + List converters = new ArrayList<>(super.getConverters()); + converters.add(OffsetDateTimeToTimestampConverter.INSTANCE); + converters.add(Jsr310TimestampBasedConverters.LocalDateTimeToTimestampConverter.INSTANCE); + + return converters; + } + + /** + * {@link WritingConverter} from {@link OffsetDateTime} to {@link Timestamp}. The conversion preserves the + * {@link java.time.Instant} represented by {@link OffsetDateTime} + * + * @author Jens Schauder + * @since 2.3 + */ + @WritingConverter + enum OffsetDateTimeToTimestampConverter implements Converter { + + INSTANCE; + + @Override + public Timestamp convert(OffsetDateTime source) { + return Timestamp.from(source.toInstant()); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcDialect.java new file mode 100644 index 0000000000..5728ce4f56 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcDialect.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.data.relational.core.dialect.Dialect; + +/** + * {@link org.springframework.data.relational.core.dialect.ArrayColumns} that offer JDBC specific functionality. + * + * @author Jens Schauder + * @author Mikhail Polivakha + * @since 2.3 + */ +public interface JdbcDialect extends Dialect { + + /** + * Returns the JDBC specific array support object that describes how array-typed columns are supported by this + * dialect. + * + * @return the JDBC specific array support object that describes how array-typed columns are supported by this + * dialect. + */ + default JdbcArrayColumns getArraySupport() { + return JdbcArrayColumns.Unsupported.INSTANCE; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcH2Dialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcH2Dialect.java new file mode 100644 index 0000000000..8f781ef9db --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcH2Dialect.java @@ -0,0 +1,37 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.data.relational.core.dialect.H2Dialect; + +/** + * JDBC-specific H2 Dialect. + * + * @author Mikhail Polivakha + * @since 3.5 + */ +public class JdbcH2Dialect extends H2Dialect implements JdbcDialect { + + public static final JdbcH2Dialect INSTANCE = new JdbcH2Dialect(); + + private static final JdbcArrayColumns ARRAY_COLUMNS = new JdbcArrayColumnsAdapter(H2ArrayColumns.INSTANCE); + + @Override + public JdbcArrayColumns getArraySupport() { + return ARRAY_COLUMNS; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcHsqlDbDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcHsqlDbDialect.java new file mode 100644 index 0000000000..77f7531edc --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcHsqlDbDialect.java @@ -0,0 +1,35 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.data.relational.core.dialect.HsqlDbDialect; + +/** + * JDBC-specific HsqlDB Dialect. + * + * @author Mikhail Polivakha + * @since 3.5 + */ +public class JdbcHsqlDbDialect extends HsqlDbDialect implements JdbcDialect { + + public static final JdbcHsqlDbDialect INSTANCE = new JdbcHsqlDbDialect(); + + @Override + public JdbcArrayColumns getArraySupport() { + return JdbcArrayColumns.DefaultSupport.INSTANCE; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcMariaDbDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcMariaDbDialect.java new file mode 100644 index 0000000000..16c416f736 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcMariaDbDialect.java @@ -0,0 +1,33 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.data.relational.core.dialect.MariaDbDialect; +import org.springframework.data.relational.core.sql.IdentifierProcessing; + +/** + * JDBC-specific MariaDb Dialect. 
+ * + * @author Mikhail Polivakha + * @since 3.5 + */ +public class JdbcMariaDbDialect extends MariaDbDialect implements JdbcDialect { + + public JdbcMariaDbDialect(IdentifierProcessing identifierProcessing) { + super(identifierProcessing); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcMySqlDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcMySqlDialect.java new file mode 100644 index 0000000000..76079db6a4 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcMySqlDialect.java @@ -0,0 +1,85 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.dialect; + +import static java.time.ZoneId.*; + +import java.sql.JDBCType; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.relational.core.dialect.MySqlDialect; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.lang.NonNull; + +/** + * {@link MySqlDialect} that registers JDBC specific converters. + * + * @author Jens Schauder + * @author Christoph Strobl + * @author Mikhail Polivakha + * @since 2.3 + */ +public class JdbcMySqlDialect extends MySqlDialect implements JdbcDialect { + + public static final JdbcMySqlDialect INSTANCE = new JdbcMySqlDialect(); + + public JdbcMySqlDialect(IdentifierProcessing identifierProcessing) { + super(identifierProcessing); + } + + protected JdbcMySqlDialect() {} + + @Override + public Collection getConverters() { + + ArrayList converters = new ArrayList<>(super.getConverters()); + converters.add(OffsetDateTimeToTimestampJdbcValueConverter.INSTANCE); + converters.add(LocalDateTimeToDateConverter.INSTANCE); + + return converters; + } + + @WritingConverter + enum OffsetDateTimeToTimestampJdbcValueConverter implements Converter { + + INSTANCE; + + @Override + public JdbcValue convert(OffsetDateTime source) { + return JdbcValue.of(source, JDBCType.TIMESTAMP); + } + } + + @ReadingConverter + enum LocalDateTimeToDateConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public Date convert(LocalDateTime source) { + return Date.from(source.atZone(systemDefault()).toInstant()); + } + } +} diff --git 
a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcOracleDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcOracleDialect.java new file mode 100644 index 0000000000..3b0b40cce9 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcOracleDialect.java @@ -0,0 +1,38 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.data.relational.core.dialect.ObjectArrayColumns; +import org.springframework.data.relational.core.dialect.OracleDialect; + +/** + * JDBC-specific Oracle Dialect. 
+ * + * @author Mikhail Polivakha + */ +public class JdbcOracleDialect extends OracleDialect implements JdbcDialect { + + public static final JdbcOracleDialect INSTANCE = new JdbcOracleDialect(); + + private static final JdbcArrayColumns ARRAY_COLUMNS = new JdbcArrayColumnsAdapter(ObjectArrayColumns.INSTANCE); + + @Override + public JdbcArrayColumns getArraySupport() { + return ARRAY_COLUMNS; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcPostgresDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcPostgresDialect.java new file mode 100644 index 0000000000..b2c9b91626 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcPostgresDialect.java @@ -0,0 +1,221 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.dialect; + +import java.sql.Array; +import java.sql.JDBCType; +import java.sql.SQLException; +import java.sql.SQLType; +import java.sql.Types; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.function.Consumer; + +import org.postgresql.core.Oid; +import org.postgresql.jdbc.TypeInfoCache; +import org.springframework.data.relational.core.dialect.PostgresDialect; +import org.springframework.util.ClassUtils; + +/** + * JDBC specific Postgres Dialect. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 2.3 + */ +public class JdbcPostgresDialect extends PostgresDialect implements JdbcDialect { + + public static final JdbcPostgresDialect INSTANCE = new JdbcPostgresDialect(); + + private static final JdbcPostgresArrayColumns ARRAY_COLUMNS = new JdbcPostgresArrayColumns(); + + private static final Set> SIMPLE_TYPES; + + static { + + Set> simpleTypes = new HashSet<>(PostgresDialect.INSTANCE.simpleTypes()); + List simpleTypeNames = Arrays.asList( // + "org.postgresql.util.PGobject", // + "org.postgresql.geometric.PGpoint", // + "org.postgresql.geometric.PGbox", // + "org.postgresql.geometric.PGcircle", // + "org.postgresql.geometric.PGline", // + "org.postgresql.geometric.PGpath", // + "org.postgresql.geometric.PGpolygon", // + "org.postgresql.geometric.PGlseg" // + ); + simpleTypeNames.forEach(name -> ifClassPresent(name, simpleTypes::add)); + SIMPLE_TYPES = Collections.unmodifiableSet(simpleTypes); + } + + @Override + public Set> simpleTypes() { + return SIMPLE_TYPES; + } + + @Override + public JdbcArrayColumns getArraySupport() { + return ARRAY_COLUMNS; + } + + /** + * If the class is present on the class path, invoke the specified consumer {@code action} with the class object, + * otherwise do nothing. 
+ * + * @param action block to be executed if a value is present. + */ + private static void ifClassPresent(String className, Consumer> action) { + if (ClassUtils.isPresent(className, PostgresDialect.class.getClassLoader())) { + action.accept(ClassUtils.resolveClassName(className, PostgresDialect.class.getClassLoader())); + } + } + + static class JdbcPostgresArrayColumns implements JdbcArrayColumns { + + private static final boolean TYPE_INFO_PRESENT = ClassUtils.isPresent("org.postgresql.jdbc.TypeInfoCache", + JdbcPostgresDialect.class.getClassLoader()); + + private static final TypeInfoWrapper TYPE_INFO_WRAPPER; + + static { + TYPE_INFO_WRAPPER = TYPE_INFO_PRESENT ? new TypeInfoCacheWrapper() : new TypeInfoWrapper(); + } + + @Override + public boolean isSupported() { + return true; + } + + @Override + public SQLType getSqlType(Class componentType) { + + SQLType sqlType = TYPE_INFO_WRAPPER.getArrayTypeMap().get(componentType); + if (sqlType != null) { + return sqlType; + } + + return JdbcArrayColumns.super.getSqlType(componentType); + } + + @Override + public String getArrayTypeName(SQLType jdbcType) { + + if (jdbcType == JDBCType.DOUBLE) { + return "FLOAT8"; + } + if (jdbcType == JDBCType.REAL) { + return "FLOAT4"; + } + + return jdbcType.getName(); + } + } + + /** + * Wrapper for Postgres types. Defaults to no-op to guard runtimes against absent TypeInfoCache. + * + * @since 3.1.3 + */ + static class TypeInfoWrapper { + + /** + * @return a type map between a Java array component type and its Postgres type. + */ + Map, SQLType> getArrayTypeMap() { + return Collections.emptyMap(); + } + } + + /** + * {@link TypeInfoWrapper} backed by {@link TypeInfoCache}. 
+ * + * @since 3.1.3 + */ + static class TypeInfoCacheWrapper extends TypeInfoWrapper { + + private final Map, SQLType> arrayTypes = new HashMap<>(); + + public TypeInfoCacheWrapper() { + + TypeInfoCache cache = new TypeInfoCache(null, 0); + addWellKnownTypes(cache); + + Iterator it = cache.getPGTypeNamesWithSQLTypes(); + + try { + + while (it.hasNext()) { + + String pgTypeName = it.next(); + int oid = cache.getPGType(pgTypeName); + String javaClassName = cache.getJavaClass(oid); + int arrayOid = cache.getJavaArrayType(pgTypeName); + + if (!ClassUtils.isPresent(javaClassName, getClass().getClassLoader())) { + continue; + } + + Class javaClass = ClassUtils.forName(javaClassName, getClass().getClassLoader()); + + // avoid accidental usage of smaller database types that map to the same Java type or generic-typed SQL + // arrays. + if (javaClass == Array.class || javaClass == String.class || javaClass == Integer.class || oid == Oid.OID + || oid == Oid.MONEY) { + continue; + } + + arrayTypes.put(javaClass, new PGSQLType(pgTypeName, arrayOid)); + } + } catch (SQLException | ClassNotFoundException e) { + throw new IllegalStateException("Cannot create type info mapping", e); + } + } + + private static void addWellKnownTypes(TypeInfoCache cache) { + cache.addCoreType("uuid", Oid.UUID, Types.OTHER, UUID.class.getName(), Oid.UUID_ARRAY); + } + + @Override + Map, SQLType> getArrayTypeMap() { + return arrayTypes; + } + + record PGSQLType(String name, int oid) implements SQLType { + + @Override + public String getName() { + return name; + } + + @Override + public String getVendor() { + return "Postgres"; + } + + @Override + public Integer getVendorTypeNumber() { + return oid; + } + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcSqlServerDialect.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcSqlServerDialect.java new file mode 100644 index 0000000000..bc45ad3dda --- /dev/null +++ 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/JdbcSqlServerDialect.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import microsoft.sql.DateTimeOffset; + +import java.time.Instant; +import java.time.OffsetDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.relational.core.dialect.SqlServerDialect; + +/** + * {@link SqlServerDialect} that registers JDBC specific converters. 
+ * + * @author Jens Schauder + * @author Christoph Strobl + * @author Mikhail Polivakha + * @since 2.3 + */ +public class JdbcSqlServerDialect extends SqlServerDialect implements JdbcDialect { + + public static final JdbcSqlServerDialect INSTANCE = new JdbcSqlServerDialect(); + + @Override + public Collection getConverters() { + + List converters = new ArrayList<>(super.getConverters()); + converters.add(DateTimeOffsetToOffsetDateTimeConverter.INSTANCE); + converters.add(DateTimeOffsetToInstantConverter.INSTANCE); + return converters; + } + + @ReadingConverter + enum DateTimeOffsetToOffsetDateTimeConverter implements Converter { + + INSTANCE; + + @Override + public OffsetDateTime convert(DateTimeOffset source) { + return source.getOffsetDateTime(); + } + } + + @ReadingConverter + enum DateTimeOffsetToInstantConverter implements Converter { + + INSTANCE; + + @Override + public Instant convert(DateTimeOffset source) { + return source.getOffsetDateTime().toInstant(); + } + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/package-info.java new file mode 100644 index 0000000000..645c30d7c6 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/dialect/package-info.java @@ -0,0 +1,7 @@ +/** + * JDBC-specific Dialect implementations. + */ +@NonNullApi +package org.springframework.data.jdbc.core.dialect; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/AggregateReference.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/AggregateReference.java new file mode 100644 index 0000000000..a75054588b --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/AggregateReference.java @@ -0,0 +1,89 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping; + +import java.util.Objects; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * A reference to the aggregate root of a different aggregate. + * + * @param the type of the referenced aggregate root. + * @param the type of the id of the referenced aggregate root. + * @author Jens Schauder + * @author Myeonghyeon Lee + * @since 1.0 + */ +public interface AggregateReference { + + static AggregateReference to(ID id) { + return new IdOnlyAggregateReference<>(id); + } + + /** + * @return the id of the referenced aggregate. May be {@code null}. + */ + @Nullable + ID getId(); + + /** + * An {@link AggregateReference} that only holds the id of the referenced aggregate root. Note that there is no check + * that a matching aggregate for this id actually exists. 
+ * + * @param + * @param + */ + class IdOnlyAggregateReference implements AggregateReference { + + private final ID id; + + public IdOnlyAggregateReference(ID id) { + + Assert.notNull(id, "Id must not be null"); + + this.id = id; + } + + @Override + public ID getId() { + return id; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + IdOnlyAggregateReference that = (IdOnlyAggregateReference) o; + return id.equals(that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + + return "IdOnlyAggregateReference{" + "id=" + id + '}'; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/BasicJdbcPersistentProperty.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/BasicJdbcPersistentProperty.java new file mode 100644 index 0000000000..0b6784ac09 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/BasicJdbcPersistentProperty.java @@ -0,0 +1,51 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.mapping; + +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.model.Property; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.core.mapping.BasicRelationalPersistentProperty; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Extension to {@link BasicRelationalPersistentProperty}. + * + * @author Mark Paluch + * @author Jens Schauder + */ +public class BasicJdbcPersistentProperty extends BasicRelationalPersistentProperty { + + /** + * Creates a new {@link BasicJdbcPersistentProperty}. + * + * @param property must not be {@literal null}. + * @param owner must not be {@literal null}. + * @param simpleTypeHolder must not be {@literal null}. + * @param namingStrategy must not be {@literal null} + * @since 2.0 + */ + public BasicJdbcPersistentProperty(Property property, PersistentEntity owner, + SimpleTypeHolder simpleTypeHolder, NamingStrategy namingStrategy) { + super(property, owner, simpleTypeHolder, namingStrategy); + } + + @Override + public boolean isAssociation() { + return super.isAssociation() || AggregateReference.class.isAssignableFrom(getRawType()); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcMappingContext.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcMappingContext.java new file mode 100644 index 0000000000..deff95518c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcMappingContext.java @@ -0,0 +1,74 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.Property; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.util.TypeInformation; + +/** + * {@link MappingContext} implementation for JDBC. + * + * @author Jens Schauder + * @author Greg Turnquist + * @author Kazuki Shimizu + * @author Oliver Gierke + * @author Mark Paluch + * @author Paul-Christian Volkmer + */ +public class JdbcMappingContext extends RelationalMappingContext { + + /** + * Creates a new {@link JdbcMappingContext}. + */ + public JdbcMappingContext() { + super(); + setSimpleTypeHolder(JdbcSimpleTypes.HOLDER); + } + + /** + * Creates a new {@link JdbcMappingContext} using the given {@link NamingStrategy}. + * + * @param namingStrategy must not be {@literal null}. 
+ */ + public JdbcMappingContext(NamingStrategy namingStrategy) { + super(namingStrategy); + setSimpleTypeHolder(JdbcSimpleTypes.HOLDER); + } + + @Override + protected RelationalPersistentProperty createPersistentProperty(Property property, + RelationalPersistentEntity owner, SimpleTypeHolder simpleTypeHolder) { + BasicJdbcPersistentProperty persistentProperty = new BasicJdbcPersistentProperty(property, owner, simpleTypeHolder, + this.getNamingStrategy()); + applyDefaults(persistentProperty); + return persistentProperty; + } + + @Override + protected boolean shouldCreatePersistentEntityFor(TypeInformation type) { + + return super.shouldCreatePersistentEntityFor(type) // + && !AggregateReference.class.isAssignableFrom(type.getType()) // + && !type.isCollectionLike(); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcSimpleTypes.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcSimpleTypes.java new file mode 100644 index 0000000000..63bbf00b43 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcSimpleTypes.java @@ -0,0 +1,80 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.mapping; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; + +import org.springframework.data.mapping.model.SimpleTypeHolder; + +/** + * Simple constant holder for a {@link SimpleTypeHolder} enriched with specific simple types for relational database + * access. + * + * @author Mark Paluch + * @author Jens Schauder + */ +public abstract class JdbcSimpleTypes { + + public static final Set> AUTOGENERATED_ID_TYPES; + + static { + + Set> classes = new HashSet<>(); + classes.add(Long.class); + classes.add(String.class); + classes.add(BigInteger.class); + classes.add(BigDecimal.class); + classes.add(UUID.class); + AUTOGENERATED_ID_TYPES = Collections.unmodifiableSet(classes); + + Set> simpleTypes = new HashSet<>(); + simpleTypes.add(BigDecimal.class); + simpleTypes.add(BigInteger.class); + simpleTypes.add(Array.class); + simpleTypes.add(Clob.class); + simpleTypes.add(Blob.class); + simpleTypes.add(java.sql.Date.class); + simpleTypes.add(NClob.class); + simpleTypes.add(Ref.class); + simpleTypes.add(RowId.class); + simpleTypes.add(Struct.class); + simpleTypes.add(Time.class); + simpleTypes.add(Timestamp.class); + simpleTypes.add(UUID.class); + simpleTypes.add(JdbcValue.class); + + JDBC_SIMPLE_TYPES = Collections.unmodifiableSet(simpleTypes); + } + + private static final Set> JDBC_SIMPLE_TYPES; + public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(JDBC_SIMPLE_TYPES, true); + + private JdbcSimpleTypes() {} +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcValue.java 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcValue.java new file mode 100644 index 0000000000..33d409d79f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/JdbcValue.java @@ -0,0 +1,72 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping; + +import java.sql.JDBCType; +import java.sql.SQLType; +import java.util.Objects; + +import org.springframework.lang.Nullable; + +/** + * Wraps a value with the JDBCType that should be used to pass it as a bind parameter to a + * {@link java.sql.PreparedStatement}. Register a converter from any type to {@link JdbcValue} in order to control the + * value and the {@link JDBCType} as which a value should get passed to the JDBC driver. 
+ * + * @author Jens Schauder + * @since 2.4 + */ +public class JdbcValue { + + private final Object value; + private final SQLType jdbcType; + + protected JdbcValue(@Nullable Object value, @Nullable SQLType jdbcType) { + + this.value = value; + this.jdbcType = jdbcType; + } + + public static JdbcValue of(@Nullable Object value, @Nullable SQLType jdbcType) { + return new JdbcValue(value, jdbcType); + } + + @Nullable + public Object getValue() { + return this.value; + } + + @Nullable + public SQLType getJdbcType() { + return this.jdbcType; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + JdbcValue jdbcValue = (JdbcValue) o; + return Objects.equals(value, jdbcValue.value) && jdbcType == jdbcValue.jdbcType; + } + + @Override + public int hashCode() { + return Objects.hash(value, jdbcType); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/package-info.java new file mode 100644 index 0000000000..e9fddca812 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/package-info.java @@ -0,0 +1,2 @@ +@org.springframework.lang.NonNullApi +package org.springframework.data.jdbc.core.mapping; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Column.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Column.java new file mode 100644 index 0000000000..eed3e8828c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Column.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.util.Objects; + +/** + * Models a Column for generating SQL for Schema generation. + * + * @author Kurt Niemi + * @since 3.2 + */ +record Column(String name, String type, boolean nullable, boolean identity) { + + public Column(String name, String type) { + this(name, type, false, false); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Column that = (Column) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/DefaultSqlTypeMapping.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/DefaultSqlTypeMapping.java new file mode 100644 index 0000000000..7ab07a0a00 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/DefaultSqlTypeMapping.java @@ -0,0 +1,73 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZonedDateTime; +import java.util.HashMap; +import java.util.UUID; + +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.util.ClassUtils; + +/** + * Class that provides a default implementation of mapping Java type to a Database type. To customize the mapping an + * instance of a class implementing {@link SqlTypeMapping} interface can be set on the {@link Tables} class + * + * @author Kurt Niemi + * @author Evgenii Koba + * @author Jens Schauder + * @since 3.2 + */ +public class DefaultSqlTypeMapping implements SqlTypeMapping { + + private final HashMap, String> typeMap = new HashMap<>(); + + public DefaultSqlTypeMapping() { + + typeMap.put(String.class, "VARCHAR(255 BYTE)"); + typeMap.put(Boolean.class, "TINYINT"); + typeMap.put(Double.class, "DOUBLE"); + typeMap.put(Float.class, "FLOAT"); + typeMap.put(Integer.class, "INT"); + typeMap.put(Long.class, "BIGINT"); + + typeMap.put(BigInteger.class, "BIGINT"); + typeMap.put(BigDecimal.class, "NUMERIC"); + + typeMap.put(UUID.class, "UUID"); + + typeMap.put(LocalDate.class, "DATE"); + typeMap.put(LocalTime.class, "TIME"); + typeMap.put(LocalDateTime.class, "TIMESTAMP"); + + typeMap.put(ZonedDateTime.class, "TIMESTAMPTZ"); + } + + @Override + public String getColumnType(RelationalPersistentProperty property) { 
+ return getColumnType(property.getActualType()); + } + + @Override + public String getColumnType(Class type) { + return typeMap.get(ClassUtils.resolvePrimitiveIfNecessary(type)); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/ForeignKey.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/ForeignKey.java new file mode 100644 index 0000000000..35e65dd85e --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/ForeignKey.java @@ -0,0 +1,30 @@ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.util.List; +import java.util.Objects; + +/** + * Models a Foreign Key for generating SQL for Schema generation. + * + * @author Evgenii Koba + * @since 3.3 + */ +record ForeignKey(String name, String tableName, List columnNames, String referencedTableName, + List referencedColumnNames) { + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + ForeignKey that = (ForeignKey) o; + return Objects.equals(tableName, that.tableName) && Objects.equals(columnNames, that.columnNames) + && Objects.equals(referencedTableName, that.referencedTableName) + && Objects.equals(referencedColumnNames, that.referencedColumnNames); + } + + @Override + public int hashCode() { + return Objects.hash(tableName, columnNames, referencedTableName, referencedColumnNames); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java new file mode 100644 index 0000000000..675eb89b70 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java @@ -0,0 +1,667 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core.mapping.schema; + +import liquibase.CatalogAndSchema; +import liquibase.change.AddColumnConfig; +import liquibase.change.ColumnConfig; +import liquibase.change.ConstraintsConfig; +import liquibase.change.core.AddColumnChange; +import liquibase.change.core.AddForeignKeyConstraintChange; +import liquibase.change.core.CreateTableChange; +import liquibase.change.core.DropColumnChange; +import liquibase.change.core.DropForeignKeyConstraintChange; +import liquibase.change.core.DropTableChange; +import liquibase.changelog.ChangeLogChild; +import liquibase.changelog.ChangeLogParameters; +import liquibase.changelog.ChangeSet; +import liquibase.changelog.DatabaseChangeLog; +import liquibase.database.Database; +import liquibase.exception.ChangeLogParseException; +import liquibase.exception.LiquibaseException; +import liquibase.parser.ChangeLogParser; +import liquibase.parser.core.yaml.YamlChangeLogParser; +import liquibase.resource.DirectoryResourceAccessor; +import liquibase.serializer.ChangeLogSerializer; +import liquibase.serializer.core.yaml.YamlChangeLogSerializer; +import liquibase.snapshot.DatabaseSnapshot; +import liquibase.snapshot.SnapshotControl; +import liquibase.snapshot.SnapshotGeneratorFactory; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.text.Collator; +import java.util.ArrayList; +import 
java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.function.BiPredicate; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.springframework.core.io.Resource; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.util.Predicates; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Use this class to write Liquibase ChangeSets. + *

+ * This writer uses {@link MappingContext} as input to determine mapped entities. Entities can be filtered through a + * {@link #setSchemaFilter(Predicate) schema filter} to include/exclude entities. By default, all entities within the + * mapping context are considered for computing the expected schema. + *

+ * This writer operates in two modes:
+ * <ul>
+ * <li>Initial Schema Creation</li>
+ * <li>Differential Schema Change Creation</li>
+ * </ul>
+ * The {@link #writeChangeSet(Resource) initial mode} allows creating the full schema without considering any existing + * tables. The {@link #writeChangeSet(Resource, Database) differential schema mode} uses a {@link Database} object to + * determine existing tables and columns. It creates in addition to table creations also changes to drop tables, drop + * columns and add columns. By default, the {@link #setDropTableFilter(Predicate) DROP TABLE} and the + * {@link #setDropColumnFilter(BiPredicate) DROP COLUMN} filters exclude all tables respective columns from being + * dropped. + *

+ * In differential schema mode, table and column names are compared using a case-insensitive comparator, see + * {@link Collator#PRIMARY}. + *

+ * The writer can be configured to use specific ChangeLogSerializers and ChangeLogParsers defaulting to YAML. + * + * @author Kurt Niemi + * @author Mark Paluch + * @author Evgenii Koba + * @author Jens Schauder + * @since 3.2 + */ +public class LiquibaseChangeSetWriter { + + public static final String DEFAULT_AUTHOR = "Spring Data Relational"; + private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; + + private SqlTypeMapping sqlTypeMapping = new DefaultSqlTypeMapping(); + + private ChangeLogSerializer changeLogSerializer = new YamlChangeLogSerializer(); + + private ChangeLogParser changeLogParser = new YamlChangeLogParser(); + + /** + * Predicate to identify Liquibase system tables. + */ + private final Predicate isLiquibaseTable = table -> table.toUpperCase(Locale.ROOT) + .startsWith("DATABASECHANGELOG"); + + /** + * Comparator to compare table and column names. + */ + private final Comparator nameComparator = createComparator(); + + private static Comparator createComparator() { + + Collator instance = Collator.getInstance(Locale.ROOT); + instance.setStrength(Collator.PRIMARY); + + return instance::compare; + } + + /** + * Filter predicate to determine which persistent entities should be used for schema generation. + */ + private Predicate> schemaFilter = Predicates.isTrue(); + + /** + * Filter predicate used to determine whether an existing table should be removed. Defaults to {@code false} to keep + * existing tables. + */ + private Predicate dropTableFilter = Predicates.isFalse(); + + /** + * Filter predicate used to determine whether an existing column should be removed. Defaults to {@code false} to keep + * existing columns. + */ + private BiPredicate dropColumnFilter = (table, column) -> false; + + /** + * Use this to generate a ChangeSet that can be used on an empty database. + * + * @param mappingContext source to determine persistent entities, must not be {@literal null}. 
+ */ + public LiquibaseChangeSetWriter( + MappingContext, ? extends RelationalPersistentProperty> mappingContext) { + + Assert.notNull(mappingContext, "MappingContext must not be null"); + + this.mappingContext = mappingContext; + } + + /** + * Configure SQL type mapping. Defaults to {@link DefaultSqlTypeMapping}. + * + * @param sqlTypeMapping must not be {@literal null}. + */ + public void setSqlTypeMapping(SqlTypeMapping sqlTypeMapping) { + + Assert.notNull(sqlTypeMapping, "SqlTypeMapping must not be null"); + + this.sqlTypeMapping = sqlTypeMapping; + } + + /** + * Set the {@link ChangeLogSerializer}. + * + * @param changeLogSerializer must not be {@literal null}. + */ + public void setChangeLogSerializer(ChangeLogSerializer changeLogSerializer) { + + Assert.notNull(changeLogSerializer, "ChangeLogSerializer must not be null"); + + this.changeLogSerializer = changeLogSerializer; + } + + /** + * Set the {@link ChangeLogParser}. + * + * @param changeLogParser must not be {@literal null}. + */ + public void setChangeLogParser(ChangeLogParser changeLogParser) { + + Assert.notNull(changeLogParser, "ChangeLogParser must not be null"); + + this.changeLogParser = changeLogParser; + } + + /** + * Set the filter predicate to identify for which entities to create schema definitions. Existing tables for excluded + * entities will show up in {@link #setDropTableFilter(Predicate)}. Returning {@code true} includes the entity; + * {@code false} excludes the entity from schema creation. + * + * @param schemaFilter must not be {@literal null}. + */ + public void setSchemaFilter(Predicate> schemaFilter) { + + Assert.notNull(schemaFilter, "Schema filter must not be null"); + + this.schemaFilter = schemaFilter; + } + + /** + * Set the filter predicate to identify tables to drop. The predicate accepts the table name. Returning {@code true} + * will delete the table; {@code false} retains the table. + * + * @param dropTableFilter must not be {@literal null}. 
+ */ + public void setDropTableFilter(Predicate dropTableFilter) { + + Assert.notNull(dropTableFilter, "Drop Column filter must not be null"); + + this.dropTableFilter = dropTableFilter; + } + + /** + * Set the filter predicate to identify columns within a table to drop. The predicate accepts the table- and column + * name. Returning {@code true} will delete the column; {@code false} retains the column. + * + * @param dropColumnFilter must not be {@literal null}. + */ + public void setDropColumnFilter(BiPredicate dropColumnFilter) { + + Assert.notNull(dropColumnFilter, "Drop Column filter must not be null"); + + this.dropColumnFilter = dropColumnFilter; + } + + /** + * Write a Liquibase ChangeSet containing all tables as initial ChangeSet. + * + * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). The + * resource must resolve to a valid {@link Resource#getFile()}. + * @throws IOException in case of I/O errors. + */ + public void writeChangeSet(Resource changeLogResource) throws IOException { + writeChangeSet(changeLogResource, ChangeSetMetadata.create()); + } + + /** + * Write a Liquibase ChangeSet using a {@link Database} to identify the differences between mapped entities and the + * existing database. + * + * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). The + * resource must resolve to a valid {@link Resource#getFile()}. + * @param database database to identify the differences. + * @throws LiquibaseException + * @throws IOException in case of I/O errors. + */ + public void writeChangeSet(Resource changeLogResource, Database database) throws IOException, LiquibaseException { + writeChangeSet(changeLogResource, ChangeSetMetadata.create(), database); + } + + /** + * Write a Liquibase ChangeSet containing all tables as initial ChangeSet. 
+ * + * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). + * @param metadata the ChangeSet metadata. + * @throws IOException in case of I/O errors. + */ + public void writeChangeSet(Resource changeLogResource, ChangeSetMetadata metadata) throws IOException { + + DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile(), null); + ChangeSet changeSet = createChangeSet(metadata, databaseChangeLog); + + writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile()); + } + + /** + * Write a Liquibase ChangeSet using a {@link Database} to identify the differences between mapped entities and the + * existing database. + * + * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). + * @param metadata the ChangeSet metadata. + * @param database database to identify the differences. + * @throws LiquibaseException + * @throws IOException in case of I/O errors. + */ + public void writeChangeSet(Resource changeLogResource, ChangeSetMetadata metadata, Database database) + throws LiquibaseException, IOException { + + DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile(), database); + ChangeSet changeSet = createChangeSet(metadata, database, databaseChangeLog); + + writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile()); + } + + /** + * Creates an initial ChangeSet. + * + * @param metadata must not be {@literal null}. + * @param databaseChangeLog must not be {@literal null}. + * @return the initial ChangeSet. + */ + protected ChangeSet createChangeSet(ChangeSetMetadata metadata, DatabaseChangeLog databaseChangeLog) { + return createChangeSet(metadata, initial(), databaseChangeLog); + } + + /** + * Creates a diff ChangeSet by comparing {@link Database} with {@link MappingContext mapped entities}. + * + * @param metadata must not be {@literal null}. 
+ * @param databaseChangeLog must not be {@literal null}. + * @return the diff ChangeSet. + */ + protected ChangeSet createChangeSet(ChangeSetMetadata metadata, Database database, + DatabaseChangeLog databaseChangeLog) throws LiquibaseException { + return createChangeSet(metadata, differenceOf(database), databaseChangeLog); + } + + private ChangeSet createChangeSet(ChangeSetMetadata metadata, SchemaDiff difference, + DatabaseChangeLog databaseChangeLog) { + + ChangeSet changeSet = new ChangeSet(metadata.getId(), metadata.getAuthor(), false, false, "", "", "", + databaseChangeLog); + + generateTableAdditionsDeletions(changeSet, difference); + generateTableModifications(changeSet, difference); + return changeSet; + } + + private SchemaDiff initial() { + + Stream> entities = mappingContext.getPersistentEntities().stream() + .filter(schemaFilter); + Tables mappedEntities = Tables.from(entities, sqlTypeMapping, null, mappingContext); + return SchemaDiff.diff(mappedEntities, Tables.empty(), nameComparator); + } + + private SchemaDiff differenceOf(Database database) throws LiquibaseException { + + Tables existingTables = getLiquibaseModel(database); + Stream> entities = mappingContext.getPersistentEntities().stream() + .filter(schemaFilter); + Tables mappedEntities = Tables.from(entities, sqlTypeMapping, database.getDefaultSchemaName(), mappingContext); + + return SchemaDiff.diff(mappedEntities, existingTables, nameComparator); + } + + private DatabaseChangeLog getDatabaseChangeLog(File changeLogFile, @Nullable Database database) throws IOException { + + ChangeLogParameters parameters = database != null ? 
new ChangeLogParameters(database) : new ChangeLogParameters(); + + if (!changeLogFile.exists()) { + DatabaseChangeLog databaseChangeLog = new DatabaseChangeLog(changeLogFile.getName()); + if (database != null) { + databaseChangeLog.setChangeLogParameters(parameters); + } + return databaseChangeLog; + } + + try { + + File parentDirectory = changeLogFile.getParentFile(); + if (parentDirectory == null) { + parentDirectory = new File("./"); + } + + DirectoryResourceAccessor resourceAccessor = new DirectoryResourceAccessor(parentDirectory); + return changeLogParser.parse(changeLogFile.getName(), parameters, resourceAccessor); + } catch (ChangeLogParseException ex) { + throw new IOException(ex); + } + } + + private void generateTableAdditionsDeletions(ChangeSet changeSet, SchemaDiff difference) { + + for (Table table : difference.tableDeletions()) { + for (ForeignKey foreignKey : table.foreignKeys()) { + DropForeignKeyConstraintChange dropForeignKey = dropForeignKey(foreignKey); + changeSet.addChange(dropForeignKey); + } + } + + for (Table table : difference.tableAdditions()) { + CreateTableChange newTable = changeTable(table); + changeSet.addChange(newTable); + } + + for (Table table : difference.tableDeletions()) { + // Do not delete/drop table if it is an external application table + if (dropTableFilter.test(table.name())) { + changeSet.addChange(dropTable(table)); + } + } + + for (Table table : difference.tableAdditions()) { + for (ForeignKey foreignKey : table.foreignKeys()) { + AddForeignKeyConstraintChange addForeignKey = addForeignKey(foreignKey); + changeSet.addChange(addForeignKey); + } + } + } + + private void generateTableModifications(ChangeSet changeSet, SchemaDiff difference) { + + for (TableDiff table : difference.tableDiffs()) { + + for (ForeignKey foreignKey : table.fkToDrop()) { + DropForeignKeyConstraintChange dropForeignKey = dropForeignKey(foreignKey); + changeSet.addChange(dropForeignKey); + } + + if (!table.columnsToAdd().isEmpty()) { + 
changeSet.addChange(addColumns(table)); + } + + List deletedColumns = getColumnsToDrop(table); + + if (!deletedColumns.isEmpty()) { + changeSet.addChange(dropColumns(table, deletedColumns)); + } + + for (ForeignKey foreignKey : table.fkToAdd()) { + AddForeignKeyConstraintChange addForeignKey = addForeignKey(foreignKey); + changeSet.addChange(addForeignKey); + } + } + } + + private List getColumnsToDrop(TableDiff table) { + + List deletedColumns = new ArrayList<>(); + for (Column column : table.columnsToDrop()) { + + if (dropColumnFilter.test(table.table().name(), column.name())) { + deletedColumns.add(column); + } + } + return deletedColumns; + } + + private void writeChangeSet(DatabaseChangeLog databaseChangeLog, ChangeSet changeSet, File changeLogFile) + throws IOException { + + List changes = new ArrayList<>(databaseChangeLog.getChangeSets()); + changes.add(changeSet); + + try (FileOutputStream fos = new FileOutputStream(changeLogFile)) { + changeLogSerializer.write(changes, fos); + } + } + + private Tables getLiquibaseModel(Database targetDatabase) throws LiquibaseException { + + CatalogAndSchema[] schemas = new CatalogAndSchema[] { targetDatabase.getDefaultSchema() }; + SnapshotControl snapshotControl = new SnapshotControl(targetDatabase); + + DatabaseSnapshot snapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(schemas, targetDatabase, + snapshotControl); + Set tables = snapshot.get(liquibase.structure.core.Table.class); + List existingTables = new ArrayList<>(tables.size()); + + for (liquibase.structure.core.Table table : tables) { + + // Exclude internal Liquibase tables from comparison + if (isLiquibaseTable.test(table.getName())) { + continue; + } + + Table tableModel = new Table(table.getSchema().getName(), table.getName()); + + List columns = table.getColumns(); + + for (liquibase.structure.core.Column column : columns) { + + String type = column.getType().toString(); + boolean nullable = column.isNullable(); + Column columnModel = new 
Column(column.getName(), type, nullable, false); + + tableModel.columns().add(columnModel); + } + + tableModel.foreignKeys().addAll(extractForeignKeys(table)); + + existingTables.add(tableModel); + } + + return new Tables(existingTables); + } + + private static List extractForeignKeys(liquibase.structure.core.Table table) { + + return table.getOutgoingForeignKeys().stream().map(foreignKey -> { + + String tableName = foreignKey.getForeignKeyTable().getName(); + List columnNames = foreignKey.getForeignKeyColumns().stream() + .map(liquibase.structure.core.Column::getName).toList(); + + String referencedTableName = foreignKey.getPrimaryKeyTable().getName(); + List referencedColumnNames = foreignKey.getPrimaryKeyColumns().stream() + .map(liquibase.structure.core.Column::getName).toList(); + + return new ForeignKey(foreignKey.getName(), tableName, columnNames, referencedTableName, referencedColumnNames); + }).collect(Collectors.toList()); + } + + private static AddColumnChange addColumns(TableDiff table) { + + AddColumnChange addColumnChange = new AddColumnChange(); + addColumnChange.setSchemaName(table.table().schema()); + addColumnChange.setTableName(table.table().name()); + + for (Column column : table.columnsToAdd()) { + AddColumnConfig addColumn = createAddColumnChange(column); + addColumnChange.addColumn(addColumn); + } + return addColumnChange; + } + + private static AddColumnConfig createAddColumnChange(Column column) { + + AddColumnConfig config = new AddColumnConfig(); + config.setName(column.name()); + config.setType(column.type()); + + if (column.identity()) { + config.setAutoIncrement(true); + } + + return config; + } + + private static DropColumnChange dropColumns(TableDiff table, Collection deletedColumns) { + + DropColumnChange dropColumnChange = new DropColumnChange(); + dropColumnChange.setSchemaName(table.table().schema()); + dropColumnChange.setTableName(table.table().name()); + + List dropColumns = new ArrayList<>(); + + for (Column column : 
deletedColumns) { + ColumnConfig config = new ColumnConfig(); + config.setName(column.name()); + dropColumns.add(config); + } + + dropColumnChange.setColumns(dropColumns); + return dropColumnChange; + } + + private static CreateTableChange changeTable(Table table) { + + CreateTableChange change = new CreateTableChange(); + change.setSchemaName(table.schema()); + change.setTableName(table.name()); + + for (Column column : table.columns()) { + + ColumnConfig columnConfig = new ColumnConfig(); + columnConfig.setName(column.name()); + columnConfig.setType(column.type()); + + ConstraintsConfig constraints = new ConstraintsConfig(); + constraints.setNullable(column.nullable()); + + if (column.identity()) { + + columnConfig.setAutoIncrement(true); + constraints.setPrimaryKey(true); + } + + columnConfig.setConstraints(constraints); + change.addColumn(columnConfig); + } + + return change; + } + + private static DropTableChange dropTable(Table table) { + + DropTableChange change = new DropTableChange(); + change.setSchemaName(table.schema()); + change.setTableName(table.name()); + change.setCascadeConstraints(true); + + return change; + } + + private static AddForeignKeyConstraintChange addForeignKey(ForeignKey foreignKey) { + + AddForeignKeyConstraintChange change = new AddForeignKeyConstraintChange(); + change.setConstraintName(foreignKey.name()); + change.setBaseTableName(foreignKey.tableName()); + change.setBaseColumnNames(String.join(",", foreignKey.columnNames())); + change.setReferencedTableName(foreignKey.referencedTableName()); + change.setReferencedColumnNames(String.join(",", foreignKey.referencedColumnNames())); + + return change; + } + + private static DropForeignKeyConstraintChange dropForeignKey(ForeignKey foreignKey) { + + DropForeignKeyConstraintChange change = new DropForeignKeyConstraintChange(); + change.setConstraintName(foreignKey.name()); + change.setBaseTableName(foreignKey.tableName()); + + return change; + } + + /** + * Metadata for a ChangeSet. 
+ */ + interface ChangeSetMetadata { + + /** + * Creates a new default {@link ChangeSetMetadata} using the {@link #DEFAULT_AUTHOR default author}. + * + * @return a new default {@link ChangeSetMetadata} using the {@link #DEFAULT_AUTHOR default author}. + */ + static ChangeSetMetadata create() { + return ofAuthor(LiquibaseChangeSetWriter.DEFAULT_AUTHOR); + } + + /** + * Creates a new default {@link ChangeSetMetadata} using a generated {@code identifier} and provided {@code author}. + * + * @return a new default {@link ChangeSetMetadata} using a generated {@code identifier} and provided {@code author}. + */ + static ChangeSetMetadata ofAuthor(String author) { + return of(Long.toString(System.currentTimeMillis()), author); + } + + /** + * Creates a new default {@link ChangeSetMetadata} using the provided {@code identifier} and {@code author}. + * + * @return a new default {@link ChangeSetMetadata} using the provided {@code identifier} and {@code author}. + */ + static ChangeSetMetadata of(String identifier, String author) { + return new DefaultChangeSetMetadata(identifier, author); + } + + /** + * @return the ChangeSet identifier. + */ + String getId(); + + /** + * @return the ChangeSet author. 
+ */ + String getAuthor(); + } + + private record DefaultChangeSetMetadata(String id, String author) implements ChangeSetMetadata { + + private DefaultChangeSetMetadata { + + Assert.hasText(id, "ChangeSet identifier must not be empty or null"); + Assert.hasText(author, "Author must not be empty or null"); + } + + @Override + public String getId() { + return id(); + } + + @Override + public String getAuthor() { + return author(); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java new file mode 100644 index 0000000000..73e468a1a3 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java @@ -0,0 +1,147 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.function.Function; +import java.util.function.Predicate; + +/** + * This class is created to return the difference between a source and target {@link Tables} The difference consists of + * Table Additions, Deletions, and Modified Tables (i.e. 
table exists in both source and target - but has columns to add + * or delete) + * + * @author Kurt Niemi + * @author Evgenii Koba + * @since 3.2 + */ +record SchemaDiff(List
tableAdditions, List
tableDeletions, List tableDiffs) { + + public static SchemaDiff diff(Tables mappedEntities, Tables existingTables, Comparator nameComparator) { + + Map existingIndex = createMapping(existingTables.tables(), SchemaDiff::getKey, nameComparator); + Map mappedIndex = createMapping(mappedEntities.tables(), SchemaDiff::getKey, nameComparator); + + List
toCreate = getTablesToCreate(mappedEntities, withTableKey(existingIndex::containsKey)); + List
toDrop = getTablesToDrop(existingTables, withTableKey(mappedIndex::containsKey)); + + List tableDiffs = diffTable(mappedEntities, existingIndex, withTableKey(existingIndex::containsKey), + nameComparator); + + return new SchemaDiff(toCreate, toDrop, tableDiffs); + } + + private static List
getTablesToCreate(Tables mappedEntities, Predicate
excludeTable) { + + List
toCreate = new ArrayList<>(mappedEntities.tables().size()); + + for (Table table : mappedEntities.tables()) { + if (!excludeTable.test(table)) { + toCreate.add(table); + } + } + + return toCreate; + } + + private static List
getTablesToDrop(Tables existingTables, Predicate
excludeTable) { + + List
toDrop = new ArrayList<>(existingTables.tables().size()); + + for (Table table : existingTables.tables()) { + if (!excludeTable.test(table)) { + toDrop.add(table); + } + } + + return toDrop; + } + + private static List diffTable(Tables mappedEntities, Map existingIndex, + Predicate
includeTable, Comparator nameComparator) { + + List tableDiffs = new ArrayList<>(); + + for (Table mappedEntity : mappedEntities.tables()) { + + if (!includeTable.test(mappedEntity)) { + continue; + } + + // TODO: How to handle changed columns (type?) + + Table existingTable = existingIndex.get(getKey(mappedEntity)); + TableDiff tableDiff = new TableDiff(mappedEntity); + + Map mappedColumns = createMapping(mappedEntity.columns(), Column::name, nameComparator); + Map existingColumns = createMapping(existingTable.columns(), Column::name, nameComparator); + // Identify deleted columns + tableDiff.columnsToDrop().addAll(findDiffs(mappedColumns, existingColumns, nameComparator)); + // Identify added columns and add columns in order. This order can interleave with existing columns. + Collection addedColumns = findDiffs(existingColumns, mappedColumns, nameComparator); + for (Column column : mappedEntity.columns()) { + if (addedColumns.contains(column)) { + tableDiff.columnsToAdd().add(column); + } + } + + Map mappedForeignKeys = createMapping(mappedEntity.foreignKeys(), ForeignKey::name, + nameComparator); + Map existingForeignKeys = createMapping(existingTable.foreignKeys(), ForeignKey::name, + nameComparator); + // Identify deleted foreign keys + tableDiff.fkToDrop().addAll(findDiffs(mappedForeignKeys, existingForeignKeys, nameComparator)); + // Identify added foreign keys + tableDiff.fkToAdd().addAll(findDiffs(existingForeignKeys, mappedForeignKeys, nameComparator)); + + tableDiffs.add(tableDiff); + } + + return tableDiffs; + } + + private static Collection findDiffs(Map baseMapping, Map toCompareMapping, + Comparator nameComparator) { + + Map diff = new TreeMap<>(nameComparator); + diff.putAll(toCompareMapping); + baseMapping.keySet().forEach(diff::remove); + return diff.values(); + } + + private static SortedMap createMapping(List items, Function keyFunction, + Comparator nameComparator) { + + SortedMap mapping = new TreeMap<>(nameComparator); + items.forEach(it -> 
mapping.put(keyFunction.apply(it), it)); + return mapping; + } + + private static String getKey(Table table) { + return table.schema() + "." + table.name(); + } + + private static Predicate
withTableKey(Predicate predicate) { + return it -> predicate.test(getKey(it)); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java new file mode 100644 index 0000000000..5a7da71aed --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java @@ -0,0 +1,117 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Strategy interface for mapping a {@link RelationalPersistentProperty} to a Database type. + * + * @author Kurt Niemi + * @author Mark Paluch + * @author Evgenii Koba + * @author Jens Schauder + * @since 3.2 + */ +@FunctionalInterface +public interface SqlTypeMapping { + + /** + * Determines a column type for a persistent property. + * + * @param property the property for which the type should be determined. + * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Can be {@literal null} if the strategy + * cannot provide a column type. 
+ */ + @Nullable + String getColumnType(RelationalPersistentProperty property); + + /** + * Determines a column type for Class. + * + * @param type class for which the type should be determined. + * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Can be {@literal null} if the strategy + * cannot provide a column type. + * + * @since 3.3 + */ + @Nullable + default String getColumnType(Class type) { + return null; + } + + /** + * Returns the required column type for a persistent property or throws {@link IllegalArgumentException} if the type + * cannot be determined. + * + * @param property the property for which the type should be determined. + * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Can be {@literal null} if the strategy + * cannot provide a column type. + * @throws IllegalArgumentException if the column type cannot be determined. + */ + default String getRequiredColumnType(RelationalPersistentProperty property) { + + String columnType = getColumnType(property); + + if (ObjectUtils.isEmpty(columnType)) { + throw new IllegalArgumentException(String.format("Cannot determine required column type for %s", property)); + } + + return columnType; + } + + /** + * Determine whether a column is nullable. + * + * @param property the property for which nullability should be determined. + * @return whether the property is nullable. + */ + default boolean isNullable(RelationalPersistentProperty property) { + return !property.getActualType().isPrimitive(); + } + + /** + * Returns a composed {@link SqlTypeMapping} that represents a fallback of this type mapping and another. When + * evaluating the composed predicate, if this mapping does not contain a column mapping (i.e. + * {@link #getColumnType(RelationalPersistentProperty)} returns {@literal null}), then the {@code other} mapping is + * evaluated. + *

+ * Any exceptions thrown during evaluation of either type mapping are relayed to the caller; if evaluation of this + * type mapping throws an exception, the {@code other} predicate will not be evaluated. + * + * @param other a type mapping that will be used as fallback, must not be {@literal null}. + * @return a composed type mapping + */ + default SqlTypeMapping and(SqlTypeMapping other) { + + Assert.notNull(other, "Other SqlTypeMapping must not be null"); + + return property -> { + + String columnType = getColumnType(property); + + if (ObjectUtils.isEmpty(columnType)) { + return other.getColumnType(property); + } + + return columnType; + }; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java new file mode 100644 index 0000000000..d72d820df8 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java @@ -0,0 +1,71 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.util.ArrayList; +import java.util.List; + +import java.util.stream.Collectors; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * Models a Table for generating SQL for Schema generation. 
+ * + * @author Kurt Niemi + * @author Evgenii Koba + * @since 3.2 + */ +record Table(@Nullable String schema, String name, List columns, List foreignKeys) { + + public Table(@Nullable String schema, String name) { + this(schema, name, new ArrayList<>(), new ArrayList<>()); + } + + public Table(String name) { + this(null, name); + } + + public List getIdColumns() { + return columns().stream().filter(Column::identity).collect(Collectors.toList()); + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + + if (o == null || getClass() != o.getClass()) { + return false; + } + + Table table = (Table) o; + return ObjectUtils.nullSafeEquals(schema, table.schema) && ObjectUtils.nullSafeEquals(name, table.name); + } + + @Override + public int hashCode() { + + int result = 17; + + result += ObjectUtils.nullSafeHashCode(this.schema); + result += ObjectUtils.nullSafeHashCode(this.name); + + return result; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java new file mode 100644 index 0000000000..0ed33817cd --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.util.ArrayList; +import java.util.List; + +/** + * Used to keep track of columns that should be added or deleted, when performing a difference between a source and + * target {@link Tables}. + * + * @author Kurt Niemi + * @author Evgenii Koba + * @since 3.2 + */ +record TableDiff(Table table, List columnsToAdd, List columnsToDrop, List fkToAdd, + List fkToDrop) { + + public TableDiff(Table table) { + this(table, new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), new ArrayList<>()); + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java new file mode 100644 index 0000000000..c8d28cc309 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java @@ -0,0 +1,217 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.mapping.schema; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Model class that contains Table/Column information that can be used to generate SQL for Schema generation. + * + * @author Kurt Niemi + * @author Evgenii Koba + * @since 3.2 + */ +record Tables(List

tables) { + + public static Tables from(RelationalMappingContext context) { + return from(context.getPersistentEntities().stream(), new DefaultSqlTypeMapping(), null, context); + } + + // TODO: Add support (i.e. create tickets) to support entities, embedded properties, and aggregate references. + + public static Tables from(Stream> persistentEntities, + SqlTypeMapping sqlTypeMapping, @Nullable String defaultSchema, + MappingContext, ? extends RelationalPersistentProperty> context) { + + List foreignKeyMetadataList = new ArrayList<>(); + List
tables = persistentEntities + .filter(it -> it.isAnnotationPresent(org.springframework.data.relational.core.mapping.Table.class)) // + .map(entity -> { + + Table table = new Table(defaultSchema, entity.getTableName().getReference()); + + Set identifierColumns = new LinkedHashSet<>(); + entity.getPersistentProperties(Id.class).forEach(identifierColumns::add); + + for (RelationalPersistentProperty property : entity) { + + if (property.isEntity() && !property.isEmbedded()) { + foreignKeyMetadataList.add(createForeignKeyMetadata(entity, property, context, sqlTypeMapping)); + continue; + } + + Column column = new Column(property.getColumnName().getReference(), sqlTypeMapping.getColumnType(property), + sqlTypeMapping.isNullable(property), identifierColumns.contains(property)); + table.columns().add(column); + } + return table; + }).collect(Collectors.toList()); + + applyForeignKeyMetadata(tables, foreignKeyMetadataList); + + return new Tables(tables); + } + + public static Tables empty() { + return new Tables(Collections.emptyList()); + } + + /** + * Apply all information we know about foreign keys to correctly create foreign and primary keys + */ + private static void applyForeignKeyMetadata(List
tables, List foreignKeyMetadataList) { + + foreignKeyMetadataList.forEach(foreignKeyMetadata -> { + + Table table = tables.stream().filter(t -> t.name().equals(foreignKeyMetadata.tableName)).findAny().orElseThrow(); + + List parentIdColumns = collectParentIdentityColumns(foreignKeyMetadata, foreignKeyMetadataList, tables); + List parentIdColumnNames = parentIdColumns.stream().map(Column::name).toList(); + + String foreignKeyName = getForeignKeyName(foreignKeyMetadata.parentTableName, parentIdColumnNames); + if (parentIdColumnNames.size() == 1) { + + addIfAbsent(table.columns(), new Column(foreignKeyMetadata.referencingColumnName(), + parentIdColumns.get(0).type(), false, table.getIdColumns().isEmpty())); + if (foreignKeyMetadata.keyColumnName() != null) { + addIfAbsent(table.columns(), + new Column(foreignKeyMetadata.keyColumnName(), foreignKeyMetadata.keyColumnType(), false, true)); + } + addIfAbsent(table.foreignKeys(), + new ForeignKey(foreignKeyName, foreignKeyMetadata.tableName(), + List.of(foreignKeyMetadata.referencingColumnName()), foreignKeyMetadata.parentTableName(), + parentIdColumnNames)); + } else { + + addIfAbsent(table.columns(), parentIdColumns.toArray(new Column[0])); + addIfAbsent(table.columns(), + new Column(foreignKeyMetadata.keyColumnName(), foreignKeyMetadata.keyColumnType(), false, true)); + addIfAbsent(table.foreignKeys(), new ForeignKey(foreignKeyName, foreignKeyMetadata.tableName(), + parentIdColumnNames, foreignKeyMetadata.parentTableName(), parentIdColumnNames)); + } + + }); + } + + private static void addIfAbsent(List list, E... elements) { + + for (E element : elements) { + if (!list.contains(element)) { + list.add(element); + } + } + } + + private static List collectParentIdentityColumns(ForeignKeyMetadata child, + List foreignKeyMetadataList, List
tables) { + return collectParentIdentityColumns(child, foreignKeyMetadataList, tables, new HashSet<>()); + } + + private static List collectParentIdentityColumns(ForeignKeyMetadata child, + List foreignKeyMetadataList, List
tables, Set excludeTables) { + + excludeTables.add(child.tableName()); + + Table parentTable = findTableByName(tables, child.parentTableName()); + ForeignKeyMetadata parentMetadata = findMetadataByTableName(foreignKeyMetadataList, child.parentTableName(), + excludeTables); + List parentIdColumns = parentTable.getIdColumns(); + + if (!parentIdColumns.isEmpty()) { + return new ArrayList<>(parentIdColumns); + } + + Assert.state(parentMetadata != null, "parentMetadata must not be null at this stage"); + + List parentParentIdColumns = collectParentIdentityColumns(parentMetadata, foreignKeyMetadataList, tables); + if (parentParentIdColumns.size() == 1) { + Column parentParentIdColumn = parentParentIdColumns.get(0); + Column withChangedName = new Column(parentMetadata.referencingColumnName, parentParentIdColumn.type(), false, + true); + parentParentIdColumns = new LinkedList<>(List.of(withChangedName)); + } + if (parentMetadata.keyColumnName() != null) { + parentParentIdColumns + .add(new Column(parentMetadata.keyColumnName(), parentMetadata.keyColumnType(), false, true)); + } + return parentParentIdColumns; + } + + @Nullable + private static Table findTableByName(List
tables, String tableName) { + return tables.stream().filter(table -> table.name().equals(tableName)).findAny().orElse(null); + } + + @Nullable + private static ForeignKeyMetadata findMetadataByTableName(List metadata, String tableName, + Set excludeTables) { + + return metadata.stream() + .filter(m -> m.tableName().equals(tableName) && !excludeTables.contains(m.parentTableName())).findAny() + .orElse(null); + } + + private static ForeignKeyMetadata createForeignKeyMetadata(RelationalPersistentEntity entity, + RelationalPersistentProperty property, + MappingContext, ? extends RelationalPersistentProperty> context, + SqlTypeMapping sqlTypeMapping) { + + RelationalPersistentEntity childEntity = context.getRequiredPersistentEntity(property.getActualType()); + + String referencedKeyColumnType = null; + if (property.isAnnotationPresent(MappedCollection.class)) { + if (property.getType() == List.class) { + referencedKeyColumnType = sqlTypeMapping.getColumnType(Integer.class); + } else if (property.getType() == Map.class) { + referencedKeyColumnType = sqlTypeMapping.getColumnType(property.getComponentType()); + } + } + + return new ForeignKeyMetadata(childEntity.getTableName().getReference(), + property.getReverseColumnName(entity).getReference(), + Optional.ofNullable(property.getKeyColumn()).map(SqlIdentifier::getReference).orElse(null), + referencedKeyColumnType, entity.getTableName().getReference()); + } + + private static String getForeignKeyName(String referencedTableName, List referencedColumnNames) { + return String.format("%s_%s_fk", referencedTableName, String.join("_", referencedColumnNames)); + } + + private record ForeignKeyMetadata(String tableName, String referencingColumnName, @Nullable String keyColumnName, + @Nullable String keyColumnType, String parentTableName) { + + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java new file mode 100644 index 0000000000..2173c50d6f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java @@ -0,0 +1,7 @@ +/** + * Schema creation and schema update integration with Liquibase. + */ +@NonNullApi +package org.springframework.data.jdbc.core.mapping.schema; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/package-info.java new file mode 100644 index 0000000000..51e8e0fbc1 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/package-info.java @@ -0,0 +1,7 @@ +/** + * Core JDBC implementation. + */ +@NonNullApi +package org.springframework.data.jdbc.core; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisContext.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisContext.java new file mode 100644 index 0000000000..ae43172856 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisContext.java @@ -0,0 +1,115 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.mybatis; + +import java.util.Collections; +import java.util.Map; + +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.lang.Nullable; + +/** + * {@link MyBatisContext} instances get passed to MyBatis mapped statements as arguments, making Ids, instances, + * domainType and other attributes available to the statements. All methods might return {@literal null} depending on + * the kind of values available on invocation. + * + * @author Jens Schauder + * @author Christoph Strobl + */ +public class MyBatisContext { + + private final @Nullable Object id; + private final @Nullable Object instance; + private final @Nullable Identifier identifier; + private final @Nullable Class domainType; + private final Map additionalValues; + + public MyBatisContext(@Nullable Object id, @Nullable Object instance, @Nullable Class domainType, + Map additionalValues) { + + this.id = id; + this.identifier = null; + this.instance = instance; + this.domainType = domainType; + this.additionalValues = additionalValues; + } + + public MyBatisContext(Identifier identifier, @Nullable Object instance, @Nullable Class domainType) { + + this.id = null; + this.identifier = identifier; + this.instance = instance; + this.domainType = domainType; + this.additionalValues = Collections.emptyMap(); + } + + /** + * The ID of the entity to query/act upon. + * + * @return Might return {@code null}. + */ + @Nullable + public Object getId() { + return id; + } + + /** + * The {@link Identifier} for a path to query. + * + * @return Might return {@literal null}. + */ + @Nullable + public Identifier getIdentifier() { + return identifier; + } + + /** + * The entity to act upon. This is {@code null} for queries, since the object doesn't exist before the query. + * + * @return Might return {@code null}. + */ + @Nullable + public Object getInstance() { + return instance; + } + + /** + * The domain type of the entity to query or act upon. 
+ * + * @return Might return {@code null}. + */ + @Nullable + public Class getDomainType() { + return domainType; + } + + /** + * Returns a value for the given key. Used to communicate ids of parent entities. + * + * @param key Must not be {@code null}. + * @return Might return {@code null}. + */ + @Nullable + public Object get(String key) { + + Object value = null; + if (identifier != null) { + value = identifier.toMap().get(key); + } + + return value == null ? additionalValues.get(key) : value; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategy.java new file mode 100644 index 0000000000..beb6cbf6c8 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategy.java @@ -0,0 +1,407 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.mybatis; + +import static java.util.Arrays.*; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.apache.ibatis.cursor.Cursor; +import org.apache.ibatis.session.SqlSession; +import org.mybatis.spring.SqlSessionTemplate; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.*; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.util.Assert; + +/** + * {@link DataAccessStrategy} implementation based on MyBatis. Each method gets mapped to a statement. The name of the + * statement gets constructed as follows: By default, the namespace is based on the class of the entity plus the suffix + * "Mapper". This is then followed by the method name separated by a dot. For methods taking a {@link PropertyPath} as + * argument, the relevant entity is that of the root of the path, and the path itself gets as dot separated String + * appended to the statement name. Each statement gets an instance of {@link MyBatisContext}, which at least has the + * entityType set. 
For methods taking a {@link PropertyPath} the entityType if the context is set to the class of the + * leaf type. + * + * @author Jens Schauder + * @author Kazuki Shimizu + * @author Oliver Gierke + * @author Mark Paluch + * @author Tyler Van Gorder + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @author Christopher Klein + * @author Mikhail Polivakha + * @author Sergey Korotaev + */ +public class MyBatisDataAccessStrategy implements DataAccessStrategy { + + private static final String VERSION_SQL_PARAMETER_NAME_OLD = "___oldOptimisticLockingVersion"; + + private final SqlSession sqlSession; + private NamespaceStrategy namespaceStrategy = NamespaceStrategy.DEFAULT_INSTANCE; + + /** + * Create a {@link DataAccessStrategy} that first checks for queries defined by MyBatis and if it doesn't find one + * uses a {@link DefaultDataAccessStrategy} + */ + public static DataAccessStrategy createCombinedAccessStrategy(RelationalMappingContext context, + JdbcConverter converter, NamedParameterJdbcOperations operations, SqlSession sqlSession, Dialect dialect) { + return createCombinedAccessStrategy(context, converter, operations, sqlSession, NamespaceStrategy.DEFAULT_INSTANCE, + dialect); + } + + /** + * Create a {@link DataAccessStrategy} that first checks for queries defined by MyBatis and if it doesn't find one + * uses a {@link DefaultDataAccessStrategy} + */ + public static DataAccessStrategy createCombinedAccessStrategy(RelationalMappingContext context, + JdbcConverter converter, NamedParameterJdbcOperations operations, SqlSession sqlSession, + NamespaceStrategy namespaceStrategy, Dialect dialect) { + + SqlGeneratorSource sqlGeneratorSource = new SqlGeneratorSource(context, converter, dialect); + SqlParametersFactory sqlParametersFactory = new SqlParametersFactory(context, converter); + InsertStrategyFactory insertStrategyFactory = new InsertStrategyFactory(operations, dialect); + + DataAccessStrategy defaultDataAccessStrategy = new 
DataAccessStrategyFactory( // + sqlGeneratorSource, // + converter, // + operations, // + sqlParametersFactory, // + insertStrategyFactory // + ).create(); + + // the DefaultDataAccessStrategy needs a reference to the returned DataAccessStrategy. This creates a dependency + // cycle. In order to create it, we need something that allows to defer closing the cycle until all the elements are + // created. That is the purpose of the DelegatingAccessStrategy. + MyBatisDataAccessStrategy myBatisDataAccessStrategy = new MyBatisDataAccessStrategy(sqlSession); + myBatisDataAccessStrategy.setNamespaceStrategy(namespaceStrategy); + + return new CascadingDataAccessStrategy( + asList(myBatisDataAccessStrategy, new DelegatingDataAccessStrategy(defaultDataAccessStrategy))); + } + + /** + * Constructs a {@link DataAccessStrategy} based on MyBatis. + *

+ * Use a {@link SqlSessionTemplate} for {@link SqlSession} or a similar implementation tying the session to the proper + * transaction. Note that the resulting {@link DataAccessStrategy} only handles MyBatis. It does not include the + * functionality of the {@link DefaultDataAccessStrategy} which one normally still wants. Use + * {@link #createCombinedAccessStrategy(RelationalMappingContext, JdbcConverter, NamedParameterJdbcOperations, SqlSession, NamespaceStrategy, Dialect)} + * to create such a {@link DataAccessStrategy}. + * + * @param sqlSession Must be non {@literal null}. + * @since 3.1 + */ + public MyBatisDataAccessStrategy(SqlSession sqlSession) { + this.sqlSession = sqlSession; + } + + /** + * Set a NamespaceStrategy to be used. + * + * @param namespaceStrategy Must be non {@literal null} + */ + public void setNamespaceStrategy(NamespaceStrategy namespaceStrategy) { + + Assert.notNull(namespaceStrategy, "The NamespaceStrategy must not be null"); + + this.namespaceStrategy = namespaceStrategy; + } + + @Override + public Object insert(T instance, Class domainType, Identifier identifier, IdValueSource idValueSource) { + + MyBatisContext myBatisContext = new MyBatisContext(identifier, instance, domainType); + sqlSession().insert(namespace(domainType) + ".insert", myBatisContext); + + return myBatisContext.getId(); + } + + @Override + public Object[] insert(List> insertSubjects, Class domainType, IdValueSource idValueSource) { + + return insertSubjects.stream().map( + insertSubject -> insert(insertSubject.getInstance(), domainType, insertSubject.getIdentifier(), idValueSource)) + .toArray(); + } + + @Override + public boolean update(S instance, Class domainType) { + + return sqlSession().update(namespace(domainType) + ".update", + new MyBatisContext(null, instance, domainType, Collections.emptyMap())) != 0; + } + + @Override + public boolean updateWithVersion(S instance, Class domainType, Number previousVersion) { + + String statement = namespace(domainType) 
+ ".updateWithVersion"; + MyBatisContext parameter = new MyBatisContext(null, instance, domainType, + Collections.singletonMap(VERSION_SQL_PARAMETER_NAME_OLD, previousVersion)); + return sqlSession().update(statement, parameter) != 0; + } + + @Override + public void delete(Object id, Class domainType) { + + String statement = namespace(domainType) + ".delete"; + MyBatisContext parameter = new MyBatisContext(id, null, domainType, Collections.emptyMap()); + sqlSession().delete(statement, parameter); + } + + @Override + public void delete(Iterable ids, Class domainType) { + ids.forEach(id -> delete(id, domainType)); + } + + @Override + public void deleteWithVersion(Object id, Class domainType, Number previousVersion) { + + String statement = namespace(domainType) + ".deleteWithVersion"; + MyBatisContext parameter = new MyBatisContext(id, null, domainType, + Collections.singletonMap(VERSION_SQL_PARAMETER_NAME_OLD, previousVersion)); + sqlSession().delete(statement, parameter); + } + + @Override + public void delete(Object rootId, PersistentPropertyPath propertyPath) { + + Class ownerType = getOwnerTyp(propertyPath); + String statement = namespace(ownerType) + ".delete-" + toDashPath(propertyPath); + Class leafType = propertyPath.getLeafProperty().getTypeInformation().getType(); + MyBatisContext parameter = new MyBatisContext(rootId, null, leafType, Collections.emptyMap()); + + sqlSession().delete(statement, parameter); + } + + @Override + public void delete(Iterable rootIds, PersistentPropertyPath propertyPath) { + rootIds.forEach(rootId -> delete(rootId, propertyPath)); + } + + @Override + public void deleteAll(Class domainType) { + + String statement = namespace(domainType) + ".deleteAll"; + MyBatisContext parameter = new MyBatisContext(null, null, domainType, Collections.emptyMap()); + sqlSession().delete(statement, parameter); + } + + @Override + public void deleteAll(PersistentPropertyPath propertyPath) { + + Class leafType = 
propertyPath.getLeafProperty().getTypeInformation().getType(); + + String statement = namespace(getOwnerTyp(propertyPath)) + ".deleteAll-" + toDashPath(propertyPath); + MyBatisContext parameter = new MyBatisContext(null, null, leafType, Collections.emptyMap()); + sqlSession().delete(statement, parameter); + } + + @Override + public void acquireLockById(Object id, LockMode lockMode, Class domainType) { + + String statement = namespace(domainType) + ".acquireLockById"; + MyBatisContext parameter = new MyBatisContext(id, null, domainType, Collections.emptyMap()); + + long result = sqlSession().selectOne(statement, parameter); + if (result < 1) { + + String message = String.format("The lock target does not exist; id: %s, statement: %s", id, statement); + throw new EmptyResultDataAccessException(message, 1); + } + } + + @Override + public void acquireLockAll(LockMode lockMode, Class domainType) { + + String statement = namespace(domainType) + ".acquireLockAll"; + MyBatisContext parameter = new MyBatisContext(null, null, domainType, Collections.emptyMap()); + + sqlSession().selectOne(statement, parameter); + } + + @Override + public T findById(Object id, Class domainType) { + + String statement = namespace(domainType) + ".findById"; + MyBatisContext parameter = new MyBatisContext(id, null, domainType, Collections.emptyMap()); + return sqlSession().selectOne(statement, parameter); + } + + @Override + public List findAll(Class domainType) { + + String statement = namespace(domainType) + ".findAll"; + MyBatisContext parameter = new MyBatisContext(null, null, domainType, Collections.emptyMap()); + return sqlSession().selectList(statement, parameter); + } + + @Override + public Stream streamAll(Class domainType) { + + String statement = namespace(domainType) + ".streamAll"; + MyBatisContext parameter = new MyBatisContext(null, null, domainType, Collections.emptyMap()); + Cursor cursor = sqlSession().selectCursor(statement, parameter); + return 
StreamSupport.stream(cursor.spliterator(), false); + } + + @Override + public List findAllById(Iterable ids, Class domainType) { + + return sqlSession().selectList(namespace(domainType) + ".findAllById", + new MyBatisContext(ids, null, domainType, Collections.emptyMap())); + } + + @Override + public Stream streamAllByIds(Iterable ids, Class domainType) { + + String statement = namespace(domainType) + ".streamAllByIds"; + MyBatisContext parameter = new MyBatisContext(ids, null, domainType, Collections.emptyMap()); + Cursor cursor = sqlSession().selectCursor(statement, parameter); + return StreamSupport.stream(cursor.spliterator(), false); + } + + @Override + public List findAllByPath(Identifier identifier, + PersistentPropertyPath path) { + + String statementName = namespace(getOwnerTyp(path)) + ".findAllByPath-" + path.toDotPath(); + + return sqlSession().selectList(statementName, + new MyBatisContext(identifier, null, path.getLeafProperty().getType())); + } + + @Override + public boolean existsById(Object id, Class domainType) { + + String statement = namespace(domainType) + ".existsById"; + MyBatisContext parameter = new MyBatisContext(id, null, domainType, Collections.emptyMap()); + return sqlSession().selectOne(statement, parameter); + } + + @Override + public List findAll(Class domainType, Sort sort) { + + Map additionalContext = new HashMap<>(); + additionalContext.put("sort", sort); + return sqlSession().selectList(namespace(domainType) + ".findAllSorted", + new MyBatisContext(null, null, domainType, additionalContext)); + } + + @Override + public Stream streamAll(Class domainType, Sort sort) { + + Map additionalContext = new HashMap<>(); + additionalContext.put("sort", sort); + + String statement = namespace(domainType) + ".streamAllSorted"; + MyBatisContext parameter = new MyBatisContext(null, null, domainType, additionalContext); + + Cursor cursor = sqlSession().selectCursor(statement, parameter); + return StreamSupport.stream(cursor.spliterator(), 
false); + } + + @Override + public List findAll(Class domainType, Pageable pageable) { + + Map additionalContext = new HashMap<>(); + additionalContext.put("pageable", pageable); + return sqlSession().selectList(namespace(domainType) + ".findAllPaged", + new MyBatisContext(null, null, domainType, additionalContext)); + } + + @Override + public Optional findOne(Query query, Class probeType) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public List findAll(Query query, Class probeType) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public Stream streamAll(Query query, Class probeType) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public List findAll(Query query, Class probeType, Pageable pageable) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public boolean exists(Query query, Class probeType) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public long count(Query query, Class probeType) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public long count(Class domainType) { + + String statement = namespace(domainType) + ".count"; + MyBatisContext parameter = new MyBatisContext(null, null, domainType, Collections.emptyMap()); + return sqlSession().selectOne(statement, parameter); + } + + private String namespace(Class domainType) { + return this.namespaceStrategy.getNamespace(domainType); + } + + private SqlSession sqlSession() { + return this.sqlSession; + } + + private static String toDashPath(PersistentPropertyPath propertyPath) { + + String dotPath = propertyPath.toDotPath(); + if (dotPath == null) { + return ""; + } + return dotPath.replaceAll("\\.", "-"); + } + + private Class getOwnerTyp(PersistentPropertyPath propertyPath) { + + RelationalPersistentProperty baseProperty = propertyPath.getBaseProperty(); + + Assert.notNull(baseProperty, 
"BaseProperty must not be null"); + + return baseProperty.getOwner().getType(); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/NamespaceStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/NamespaceStrategy.java new file mode 100644 index 0000000000..95e3cd39f5 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/NamespaceStrategy.java @@ -0,0 +1,40 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.mybatis; + +/** + * A strategy to derive a MyBatis namespace from a domainType. + * + * @author Kazuki Shimizu + * @author Jens Schauder + */ +public interface NamespaceStrategy { + + NamespaceStrategy DEFAULT_INSTANCE = new NamespaceStrategy() {}; + + /** + * Get a namespace that corresponds to the given domain type. + *

+ * By default, the namespace is based on the class of the entity plus the suffix "Mapper". + * + * @param domainType Must be non {@literal null}. + * @return a namespace that corresponds to the domain type + */ + default String getNamespace(Class domainType) { + return domainType.getName() + "Mapper"; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/package-info.java new file mode 100644 index 0000000000..8c7a6f928c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/mybatis/package-info.java @@ -0,0 +1,4 @@ +@NonNullApi +package org.springframework.data.jdbc.mybatis; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/QueryMappingConfiguration.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/QueryMappingConfiguration.java new file mode 100644 index 0000000000..8610284ece --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/QueryMappingConfiguration.java @@ -0,0 +1,33 @@ +package org.springframework.data.jdbc.repository; + +import org.springframework.jdbc.core.RowMapper; +import org.springframework.lang.Nullable; + +/** + * Configures a {@link org.springframework.jdbc.core.RowMapper} for each type to be used for extracting entities of that + * type from a {@link java.sql.ResultSet}. + * + * @author Jens Schauder + * @author Evgeni Dimitrov + * @since 1.1 + */ +public interface QueryMappingConfiguration { + + @Nullable + default RowMapper getRowMapper(Class type) { + return null; + } + + /** + * An immutable empty instance that will return {@literal null} for all arguments. 
+ */ + QueryMappingConfiguration EMPTY = new QueryMappingConfiguration() { + + @Override + public RowMapper getRowMapper(Class type) { + return null; + } + + }; + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/AbstractJdbcConfiguration.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/AbstractJdbcConfiguration.java new file mode 100644 index 0000000000..17dc978dc9 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/AbstractJdbcConfiguration.java @@ -0,0 +1,290 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.jdbc.core.JdbcAggregateOperations; +import org.springframework.data.jdbc.core.JdbcAggregateTemplate; +import org.springframework.data.jdbc.core.convert.*; +import org.springframework.data.jdbc.core.dialect.JdbcArrayColumns; +import org.springframework.data.jdbc.core.dialect.JdbcDialect; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.core.mapping.JdbcSimpleTypes; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.RelationalManagedTypes; +import org.springframework.data.relational.core.conversion.RelationalConverter; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.util.TypeScanner; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import 
org.springframework.util.StringUtils; + +/** + * Beans that must be registered for Spring Data JDBC to work. + * + * @author Greg Turnquist + * @author Jens Schauder + * @author Mark Paluch + * @author Michael Simons + * @author Christoph Strobl + * @author Myeonghyeon Lee + * @author Chirag Tailor + * @since 1.1 + */ +@Configuration(proxyBeanMethods = false) +public class AbstractJdbcConfiguration implements ApplicationContextAware { + + private static final Log LOG = LogFactory.getLog(AbstractJdbcConfiguration.class); + + private ApplicationContext applicationContext; + + /** + * Returns the base packages to scan for JDBC mapped entities at startup. Returns the package name of the + * configuration class' (the concrete class, not this one here) by default. So if you have a + * {@code com.acme.AppConfig} extending {@link AbstractJdbcConfiguration} the base package will be considered + * {@code com.acme} unless the method is overridden to implement alternate behavior. + * + * @return the base packages to scan for mapped {@link Table} classes or an empty collection to not enable scanning + * for entities. + * @since 3.0 + */ + protected Collection getMappingBasePackages() { + + Package mappingBasePackage = getClass().getPackage(); + return Collections.singleton(mappingBasePackage == null ? null : mappingBasePackage.getName()); + } + + /** + * Returns a {@link RelationalManagedTypes} object holding the initial entity set. + * + * @return new instance of {@link RelationalManagedTypes}. + * @throws ClassNotFoundException + * @since 3.0 + */ + @Bean + public RelationalManagedTypes jdbcManagedTypes() throws ClassNotFoundException { + return RelationalManagedTypes.fromIterable(getInitialEntitySet()); + } + + /** + * Register a {@link JdbcMappingContext} and apply an optional {@link NamingStrategy}. + * + * @param namingStrategy optional {@link NamingStrategy}. Use + * {@link org.springframework.data.relational.core.mapping.DefaultNamingStrategy#INSTANCE} as fallback. 
+ * @param customConversions see {@link #jdbcCustomConversions()}. + * @param jdbcManagedTypes JDBC managed types, typically discovered through {@link #jdbcManagedTypes() an entity + * scan}. + * @return must not be {@literal null}. + */ + @Bean + public JdbcMappingContext jdbcMappingContext(Optional namingStrategy, + JdbcCustomConversions customConversions, RelationalManagedTypes jdbcManagedTypes) { + + JdbcMappingContext mappingContext = new JdbcMappingContext(namingStrategy.orElse(DefaultNamingStrategy.INSTANCE)); + mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); + mappingContext.setManagedTypes(jdbcManagedTypes); + + return mappingContext; + } + + /** + * Creates a {@link IdGeneratingEntityCallback} bean using the configured + * {@link #jdbcMappingContext(Optional, JdbcCustomConversions, RelationalManagedTypes)} and + * {@link #jdbcDialect(NamedParameterJdbcOperations)}. + * + * @return must not be {@literal null}. + * @since 3.5 + */ + @Bean + public IdGeneratingEntityCallback idGeneratingBeforeSaveCallback(JdbcMappingContext mappingContext, + NamedParameterJdbcOperations operations, Dialect dialect) { + return new IdGeneratingEntityCallback(mappingContext, dialect, operations); + } + + /** + * Creates a {@link RelationalConverter} using the configured + * {@link #jdbcMappingContext(Optional, JdbcCustomConversions, RelationalManagedTypes)}. + * + * @see #jdbcMappingContext(Optional, JdbcCustomConversions, RelationalManagedTypes) + * @see #jdbcCustomConversions() + * @return must not be {@literal null}. + */ + @Bean + public JdbcConverter jdbcConverter(JdbcMappingContext mappingContext, NamedParameterJdbcOperations operations, + @Lazy RelationResolver relationResolver, JdbcCustomConversions conversions, Dialect dialect) { + + org.springframework.data.jdbc.core.dialect.JdbcArrayColumns arrayColumns = dialect instanceof JdbcDialect jd + ? 
jd.getArraySupport() + : JdbcArrayColumns.DefaultSupport.INSTANCE; + DefaultJdbcTypeFactory jdbcTypeFactory = new DefaultJdbcTypeFactory(operations.getJdbcOperations(), arrayColumns); + + return new MappingJdbcConverter(mappingContext, relationResolver, conversions, jdbcTypeFactory); + } + + /** + * Register custom {@link Converter}s in a {@link JdbcCustomConversions} object if required. These + * {@link JdbcCustomConversions} will be registered with the + * {@link #jdbcConverter(JdbcMappingContext, NamedParameterJdbcOperations, RelationResolver, JdbcCustomConversions, Dialect)}. + * Returns an empty {@link JdbcCustomConversions} instance by default. + * + * @return will never be {@literal null}. + */ + @Bean + public JdbcCustomConversions jdbcCustomConversions() { + + try { + + Dialect dialect = applicationContext.getBean(Dialect.class); + SimpleTypeHolder simpleTypeHolder = dialect.simpleTypes().isEmpty() ? JdbcSimpleTypes.HOLDER + : new SimpleTypeHolder(dialect.simpleTypes(), JdbcSimpleTypes.HOLDER); + + return new JdbcCustomConversions( + CustomConversions.StoreConversions.of(simpleTypeHolder, storeConverters(dialect)), userConverters()); + + } catch (NoSuchBeanDefinitionException exception) { + + LOG.warn("No dialect found; CustomConversions will be configured without dialect specific conversions"); + + return new JdbcCustomConversions(); + } + } + + protected List userConverters() { + return Collections.emptyList(); + } + + private List storeConverters(Dialect dialect) { + + List converters = new ArrayList<>(); + converters.addAll(dialect.getConverters()); + converters.addAll(JdbcCustomConversions.storeConverters()); + return converters; + } + + /** + * Register a {@link JdbcAggregateTemplate} as a bean for easy use in applications that need a lower level of + * abstraction than the normal repository abstraction. + * + * @param applicationContext for publishing events. Must not be {@literal null}. + * @param mappingContext the mapping context to be used. 
Must not be {@literal null}. + * @param converter the conversions used when reading and writing from/to the database. Must not be {@literal null}. + * @return a {@link JdbcAggregateTemplate}. Will never be {@literal null}. + */ + @Bean + public JdbcAggregateTemplate jdbcAggregateTemplate(ApplicationContext applicationContext, + JdbcMappingContext mappingContext, JdbcConverter converter, DataAccessStrategy dataAccessStrategy) { + + return new JdbcAggregateTemplate(applicationContext, mappingContext, converter, dataAccessStrategy); + } + + /** + * Create a {@link DataAccessStrategy} for reuse in the {@link JdbcAggregateOperations} and the {@link JdbcConverter}. + * Override this method to register a bean of type {@link DataAccessStrategy} if your use case requires a more + * specialized {@link DataAccessStrategy}. + * + * @return will never be {@literal null}. + */ + @Bean + public DataAccessStrategy dataAccessStrategyBean(NamedParameterJdbcOperations operations, JdbcConverter jdbcConverter, + JdbcMappingContext context, Dialect dialect) { + + SqlGeneratorSource sqlGeneratorSource = new SqlGeneratorSource(context, jdbcConverter, dialect); + DataAccessStrategyFactory factory = new DataAccessStrategyFactory(sqlGeneratorSource, jdbcConverter, operations, + new SqlParametersFactory(context, jdbcConverter), new InsertStrategyFactory(operations, dialect)); + + return factory.create(); + } + + /** + * Resolves a {@link Dialect JDBC dialect} by inspecting {@link NamedParameterJdbcOperations}. + * + * @param operations the {@link NamedParameterJdbcOperations} allowing access to a {@link java.sql.Connection}. + * @return the {@link Dialect} to be used. + * @since 2.0 + * @throws org.springframework.data.jdbc.repository.config.DialectResolver.NoDialectException if the {@link Dialect} + * cannot be determined. 
+ */ + @Bean + public Dialect jdbcDialect(NamedParameterJdbcOperations operations) { + return DialectResolver.getDialect(operations.getJdbcOperations()); + } + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.applicationContext = applicationContext; + } + + /** + * Scans the mapping base package for classes annotated with {@link Table}. By default, it scans for entities in all + * packages returned by {@link #getMappingBasePackages()}. + * + * @see #getMappingBasePackages() + * @return + * @throws ClassNotFoundException + * @since 3.0 + */ + protected Set> getInitialEntitySet() throws ClassNotFoundException { + + Set> initialEntitySet = new HashSet<>(); + + for (String basePackage : getMappingBasePackages()) { + initialEntitySet.addAll(scanForEntities(basePackage)); + } + + return initialEntitySet; + } + + /** + * Scans the given base package for entities, i.e. JDBC-specific types annotated with {@link Table}. + * + * @param basePackage must not be {@literal null}. + * @return a set of classes identified as entities. 
+ * @since 3.0 + */ + @SuppressWarnings("unchecked") + protected Set> scanForEntities(String basePackage) { + + if (!StringUtils.hasText(basePackage)) { + return Collections.emptySet(); + } + + return TypeScanner.typeScanner(AbstractJdbcConfiguration.class.getClassLoader()) // + .forTypesAnnotatedWith(Table.class) // + .scanPackages(basePackage) // + .collectAsSet(); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/DefaultQueryMappingConfiguration.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/DefaultQueryMappingConfiguration.java new file mode 100644 index 0000000000..15ab15a2ed --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/DefaultQueryMappingConfiguration.java @@ -0,0 +1,54 @@ +package org.springframework.data.jdbc.repository.config; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * A {@link QueryMappingConfiguration} that allows for registration of {@link RowMapper}s and + * {@link ResultSetExtractor}s via a fluent Api. 
+ * + * @author Jens Schauder + * @author Evgeni Dimitrov + * @since 1.1 + */ +public class DefaultQueryMappingConfiguration implements QueryMappingConfiguration { + + private Map, RowMapper> mappers = new LinkedHashMap<>(); + + @Nullable + public RowMapper getRowMapper(Class type) { + + Assert.notNull(type, "Type must not be null"); + + RowMapper candidate = mappers.get(type); + + if (candidate == null) { + + for (Map.Entry, RowMapper> entry : mappers.entrySet()) { + + if (type.isAssignableFrom(entry.getKey())) { + candidate = entry.getValue(); + } + } + } + return (RowMapper) candidate; + } + + /** + * Registers the given {@link RowMapper} to be used for the given type. + * + * @return this instance, so this can be used as a fluent interface. + */ + public DefaultQueryMappingConfiguration registerRowMapper(Class type, RowMapper rowMapper) { + + mappers.put(type, rowMapper); + + return this; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/DialectResolver.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/DialectResolver.java new file mode 100644 index 0000000000..1f81381741 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/DialectResolver.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import java.sql.Connection; +import java.util.Optional; + +import javax.sql.DataSource; + +import org.springframework.core.io.support.SpringFactoriesLoader; +import org.springframework.data.jdbc.core.dialect.JdbcDialect; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.jdbc.core.JdbcOperations; + +/** + * Resolves a {@link Dialect}. Resolution typically uses {@link JdbcOperations} to obtain and inspect a + * {@link Connection}. Dialect resolution uses Spring's {@link SpringFactoriesLoader spring.factories} to determine + * available {@link JdbcDialectProvider extensions}. + * + * @author Jens Schauder + * @author Mikhail Polivakha + * @since 2.0 + * @see Dialect + * @see SpringFactoriesLoader + * @deprecated since 3.5, replacement {@link org.springframework.data.jdbc.core.dialect.DialectResolver} was moved to + * the {@link org.springframework.data.jdbc.core.dialect} package. + */ +@Deprecated(since = "3.5", forRemoval = true) +public class DialectResolver { + + // utility constructor. + private DialectResolver() {} + + /** + * Retrieve a {@link Dialect} by inspecting a {@link Connection}. + * + * @param operations must not be {@literal null}. + * @return the resolved {@link Dialect} {@link NoDialectException} if the database type cannot be determined from + * {@link DataSource}. + * @throws NoDialectException if no {@link Dialect} can be found. + */ + public static JdbcDialect getDialect(JdbcOperations operations) { + return org.springframework.data.jdbc.core.dialect.DialectResolver.getDialect(operations); + } + + /** + * SPI to extend Spring's default JDBC Dialect discovery mechanism. Implementations of this interface are discovered + * through Spring's {@link SpringFactoriesLoader} mechanism. 
+ * + * @author Jens Schauder + * @see org.springframework.core.io.support.SpringFactoriesLoader + * @deprecated since 3.5, replacement {@link org.springframework.data.jdbc.core.dialect.DialectResolver} was moved to + * the {@link org.springframework.data.jdbc.core.dialect} package. + */ + @Deprecated(since = "3.5", forRemoval = true) + public interface JdbcDialectProvider + extends org.springframework.data.jdbc.core.dialect.DialectResolver.JdbcDialectProvider { + + /** + * Returns a {@link Dialect} for a {@link DataSource}. + * + * @param operations the {@link JdbcOperations} to be used with the {@link Dialect}. + * @return {@link Optional} containing the {@link Dialect} if the {@link JdbcDialectProvider} can provide a dialect + * object, otherwise {@link Optional#empty()}. + */ + Optional getDialect(JdbcOperations operations); + } + + @Deprecated(since = "3.5", forRemoval = true) + static public class DefaultDialectProvider extends + org.springframework.data.jdbc.core.dialect.DialectResolver.DefaultDialectProvider implements JdbcDialectProvider { + + } + + /** + * Exception thrown when {@link DialectResolver} cannot resolve a {@link Dialect}. + */ + @Deprecated(since = "3.5", forRemoval = true) + public static class NoDialectException + extends org.springframework.data.jdbc.core.dialect.DialectResolver.NoDialectException { + + /** + * Constructor for NoDialectFoundException. + * + * @param msg the detail message + */ + NoDialectException(String msg) { + super(msg); + } + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcAuditing.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcAuditing.java new file mode 100644 index 0000000000..7a3303af3f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcAuditing.java @@ -0,0 +1,67 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.context.annotation.Import; +import org.springframework.data.auditing.DateTimeProvider; +import org.springframework.data.domain.AuditorAware; + +/** + * Annotation to enable auditing in JDBC via annotation configuration. + * + * @see EnableJdbcRepositories + * @author Kazuki Shimizu + */ +@Inherited +@Documented +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Import(JdbcAuditingRegistrar.class) +public @interface EnableJdbcAuditing { + + /** + * Configures the {@link AuditorAware} bean to be used to lookup the current principal. + * + * @see AuditorAware + */ + String auditorAwareRef() default ""; + + /** + * Configures whether the creation and modification dates are set. + */ + boolean setDates() default true; + + /** + * Configures whether the entity shall be marked as modified on creation. + */ + boolean modifyOnCreate() default true; + + /** + * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link java.time.LocalDateTime} to be + * used for setting creation and modification dates. 
+ * + * @see DateTimeProvider + */ + String dateTimeProviderRef() default ""; + +} diff --git a/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositories.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositories.java similarity index 59% rename from src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositories.java rename to spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositories.java index 14ad2b71be..e42547c541 100644 --- a/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositories.java +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositories.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,9 +23,13 @@ import java.lang.annotation.Target; import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Import; import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactoryBean; +import org.springframework.data.repository.config.DefaultRepositoryBaseClass; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; /** * Annotation to enable JDBC repositories. 
Will scan the package of the annotated configuration class for Spring Data @@ -33,18 +37,24 @@ * * @author Jens Schauder * @author Greg Turnquist - * @since 2.0 + * @author Mark Paluch + * @author Fei Dong + * @author Antoine Sauray + * @author Diego Krupitza + * @author Christoph Strobl + * @see AbstractJdbcConfiguration */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @Documented @Inherited -@Import({JdbcRepositoriesRegistrar.class, JdbcConfiguration.class}) +@Import(JdbcRepositoriesRegistrar.class) public @interface EnableJdbcRepositories { /** * Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.: - * {@code @EnableJdbcRepositories("org.my.pkg")} instead of {@code @EnableJdbcRepositories(basePackages="org.my.pkg")}. + * {@code @EnableJdbcRepositories("org.my.pkg")} instead of + * {@code @EnableJdbcRepositories(basePackages="org.my.pkg")}. */ String[] value() default {}; @@ -73,10 +83,17 @@ Filter[] excludeFilters() default {}; /** - * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the - * repositories infrastructure. + * Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So + * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning + * for {@code PersonRepositoryImpl}. */ - boolean considerNestedRepositories() default false; + String repositoryImplementationPostfix() default "Impl"; + + /** + * Configures the location of where to find the Spring Data named queries properties file. Will default to + * {@code META-INF/jdbc-named-queries.properties}. + */ + String namedQueriesLocation() default ""; /** * Returns the {@link FactoryBean} class to be used for each repository instance. 
Defaults to @@ -85,15 +102,51 @@ Class repositoryFactoryBeanClass() default JdbcRepositoryFactoryBean.class; /** - * Configures the location of where to find the Spring Data named queries properties file. Will default to - * {@code META-INF/jdbc-named-queries.properties}. + * Configure the repository base class to be used to create repository proxies for this particular configuration. + * + * @since 2.1 */ - String namedQueriesLocation() default ""; + Class repositoryBaseClass() default DefaultRepositoryBaseClass.class; /** - * Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So - * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning - * for {@code PersonRepositoryImpl}. + * Configure a specific {@link BeanNameGenerator} to be used when creating the repository beans. + * @return the {@link BeanNameGenerator} to be used or the base {@link BeanNameGenerator} interface to indicate context default. + * @since 3.4 */ - String repositoryImplementationPostfix() default "Impl"; + Class nameGenerator() default BeanNameGenerator.class; + + /** + * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the + * repositories infrastructure. + */ + boolean considerNestedRepositories() default false; + + /** + * Configures the name of the {@link org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations} bean + * definition to be used to create repositories discovered through this annotation. Defaults to + * {@code namedParameterJdbcTemplate}. + */ + String jdbcOperationsRef() default ""; + + /** + * Configures the name of the {@link org.springframework.data.jdbc.core.convert.DataAccessStrategy} bean definition to + * be used to create repositories discovered through this annotation. Defaults to {@code defaultDataAccessStrategy}. 
+ */ + String dataAccessStrategyRef() default ""; + + /** + * Configures the name of the {@link DataSourceTransactionManager} bean definition to be used to create repositories + * discovered through this annotation. Defaults to {@code transactionManager}. + * + * @since 2.1 + */ + String transactionManagerRef() default "transactionManager"; + + /** + * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to + * {@link QueryLookupStrategy.Key#CREATE_IF_NOT_FOUND}. + * + * @since 2.1 + */ + QueryLookupStrategy.Key queryLookupStrategy() default QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND; } diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcAuditingRegistrar.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcAuditingRegistrar.java new file mode 100644 index 0000000000..4edb205b1e --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcAuditingRegistrar.java @@ -0,0 +1,96 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import java.lang.annotation.Annotation; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; +import org.springframework.data.auditing.config.AuditingConfiguration; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.relational.auditing.RelationalAuditingCallback; +import org.springframework.util.Assert; + +/** + * {@link ImportBeanDefinitionRegistrar} which registers additional beans in order to enable auditing via the + * {@link EnableJdbcAuditing} annotation. + * + * @see EnableJdbcAuditing + * @author Kazuki Shimizu + * @author Jens Schauder + * @author Christoph Strobl + */ +class JdbcAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { + + private static final String AUDITING_HANDLER_BEAN_NAME = "jdbcAuditingHandler"; + + /** + * {@inheritDoc} + * + * @return the {@link EnableJdbcAuditing} + * @see AuditingBeanDefinitionRegistrarSupport#getAnnotation() + */ + @Override + protected Class getAnnotation() { + return EnableJdbcAuditing.class; + } + + /** + * {@inheritDoc} + * + * @return "{@literal jdbcAuditingHandler}" + * @see AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() + */ + @Override + protected String getAuditingHandlerBeanName() { + return AUDITING_HANDLER_BEAN_NAME; + } + + @Override + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { + builder.setFactoryMethod("from").addConstructorArgReference("jdbcMappingContext"); + } + + @Override
+ protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { + + Assert.notNull(configuration, "AuditingConfiguration must not be null"); + + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)); + } + + @Override + protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, + BeanDefinitionRegistry registry) { + + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); + + BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder + .rootBeanDefinition(RelationalAuditingCallback.class); + listenerBeanDefinitionBuilder + .addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(AUDITING_HANDLER_BEAN_NAME, registry)); + + registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(), + RelationalAuditingCallback.class.getName(), registry); + } +} diff --git a/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrar.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrar.java similarity index 77% rename from src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrar.java rename to spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrar.java index 54a3532b0f..11b8904e13 100644 --- a/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrar.java +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,23 +25,14 @@ * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableJdbcRepositories} annotation. * * @author Jens Schauder - * @since 2.0 */ class JdbcRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableJdbcRepositories.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension() - */ @Override protected RepositoryConfigurationExtension getExtension() { return new JdbcRepositoryConfigExtension(); diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtension.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtension.java new file mode 100644 index 0000000000..64e05f0fbf --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtension.java @@ -0,0 +1,94 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import java.lang.annotation.Annotation; +import java.util.Collection; +import java.util.Collections; +import java.util.Locale; +import java.util.Optional; + +import org.springframework.beans.factory.config.RuntimeBeanReference; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactoryBean; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport; +import org.springframework.data.repository.config.RepositoryConfigurationSource; +import org.springframework.util.StringUtils; + +/** + * {@link org.springframework.data.repository.config.RepositoryConfigurationExtension} extending the repository + * registration process by registering JDBC repositories. 
+ * + * @author Jens Schauder + * @author Fei Dong + * @author Mark Paluch + * @author Antoine Sauray + */ +public class JdbcRepositoryConfigExtension extends RepositoryConfigurationExtensionSupport { + + private static final String DEFAULT_TRANSACTION_MANAGER_BEAN_NAME = "transactionManager"; + + @Override + public String getModuleName() { + return "JDBC"; + } + + @Override + public String getRepositoryFactoryBeanClassName() { + return JdbcRepositoryFactoryBean.class.getName(); + } + + @Override + protected String getModulePrefix() { + return getModuleName().toLowerCase(Locale.US); + } + + @Override + public String getModuleIdentifier() { + return getModulePrefix(); + } + + @Override + public void postProcess(BeanDefinitionBuilder builder, RepositoryConfigurationSource source) { + + source.getAttribute("jdbcOperationsRef") // + .filter(StringUtils::hasText) // + .ifPresent(s -> builder.addPropertyReference("jdbcOperations", s)); + + source.getAttribute("dataAccessStrategyRef") // + .filter(StringUtils::hasText) // + .ifPresent(s -> builder.addPropertyReference("dataAccessStrategy", s)); + + Optional transactionManagerRef = source.getAttribute("transactionManagerRef"); + builder.addPropertyValue("transactionManager", transactionManagerRef.orElse(DEFAULT_TRANSACTION_MANAGER_BEAN_NAME)); + + builder.addPropertyValue("mappingContext", new RuntimeBeanReference(JdbcMappingContext.class)); + builder.addPropertyValue("dialect", new RuntimeBeanReference(Dialect.class)); + builder.addPropertyValue("converter", new RuntimeBeanReference(JdbcConverter.class)); + } + + /** + * In strict mode only domain types having a {@link Table} annotation get a repository. 
+ */ + @Override + protected Collection> getIdentifyingAnnotations() { + return Collections.singleton(Table.class); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/MyBatisJdbcConfiguration.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/MyBatisJdbcConfiguration.java new file mode 100644 index 0000000000..b010d83aa3 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/MyBatisJdbcConfiguration.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import org.apache.ibatis.session.SqlSession; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.mybatis.MyBatisDataAccessStrategy; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; + +/** + * Configuration class tweaking Spring Data JDBC to use a {@link MyBatisDataAccessStrategy} instead of the default one. + * + * @author Oliver Drotbohm + * @since 1.1 + */ +@Configuration(proxyBeanMethods = false) +public class MyBatisJdbcConfiguration extends AbstractJdbcConfiguration { + + private @Autowired SqlSession session; + + @Bean + @Override + public DataAccessStrategy dataAccessStrategyBean(NamedParameterJdbcOperations operations, JdbcConverter jdbcConverter, + JdbcMappingContext context, Dialect dialect) { + + return MyBatisDataAccessStrategy.createCombinedAccessStrategy(context, jdbcConverter, operations, session, dialect); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/package-info.java new file mode 100644 index 0000000000..fe1dfb9ec6 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/config/package-info.java @@ -0,0 +1,4 @@ +@NonNullApi +package org.springframework.data.jdbc.repository.config; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/package-info.java 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/package-info.java new file mode 100644 index 0000000000..58773ae344 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/package-info.java @@ -0,0 +1,4 @@ +@NonNullApi +package org.springframework.data.jdbc.repository; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/AbstractJdbcQuery.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/AbstractJdbcQuery.java new file mode 100644 index 0000000000..100e7e44f4 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/AbstractJdbcQuery.java @@ -0,0 +1,212 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.RowMapperResultSetExtractor; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Base class for queries based on a repository method. It holds the infrastructure for executing a query and knows how + * to execute a query based on the return type of the method. How to construct the query is left to subclasses. + * + * @author Jens Schauder + * @author Kazuki Shimizu + * @author Oliver Gierke + * @author Maciej Walkowiak + * @author Mark Paluch + * @author Dennis Effing + * @author Mikhail Polivakha + * @since 2.0 + */ +public abstract class AbstractJdbcQuery implements RepositoryQuery { + + private final JdbcQueryMethod queryMethod; + private final NamedParameterJdbcOperations operations; + + /** + * Creates a new {@link AbstractJdbcQuery} for the given {@link JdbcQueryMethod} and + * {@link NamedParameterJdbcOperations}. + * + * @param queryMethod must not be {@literal null}. + * @param operations must not be {@literal null}. 
+ */ + AbstractJdbcQuery(JdbcQueryMethod queryMethod, NamedParameterJdbcOperations operations) { + + Assert.notNull(queryMethod, "Query method must not be null"); + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + + this.queryMethod = queryMethod; + this.operations = operations; + } + + @Override + public JdbcQueryMethod getQueryMethod() { + return queryMethod; + } + + /** + * Creates a {@link JdbcQueryExecution} given a {@link ResultSetExtractor} or a {@link RowMapper}. Prefers the given + * {@link ResultSetExtractor} over {@link RowMapper}. + * + * @param extractor may be {@literal null}. + * @param rowMapper must not be {@literal null}. + * @return a JdbcQueryExecution appropriate for {@literal queryMethod}. Guaranteed to be not {@literal null}. + */ + JdbcQueryExecution createReadingQueryExecution(@Nullable ResultSetExtractor extractor, + Supplier> rowMapper) { + + if (getQueryMethod().isCollectionQuery()) { + return extractor != null ? createSingleReadingQueryExecution(extractor) : collectionQuery(rowMapper.get()); + } + + if (getQueryMethod().isStreamQuery()) { + return extractor != null ? createSingleReadingQueryExecution(extractor) : streamQuery(rowMapper.get()); + } + + return extractor != null ? createSingleReadingQueryExecution(extractor) : singleObjectQuery(rowMapper.get()); + } + + JdbcQueryExecution createModifyingQueryExecutor() { + + return (query, parameters) -> { + + int updatedCount = operations.update(query, parameters); + Class returnedObjectType = queryMethod.getReturnedObjectType(); + + return (returnedObjectType == boolean.class || returnedObjectType == Boolean.class) // + ? 
updatedCount != 0 // + : updatedCount; + }; + } + + JdbcQueryExecution singleObjectQuery(RowMapper rowMapper) { + + return (query, parameters) -> { + try { + return operations.queryForObject(query, parameters, rowMapper); + } catch (EmptyResultDataAccessException e) { + return null; + } + }; + } + + JdbcQueryExecution> collectionQuery(RowMapper rowMapper) { + return createSingleReadingQueryExecution(new RowMapperResultSetExtractor<>(rowMapper)); + } + + /** + * Obtain the result type to read from {@link ResultProcessor}. + * + * @param resultProcessor the {@link ResultProcessor} used to determine the result type. Must not be {@literal null}. + * @return the type that should get loaded from the database before it gets converted into the actual return type of a + * method. Guaranteed to be not {@literal null}. + */ + protected Class resolveTypeToRead(ResultProcessor resultProcessor) { + + ReturnedType returnedType = resultProcessor.getReturnedType(); + + if (returnedType.getReturnedType().isAssignableFrom(returnedType.getDomainType())) { + return returnedType.getDomainType(); + } + // Slight deviation from R2DBC: Allow direct mapping into DTOs + return returnedType.isProjecting() && returnedType.getReturnedType().isInterface() ? returnedType.getDomainType() + : returnedType.getReturnedType(); + } + + private JdbcQueryExecution> streamQuery(RowMapper rowMapper) { + return (query, parameters) -> operations.queryForStream(query, parameters, rowMapper); + } + + private JdbcQueryExecution createSingleReadingQueryExecution(ResultSetExtractor resultSetExtractor) { + return (query, parameters) -> operations.query(query, parameters, resultSetExtractor); + } + + /** + * Factory to create a {@link RowMapper} for a given class. + * + * @since 2.3 + */ + public interface RowMapperFactory { + + /** + * Create a {@link RowMapper} based on the expected return type passed in as an argument. + * + * @param result must not be {@code null}. 
+ * @return a {@code RowMapper} producing instances of {@code result}. + */ + RowMapper create(Class result); + + /** + * Obtain a {@code RowMapper} from some other source, typically a {@link org.springframework.beans.factory.BeanFactory}. + * + * @param reference must not be {@code null}. + * @since 3.4 + */ + default RowMapper getRowMapper(String reference) { + throw new UnsupportedOperationException("getRowMapper is not supported"); + } + + /** + * Obtain a {@code ResultSetExtractor} from some other source, typically a {@link org.springframework.beans.factory.BeanFactory}. + * + * @param reference must not be {@code null}. + * @since 3.4 + */ + default ResultSetExtractor getResultSetExtractor(String reference) { + throw new UnsupportedOperationException("getResultSetExtractor is not supported"); + } + } + + /** + * Delegating {@link RowMapper} that reads a row into {@code T} and converts it afterwards into {@code Object}. + * + * @param + * @since 2.3 + */ + protected static class ConvertingRowMapper implements RowMapper { + + private final RowMapper delegate; + private final Converter converter; + + public ConvertingRowMapper(RowMapper delegate, Converter converter) { + this.delegate = delegate; + this.converter = converter; + } + + @Override + public Object mapRow(ResultSet rs, int rowNum) throws SQLException { + + T object = delegate.mapRow(rs, rowNum); + + return object == null ? null : converter.convert(object); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/EscapingParameterSource.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/EscapingParameterSource.java new file mode 100644 index 0000000000..b8f4031556 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/EscapingParameterSource.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.repository.query; + +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.data.relational.core.query.ValueFunction; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; + +/** + * This {@link SqlParameterSource} will apply escaping to its values. + * + * @author Jens Schauder + * @since 3.2 + */ +class EscapingParameterSource implements SqlParameterSource { + + private final SqlParameterSource parameterSource; + private final Escaper escaper; + + public EscapingParameterSource(SqlParameterSource parameterSource, Escaper escaper) { + + this.parameterSource = parameterSource; + this.escaper = escaper; + } + + @Override + public boolean hasValue(String paramName) { + return parameterSource.hasValue(paramName); + } + + @Override + public Object getValue(String paramName) throws IllegalArgumentException { + + Object value = parameterSource.getValue(paramName); + if (value instanceof ValueFunction valueFunction) { + return valueFunction.apply(escaper); + } + return value; + } + + + @Override + public int getSqlType(String paramName) { + return parameterSource.getSqlType(paramName); + } + + @Override + public String getTypeName(String paramName) { + return parameterSource.getTypeName(paramName); + } + + @Override + public String[] getParameterNames() { + return parameterSource.getParameterNames(); + } +} diff --git 
a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcCountQueryCreator.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcCountQueryCreator.java new file mode 100644 index 0000000000..5995352b5f --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcCountQueryCreator.java @@ -0,0 +1,66 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.query; + +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.Expressions; +import org.springframework.data.relational.core.sql.Functions; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.SelectBuilder; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.repository.Lock; +import org.springframework.data.relational.repository.query.RelationalEntityMetadata; +import org.springframework.data.relational.repository.query.RelationalParameterAccessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.parser.PartTree; + +import java.util.Optional; + +/** + * {@link JdbcQueryCreator} that creates {@code COUNT(*)} queries without applying limit/offset and {@link Sort}. 
+ * + * @author Mark Paluch + * @author Diego Krupitza + * @since 2.2 + */ +class JdbcCountQueryCreator extends JdbcQueryCreator { + + JdbcCountQueryCreator(RelationalMappingContext context, PartTree tree, JdbcConverter converter, Dialect dialect, + RelationalEntityMetadata entityMetadata, RelationalParameterAccessor accessor, boolean isSliceQuery, + ReturnedType returnedType, Optional lockMode) { + super(context, tree, converter, dialect, entityMetadata, accessor, isSliceQuery, returnedType, lockMode); + } + + @Override + SelectBuilder.SelectOrdered applyOrderBy(Sort sort, RelationalPersistentEntity entity, Table table, + SelectBuilder.SelectOrdered selectOrdered) { + return selectOrdered; + } + + @Override + SelectBuilder.SelectWhere applyLimitAndOffset(SelectBuilder.SelectLimitOffset limitOffsetBuilder) { + return (SelectBuilder.SelectWhere) limitOffsetBuilder; + } + + @Override + SelectBuilder.SelectLimitOffset createSelectClause(RelationalPersistentEntity entity, Table table) { + return Select.builder().select(Functions.count(Expressions.asterisk())).from(table); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcDeleteQueryCreator.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcDeleteQueryCreator.java new file mode 100644 index 0000000000..a7d187b441 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcDeleteQueryCreator.java @@ -0,0 +1,158 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Stream; + +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.QueryMapper; +import org.springframework.data.mapping.Parameter; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Conditions; +import org.springframework.data.relational.core.sql.Delete; +import org.springframework.data.relational.core.sql.DeleteBuilder.DeleteWhere; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.SelectBuilder.SelectWhere; +import org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; +import 
org.springframework.data.relational.core.sql.render.SqlRenderer; +import org.springframework.data.relational.repository.query.RelationalEntityMetadata; +import org.springframework.data.relational.repository.query.RelationalParameterAccessor; +import org.springframework.data.relational.repository.query.RelationalQueryCreator; +import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Implementation of {@link RelationalQueryCreator} that creates {@link List} of deletion {@link ParametrizedQuery} + * from a {@link PartTree}. + * + * @author Yunyoung LEE + * @author Nikita Konev + * @since 3.5 + */ +class JdbcDeleteQueryCreator extends RelationalQueryCreator> { + + private final RelationalMappingContext context; + private final QueryMapper queryMapper; + private final RelationalEntityMetadata entityMetadata; + private final RenderContextFactory renderContextFactory; + + /** + * Creates new instance of this class with the given {@link PartTree}, {@link JdbcConverter}, {@link Dialect}, + * {@link RelationalEntityMetadata} and {@link RelationalParameterAccessor}. + * + * @param context + * @param tree part tree, must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param dialect must not be {@literal null}. + * @param entityMetadata relational entity metadata, must not be {@literal null}. + * @param accessor parameter metadata provider, must not be {@literal null}. 
+ */ + JdbcDeleteQueryCreator(RelationalMappingContext context, PartTree tree, JdbcConverter converter, Dialect dialect, + RelationalEntityMetadata entityMetadata, RelationalParameterAccessor accessor) { + + super(tree, accessor); + + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(entityMetadata, "Relational entity metadata must not be null"); + + this.context = context; + this.entityMetadata = entityMetadata; + this.queryMapper = new QueryMapper(converter); + this.renderContextFactory = new RenderContextFactory(dialect); + } + + @Override + protected List complete(@Nullable Criteria criteria, Sort sort) { + + RelationalPersistentEntity entity = entityMetadata.getTableEntity(); + Table table = Table.create(entityMetadata.getTableName()); + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + SqlContext sqlContext = new SqlContext(entity); + + Condition condition = criteria == null ? null + : queryMapper.getMappedObject(parameterSource, criteria, table, entity); + + // create select criteria query for subselect + SelectWhere selectBuilder = StatementBuilder.select(sqlContext.getIdColumn()).from(table); + Select select = condition == null ? selectBuilder.build() : selectBuilder.where(condition).build(); + + // create delete relation queries + List deleteChain = new ArrayList<>(); + deleteRelations(deleteChain, entity, select); + + // create delete query + DeleteWhere deleteBuilder = StatementBuilder.delete(table); + Delete delete = condition == null ?
deleteBuilder.build() : deleteBuilder.where(condition).build(); + + deleteChain.add(delete); + + SqlRenderer renderer = SqlRenderer.create(renderContextFactory.createRenderContext()); + + List queries = new ArrayList<>(deleteChain.size()); + for (Delete d : deleteChain) { + queries.add(new ParametrizedQuery(renderer.render(d), parameterSource)); + } + + return queries; + } + + private void deleteRelations(List deleteChain, RelationalPersistentEntity entity, Select parentSelect) { + + for (PersistentPropertyPath path : context + .findPersistentPropertyPaths(entity.getType(), p -> true)) { + + AggregatePath aggregatePath = context.getAggregatePath(path); + + // prevent duplication on recursive call + if (path.getLength() > 1 && !aggregatePath.getParentPath().isEmbedded()) { + continue; + } + + if (aggregatePath.isEntity() && !aggregatePath.isEmbedded()) { + + SqlContext sqlContext = new SqlContext(aggregatePath.getLeafEntity()); + + Condition inCondition = Conditions + .in(sqlContext.getTable().column(aggregatePath.getTableInfo().reverseColumnInfo().name()), parentSelect); + + Select select = StatementBuilder.select( // + sqlContext.getTable().column(aggregatePath.getIdDefiningParentPath().getTableInfo().idColumnName()) // + ).from(sqlContext.getTable()) // + .where(inCondition) // + .build(); + deleteRelations(deleteChain, aggregatePath.getLeafEntity(), select); + + deleteChain.add(StatementBuilder.delete(sqlContext.getTable()).where(inCondition).build()); + } + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcParameters.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcParameters.java new file mode 100755 index 0000000000..ddfb1e7431 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcParameters.java @@ -0,0 +1,99 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import java.sql.SQLType; +import java.util.List; + +import org.springframework.core.MethodParameter; +import org.springframework.data.jdbc.core.convert.JdbcColumnTypes; +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.relational.repository.query.RelationalParameters; +import org.springframework.data.repository.query.Parameter; +import org.springframework.data.repository.query.ParametersSource; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.TypeInformation; + +/** + * Custom extension of {@link RelationalParameters}. + * + * @author Mark Paluch + * @since 3.2.6 + */ +public class JdbcParameters extends RelationalParameters { + + /** + * Creates a new {@link JdbcParameters} instance from the given {@link ParametersSource}. + * + * @param parametersSource must not be {@literal null}. 
+ */ + public JdbcParameters(ParametersSource parametersSource) { + super(parametersSource, + methodParameter -> new JdbcParameter(methodParameter, parametersSource.getDomainTypeInformation())); + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + private JdbcParameters(List parameters) { + super((List) parameters); + } + + @Override + public JdbcParameter getParameter(int index) { + return (JdbcParameter) super.getParameter(index); + } + + @Override + @SuppressWarnings({ "rawtypes", "unchecked" }) + protected JdbcParameters createFrom(List parameters) { + return new JdbcParameters((List) parameters); + } + + /** + * Custom {@link Parameter} implementation. + * + * @author Mark Paluch + * @author Chirag Tailor + */ + public static class JdbcParameter extends RelationalParameter { + + private final SQLType sqlType; + private final Lazy actualSqlType; + + /** + * Creates a new {@link RelationalParameter}. + * + * @param parameter must not be {@literal null}. + */ + JdbcParameter(MethodParameter parameter, TypeInformation domainType) { + super(parameter, domainType); + + TypeInformation typeInformation = getTypeInformation(); + + sqlType = JdbcUtil.targetSqlTypeFor(JdbcColumnTypes.INSTANCE.resolvePrimitiveType(typeInformation.getType())); + + actualSqlType = Lazy.of(() -> JdbcUtil + .targetSqlTypeFor(JdbcColumnTypes.INSTANCE.resolvePrimitiveType(typeInformation.getActualType().getType()))); + } + + public SQLType getSqlType() { + return sqlType; + } + + public SQLType getActualSqlType() { + return actualSqlType.get(); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryCreator.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryCreator.java new file mode 100644 index 0000000000..cc28ff2f18 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryCreator.java @@ -0,0 +1,373 @@ +/* + * Copyright 2020-2025 the original 
author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.QueryMapper; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Expressions; +import org.springframework.data.relational.core.sql.Functions; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.SelectBuilder; +import 
org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.render.SqlRenderer; +import org.springframework.data.relational.repository.Lock; +import org.springframework.data.relational.repository.query.RelationalEntityMetadata; +import org.springframework.data.relational.repository.query.RelationalParameterAccessor; +import org.springframework.data.relational.repository.query.RelationalQueryCreator; +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.parser.Part; +import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Implementation of {@link RelationalQueryCreator} that creates {@link ParametrizedQuery} from a {@link PartTree}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @author Diego Krupitza + * @since 2.0 + */ +class JdbcQueryCreator extends RelationalQueryCreator { + + private final RelationalMappingContext context; + private final PartTree tree; + private final RelationalParameterAccessor accessor; + private final QueryMapper queryMapper; + private final RelationalEntityMetadata entityMetadata; + private final RenderContextFactory renderContextFactory; + private final boolean isSliceQuery; + private final ReturnedType returnedType; + private final Optional lockMode; + + /** + * Creates new instance of this class with the given {@link PartTree}, {@link JdbcConverter}, {@link Dialect}, + * {@link RelationalEntityMetadata} and {@link RelationalParameterAccessor}. + * + * @param context the mapping context. Must not be {@literal null}. + * @param tree part tree, must not be {@literal null}. 
+ * @param converter must not be {@literal null}. + * @param dialect must not be {@literal null}. + * @param entityMetadata relational entity metadata, must not be {@literal null}. + * @param accessor parameter metadata provider, must not be {@literal null}. + * @param isSliceQuery flag denoting if the query returns a {@link org.springframework.data.domain.Slice}. + * @param returnedType the {@link ReturnedType} to be returned by the query. Must not be {@literal null}. + */ + JdbcQueryCreator(RelationalMappingContext context, PartTree tree, JdbcConverter converter, Dialect dialect, + RelationalEntityMetadata entityMetadata, RelationalParameterAccessor accessor, boolean isSliceQuery, + ReturnedType returnedType, Optional lockMode) { + super(tree, accessor); + + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(entityMetadata, "Relational entity metadata must not be null"); + Assert.notNull(returnedType, "ReturnedType must not be null"); + + this.context = context; + this.tree = tree; + this.accessor = accessor; + + this.entityMetadata = entityMetadata; + this.queryMapper = new QueryMapper(converter); + this.renderContextFactory = new RenderContextFactory(dialect); + this.isSliceQuery = isSliceQuery; + this.returnedType = returnedType; + this.lockMode = lockMode; + } + + /** + * Validate parameters for the derived query. Specifically checking that the query method defines scalar parameters + * and collection parameters where required and that invalid parameter declarations are rejected. + * + * @param tree the tree structure defining the predicate of the query. + * @param parameters parameters for the predicate. 
+ */ + static void validate(PartTree tree, Parameters parameters, RelationalMappingContext context) { + + RelationalQueryCreator.validate(tree, parameters); + + for (PartTree.OrPart parts : tree) { + for (Part part : parts) { + + PersistentPropertyPath propertyPath = context + .getPersistentPropertyPath(part.getProperty()); + AggregatePath path = context.getAggregatePath(propertyPath); + + path.forEach(JdbcQueryCreator::validateProperty); + } + } + } + + private static void validateProperty(AggregatePath path) { + + if (path.isRoot()) { + return; + } + + if (!path.getParentPath().isEmbedded() && path.getLength() > 2) { + throw new IllegalArgumentException(String.format("Cannot query by nested property: %s", path.toDotPath())); + } + + if (path.isMultiValued() || path.isMap()) { + throw new IllegalArgumentException( + String.format("Cannot query by multi-valued property: %s", path.getRequiredLeafProperty().getName())); + } + + if (!path.isEmbedded() && path.isEntity()) { + throw new IllegalArgumentException(String.format("Cannot query by nested entity: %s", path.toDotPath())); + } + } + + /** + * Creates {@link ParametrizedQuery} applying the given {@link Criteria} and {@link Sort} definition. + * + * @param criteria {@link Criteria} to be applied to query + * @param sort sort option to be applied to query, must not be {@literal null}. 
+ * @return instance of {@link ParametrizedQuery} + */ + @Override + protected ParametrizedQuery complete(@Nullable Criteria criteria, Sort sort) { + + RelationalPersistentEntity entity = entityMetadata.getTableEntity(); + Table table = Table.create(entityMetadata.getTableName()); + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + SelectBuilder.SelectLimitOffset limitOffsetBuilder = createSelectClause(entity, table); + SelectBuilder.SelectWhere whereBuilder = applyLimitAndOffset(limitOffsetBuilder); + SelectBuilder.SelectOrdered selectOrderBuilder = applyCriteria(criteria, entity, table, parameterSource, + whereBuilder); + selectOrderBuilder = applyOrderBy(sort, entity, table, selectOrderBuilder); + + SelectBuilder.BuildSelect completedBuildSelect = selectOrderBuilder; + if (this.lockMode.isPresent()) { + completedBuildSelect = selectOrderBuilder.lock(this.lockMode.get().value()); + } + + Select select = completedBuildSelect.build(); + + String sql = SqlRenderer.create(renderContextFactory.createRenderContext()).render(select); + + return new ParametrizedQuery(sql, parameterSource); + } + + SelectBuilder.SelectOrdered applyOrderBy(Sort sort, RelationalPersistentEntity entity, Table table, + SelectBuilder.SelectOrdered selectOrdered) { + + return sort.isSorted() ? // + selectOrdered.orderBy(queryMapper.getMappedSort(table, sort, entity)) // + : selectOrdered; + } + + SelectBuilder.SelectOrdered applyCriteria(@Nullable Criteria criteria, RelationalPersistentEntity entity, + Table table, MapSqlParameterSource parameterSource, SelectBuilder.SelectWhere whereBuilder) { + + return criteria != null // + ? 
whereBuilder.where(queryMapper.getMappedObject(parameterSource, criteria, table, entity)) // + : whereBuilder; + } + + SelectBuilder.SelectWhere applyLimitAndOffset(SelectBuilder.SelectLimitOffset limitOffsetBuilder) { + + if (tree.isExistsProjection()) { + limitOffsetBuilder = limitOffsetBuilder.limit(1); + } else if (tree.isLimiting()) { + limitOffsetBuilder = limitOffsetBuilder.limit(tree.getMaxResults()); + } + + Pageable pageable = accessor.getPageable(); + if (pageable.isPaged()) { + limitOffsetBuilder = limitOffsetBuilder.limit(isSliceQuery ? pageable.getPageSize() + 1 : pageable.getPageSize()) + .offset(pageable.getOffset()); + } + + return (SelectBuilder.SelectWhere) limitOffsetBuilder; + } + + SelectBuilder.SelectLimitOffset createSelectClause(RelationalPersistentEntity entity, Table table) { + + SelectBuilder.SelectJoin builder; + if (tree.isExistsProjection()) { + + Column idColumn = table.column(entity.getIdColumn()); + builder = Select.builder().select(idColumn).from(table); + } else if (tree.isCountProjection()) { + builder = Select.builder().select(Functions.count(Expressions.asterisk())).from(table); + } else { + builder = selectBuilder(table); + } + + return (SelectBuilder.SelectLimitOffset) builder; + } + + private SelectBuilder.SelectJoin selectBuilder(Table table) { + + List columnExpressions = new ArrayList<>(); + RelationalPersistentEntity entity = entityMetadata.getTableEntity(); + SqlContext sqlContext = new SqlContext(entity); + + List joinTables = new ArrayList<>(); + for (PersistentPropertyPath path : context + .findPersistentPropertyPaths(entity.getType(), p -> true)) { + + AggregatePath aggregatePath = context.getAggregatePath(path); + + if (returnedType.needsCustomConstruction()) { + if (!returnedType.getInputProperties().contains(aggregatePath.getRequiredBaseProperty().getName())) { + continue; + } + } + + // add a join if necessary + Join join = getJoin(sqlContext, aggregatePath); + if (join != null) { + joinTables.add(join); + } + 
+ Column column = getColumn(sqlContext, aggregatePath); + if (column != null) { + columnExpressions.add(column); + } + } + + SelectBuilder.SelectAndFrom selectBuilder = StatementBuilder.select(columnExpressions); + SelectBuilder.SelectJoin baseSelect = selectBuilder.from(table); + + for (Join join : joinTables) { + baseSelect = baseSelect.leftOuterJoin(join.joinTable).on(join.joinColumn).equals(join.parentId); + } + + return baseSelect; + } + + /** + * Create a {@link Column} for {@link AggregatePath}. + * + * @param sqlContext the context used to resolve tables and columns. + * @param path the path to the column in question. + * @return the {@link Column} to select for the given path, or {@literal null} if no column should be selected. + */ + @Nullable + private Column getColumn(SqlContext sqlContext, AggregatePath path) { + + // an embedded itself doesn't give a column, its members will though. + // if there is a collection or map on the path it won't get selected at all, but it will get loaded with a separate + // select + // only the parent path is considered in order to handle arrays that get stored as BINARY properly + if (path.isEmbedded() || path.getParentPath().isMultiValued()) { + return null; + } + + if (path.isEntity()) { + + // Simple entities without id include their backreference as a synthetic id in order to distinguish null entities + // from entities with only null values.
+ + if (path.isQualified() // + || path.isCollectionLike() // + || path.hasIdProperty() // + ) { + return null; + } + + return sqlContext.getReverseColumn(path); + } + + return sqlContext.getColumn(path); + } + + @Nullable + Join getJoin(SqlContext sqlContext, AggregatePath path) { + + if (!path.isEntity() || path.isEmbedded() || path.isMultiValued()) { + return null; + } + + Table currentTable = sqlContext.getTable(path); + + AggregatePath idDefiningParentPath = path.getIdDefiningParentPath(); + Table parentTable = sqlContext.getTable(idDefiningParentPath); + + return new Join( // + currentTable, // + currentTable.column(path.getTableInfo().reverseColumnInfo().name()), // + parentTable.column(idDefiningParentPath.getTableInfo().idColumnName()) // + ); + } + + /** + * Value object representing a {@code JOIN} association. + */ + static private final class Join { + + private final Table joinTable; + private final Column joinColumn; + private final Column parentId; + + Join(Table joinTable, Column joinColumn, Column parentId) { + + Assert.notNull(joinTable, "JoinTable must not be null"); + Assert.notNull(joinColumn, "JoinColumn must not be null"); + Assert.notNull(parentId, "ParentId must not be null"); + + this.joinTable = joinTable; + this.joinColumn = joinColumn; + this.parentId = parentId; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Join join = (Join) o; + return joinTable.equals(join.joinTable) && joinColumn.equals(join.joinColumn) && parentId.equals(join.parentId); + } + + @Override + public int hashCode() { + return Objects.hash(joinTable, joinColumn, parentId); + } + + @Override + public String toString() { + + return "Join{" + "joinTable=" + joinTable + ", joinColumn=" + joinColumn + ", parentId=" + parentId + '}'; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryExecution.java 
b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryExecution.java new file mode 100644 index 0000000000..bcebc67b5c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryExecution.java @@ -0,0 +1,83 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.DtoInstantiatingConverter; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.EntityInstantiators; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.util.Lazy; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * Interface specifying a query execution strategy. Implementations encapsulate information how to actually execute the + * query and how to process the result in order to get the desired return type. 
+ * + * @author Mark Paluch + * @since 2.0 + */ +@FunctionalInterface +interface JdbcQueryExecution { + + /** + * Execute the given {@code query} and {@code parameter} and transforms the result into a {@code T}. + * + * @param query the query to be executed. Must not be {@literal null}. + * @param parameter the parameters to be bound to the query. Must not be {@literal null}. + * @return the result of the query. Might be {@literal null}. + */ + @Nullable + T execute(String query, SqlParameterSource parameter); + + /** + * A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}. + * + * @author Mark Paluch + * @since 2.3 + */ + class ResultProcessingConverter implements Converter { + + private final ResultProcessor processor; + private final Lazy> converter; + + ResultProcessingConverter(ResultProcessor processor, + MappingContext, ? extends RelationalPersistentProperty> mappingContext, + EntityInstantiators instantiators) { + this.processor = processor; + this.converter = Lazy.of(() -> new DtoInstantiatingConverter(processor.getReturnedType().getReturnedType(), + mappingContext, instantiators)); + } + + @Override + public Object convert(Object source) { + + ReturnedType returnedType = processor.getReturnedType(); + + if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType()) + || returnedType.getReturnedType().isInstance(source)) { + return source; + } + + return processor.processResult(source, converter.get()); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryMethod.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryMethod.java new file mode 100644 index 0000000000..7fc7c114c6 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/JdbcQueryMethod.java @@ -0,0 +1,275 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Method; +import java.util.Map; +import java.util.Optional; + +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.core.annotation.AnnotationUtils; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.repository.Lock; +import org.springframework.data.relational.repository.query.RelationalEntityMetadata; +import org.springframework.data.relational.repository.query.SimpleRelationalEntityMetadata; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.ParametersSource; +import org.springframework.data.repository.query.QueryMethod; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import 
org.springframework.util.ConcurrentReferenceHashMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * {@link QueryMethod} implementation that implements a method by executing the query from a {@link Query} annotation on + * that method. Binds method arguments to named parameters in the SQL statement. + * + * @author Jens Schauder + * @author Kazuki Shimizu + * @author Moises Cisneros + * @author Hebert Coelho + * @author Diego Krupitza + * @author Mark Paluch + * @author Daeho Kwon + */ +public class JdbcQueryMethod extends QueryMethod { + + private final Method method; + private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; + private final Map, Optional> annotationCache; + private final NamedQueries namedQueries; + private @Nullable RelationalEntityMetadata metadata; + private final boolean modifyingQuery; + + // TODO: Remove NamedQueries and put it into JdbcQueryLookupStrategy + public JdbcQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory factory, + NamedQueries namedQueries, + MappingContext, ? 
extends RelationalPersistentProperty> mappingContext) { + + super(method, metadata, factory, JdbcParameters::new); + this.namedQueries = namedQueries; + this.method = method; + this.mappingContext = mappingContext; + this.annotationCache = new ConcurrentReferenceHashMap<>(); + this.modifyingQuery = AnnotationUtils.findAnnotation(method, Modifying.class) != null; + } + + @Override + @SuppressWarnings("unchecked") + public RelationalEntityMetadata getEntityInformation() { + + if (metadata == null) { + + Class returnedObjectType = getReturnedObjectType(); + Class domainClass = getDomainClass(); + + if (ClassUtils.isPrimitiveOrWrapper(returnedObjectType)) { + + this.metadata = new SimpleRelationalEntityMetadata<>((Class) domainClass, + mappingContext.getRequiredPersistentEntity(domainClass)); + + } else { + + RelationalPersistentEntity returnedEntity = mappingContext.getPersistentEntity(returnedObjectType); + RelationalPersistentEntity managedEntity = mappingContext.getRequiredPersistentEntity(domainClass); + returnedEntity = returnedEntity == null || returnedEntity.getType().isInterface() ? managedEntity + : returnedEntity; + RelationalPersistentEntity tableEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity + : managedEntity; + + this.metadata = new SimpleRelationalEntityMetadata<>((Class) returnedEntity.getType(), tableEntity); + } + } + + return this.metadata; + } + + @Override + public JdbcParameters getParameters() { + return (JdbcParameters) super.getParameters(); + } + + /** + * Returns the annotated query if it exists. + * + * @return May be {@code null}. + */ + @Nullable + String getDeclaredQuery() { + + String annotatedValue = getQueryValue(); + return StringUtils.hasText(annotatedValue) ? 
annotatedValue : getNamedQuery(); + } + + public String getRequiredQuery() { + + String query = getDeclaredQuery(); + + if (ObjectUtils.isEmpty(query)) { + throw new IllegalStateException(String.format("No query specified on %s", getName())); + } + + return query; + } + + /** + * Returns the annotated query if it exists. + * + * @return May be {@code null}. + */ + @Nullable + private String getQueryValue() { + return getMergedAnnotationAttribute("value"); + } + + /** + * Returns the named query for this method if it exists. + * + * @return May be {@code null}. + */ + @Nullable + private String getNamedQuery() { + + String name = getNamedQueryName(); + return this.namedQueries.hasQuery(name) ? this.namedQueries.getQuery(name) : null; + } + + /** + * @return {@literal true} if the method is annotated with {@code @Query(name=…)}. + */ + public boolean hasAnnotatedQueryName() { + return lookupQueryAnnotation() // + .map(Query::name) // + .map(StringUtils::hasText).orElse(false); + } + + @Override + public String getNamedQueryName() { + + String annotatedName = getMergedAnnotationAttribute("name"); + + return StringUtils.hasText(annotatedName) ? annotatedName : super.getNamedQueryName(); + } + + /** + * Returns the class to be used as {@link org.springframework.jdbc.core.RowMapper} + * + * @return May be {@code null}. + */ + @Nullable + Class getRowMapperClass() { + return getMergedAnnotationAttribute("rowMapperClass"); + } + + /** + * Returns the name of the bean to be used as {@link org.springframework.jdbc.core.RowMapper} + * + * @return May be {@code null}. + */ + @Nullable + String getRowMapperRef() { + return getMergedAnnotationAttribute("rowMapperRef"); + } + + /** + * Returns the class to be used as {@link org.springframework.jdbc.core.ResultSetExtractor} + * + * @return May be {@code null}. 
+ */ + @Nullable + Class getResultSetExtractorClass() { + return getMergedAnnotationAttribute("resultSetExtractorClass"); + } + + /** + * Returns the bean name to be used as {@link org.springframework.jdbc.core.ResultSetExtractor} + * + * @return May be {@code null}. + */ + @Nullable + String getResultSetExtractorRef() { + return getMergedAnnotationAttribute("resultSetExtractorRef"); + } + + /** + * Returns whether the query method is a modifying one. + * + * @return if it's a modifying query, return {@code true}. + */ + @Override + public boolean isModifyingQuery() { + return modifyingQuery; + } + + @SuppressWarnings("unchecked") + @Nullable + private T getMergedAnnotationAttribute(String attribute) { + + Query queryAnnotation = AnnotatedElementUtils.findMergedAnnotation(method, Query.class); + return (T) AnnotationUtils.getValue(queryAnnotation, attribute); + } + + /** + * @return {@code true} if the method has an annotated query. + */ + public boolean hasAnnotatedQuery() { + return findAnnotatedQuery().isPresent(); + } + + private Optional findAnnotatedQuery() { + + return lookupQueryAnnotation() // + .map(Query::value) // + .filter(StringUtils::hasText); + } + + Optional lookupQueryAnnotation() { + return doFindAnnotation(Query.class); + } + + /** + * @return is a {@link Lock} annotation present or not. + */ + public boolean hasLockMode() { + return lookupLockAnnotation().isPresent(); + } + + /** + * Looks up the {@link Lock} annotation from the query method. + * + * @return the {@link Optional} wrapped {@link Lock} annotation. 
+ */ + Optional lookupLockAnnotation() { + return doFindAnnotation(Lock.class); + } + + @SuppressWarnings("unchecked") + private Optional doFindAnnotation(Class annotationType) { + + return (Optional) this.annotationCache.computeIfAbsent(annotationType, + it -> Optional.ofNullable(AnnotatedElementUtils.findMergedAnnotation(method, it))); + } + +} diff --git a/src/main/java/org/springframework/data/jdbc/repository/query/Modifying.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/Modifying.java similarity index 90% rename from src/main/java/org/springframework/data/jdbc/repository/query/Modifying.java rename to spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/Modifying.java index 54c3fa0021..4d6da3df1c 100644 --- a/src/main/java/org/springframework/data/jdbc/repository/query/Modifying.java +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/Modifying.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/ParametrizedQuery.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/ParametrizedQuery.java new file mode 100644 index 0000000000..22bcb8d53d --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/ParametrizedQuery.java @@ -0,0 +1,52 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; + +/** + * Value object encapsulating a query containing named parameters and a{@link SqlParameterSource} to bind the + * parameters. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 2.0 + */ +class ParametrizedQuery { + + private final String query; + private final SqlParameterSource parameterSource; + + ParametrizedQuery(String query, SqlParameterSource parameterSource) { + + this.query = query; + this.parameterSource = parameterSource; + } + + String getQuery() { + return query; + } + + SqlParameterSource getParameterSource(Escaper escaper) { + return new EscapingParameterSource(parameterSource, escaper); + } + + @Override + public String toString() { + return this.query; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/PartTreeJdbcQuery.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/PartTreeJdbcQuery.java new file mode 100644 index 0000000000..a40056e72d --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/PartTreeJdbcQuery.java @@ -0,0 +1,326 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import static org.springframework.data.jdbc.repository.query.JdbcQueryExecution.*; + +import java.sql.ResultSet; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.function.Function; +import java.util.function.LongSupplier; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.relational.core.conversion.RelationalConverter; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.repository.query.RelationalEntityMetadata; +import org.springframework.data.relational.repository.query.RelationalParameterAccessor; +import org.springframework.data.relational.repository.query.RelationalParametersParameterAccessor; +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; 
+import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.data.support.PageableExecutionUtils; +import org.springframework.data.util.Lazy; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * An {@link AbstractJdbcQuery} implementation based on a {@link PartTree}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Diego Krupitza + * @author Mikhail Polivakha + * @author Yunyoung LEE + * @author Nikita Konev + * @since 2.0 + */ +public class PartTreeJdbcQuery extends AbstractJdbcQuery { + + private final RelationalMappingContext context; + private final Parameters parameters; + private final Dialect dialect; + private final JdbcConverter converter; + private final CachedRowMapperFactory cachedRowMapperFactory; + private final PartTree tree; + + /** + * Creates a new {@link PartTreeJdbcQuery}. + * + * @param context must not be {@literal null}. + * @param queryMethod must not be {@literal null}. + * @param dialect must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param rowMapper must not be {@literal null}. + */ + public PartTreeJdbcQuery(RelationalMappingContext context, JdbcQueryMethod queryMethod, Dialect dialect, + JdbcConverter converter, NamedParameterJdbcOperations operations, RowMapper rowMapper) { + this(context, queryMethod, dialect, converter, operations, it -> rowMapper); + } + + /** + * Creates a new {@link PartTreeJdbcQuery}. + * + * @param context must not be {@literal null}. + * @param queryMethod must not be {@literal null}. + * @param dialect must not be {@literal null}. + * @param converter must not be {@literal null}. 
+ * @param operations must not be {@literal null}. + * @param rowMapperFactory must not be {@literal null}. + * @since 2.3 + */ + public PartTreeJdbcQuery(RelationalMappingContext context, JdbcQueryMethod queryMethod, Dialect dialect, + JdbcConverter converter, NamedParameterJdbcOperations operations, RowMapperFactory rowMapperFactory) { + + super(queryMethod, operations); + + Assert.notNull(context, "RelationalMappingContext must not be null"); + Assert.notNull(queryMethod, "JdbcQueryMethod must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(rowMapperFactory, "RowMapperFactory must not be null"); + + this.context = context; + this.parameters = queryMethod.getParameters(); + this.dialect = dialect; + this.converter = converter; + + this.tree = new PartTree(queryMethod.getName(), queryMethod.getResultProcessor().getReturnedType().getDomainType()); + JdbcQueryCreator.validate(this.tree, this.parameters, this.converter.getMappingContext()); + + this.cachedRowMapperFactory = new CachedRowMapperFactory(tree, rowMapperFactory, converter, + queryMethod.getResultProcessor()); + } + + private Sort getDynamicSort(RelationalParameterAccessor accessor) { + return parameters.potentiallySortsDynamically() ? 
accessor.getSort() : Sort.unsorted(); + } + + @Override + @Nullable + public Object execute(Object[] values) { + + RelationalParametersParameterAccessor accessor = new RelationalParametersParameterAccessor(getQueryMethod(), + values); + + if (tree.isDelete()) { + JdbcQueryExecution execution = createModifyingQueryExecutor(); + + List queries = createDeleteQueries(accessor); + Object result = null; + for (ParametrizedQuery query : queries) { + result = execution.execute(query.getQuery(), query.getParameterSource(dialect.getLikeEscaper())); + } + + return result; + } + + ResultProcessor processor = getQueryMethod().getResultProcessor().withDynamicProjection(accessor); + ParametrizedQuery query = createQuery(accessor, processor.getReturnedType()); + JdbcQueryExecution execution = getQueryExecution(processor, accessor); + + return execution.execute(query.getQuery(), query.getParameterSource(dialect.getLikeEscaper())); + } + + private JdbcQueryExecution getQueryExecution(ResultProcessor processor, + RelationalParametersParameterAccessor accessor) { + + ResultSetExtractor extractor = tree.isExistsProjection() ? (ResultSet::next) : null; + Supplier> rowMapper = parameters.hasDynamicProjection() + ? 
() -> cachedRowMapperFactory.getRowMapper(processor) + : cachedRowMapperFactory; + + JdbcQueryExecution queryExecution = getJdbcQueryExecution(extractor, rowMapper); + + if (getQueryMethod().isSliceQuery()) { + //noinspection unchecked + return new SliceQueryExecution<>((JdbcQueryExecution>) queryExecution, accessor.getPageable()); + } + + if (getQueryMethod().isPageQuery()) { + + //noinspection unchecked + return new PageQueryExecution<>((JdbcQueryExecution>) queryExecution, accessor.getPageable(), + () -> { + + RelationalEntityMetadata entityMetadata = getQueryMethod().getEntityInformation(); + + JdbcCountQueryCreator queryCreator = new JdbcCountQueryCreator(context, tree, converter, dialect, + entityMetadata, accessor, false, processor.getReturnedType(), getQueryMethod().lookupLockAnnotation()); + + ParametrizedQuery countQuery = queryCreator.createQuery(Sort.unsorted()); + Object count = singleObjectQuery((rs, i) -> rs.getLong(1)).execute(countQuery.getQuery(), + countQuery.getParameterSource(dialect.getLikeEscaper())); + + return converter.getConversionService().convert(count, Long.class); + }); + } + + return queryExecution; + } + + ParametrizedQuery createQuery(RelationalParametersParameterAccessor accessor, ReturnedType returnedType) { + + RelationalEntityMetadata entityMetadata = getQueryMethod().getEntityInformation(); + + JdbcQueryCreator queryCreator = new JdbcQueryCreator(context, tree, converter, dialect, entityMetadata, accessor, + getQueryMethod().isSliceQuery(), returnedType, this.getQueryMethod().lookupLockAnnotation()); + return queryCreator.createQuery(getDynamicSort(accessor)); + } + + private List createDeleteQueries(RelationalParametersParameterAccessor accessor) { + + RelationalEntityMetadata entityMetadata = getQueryMethod().getEntityInformation(); + + JdbcDeleteQueryCreator queryCreator = new JdbcDeleteQueryCreator(context, tree, converter, dialect, entityMetadata, + accessor); + return queryCreator.createQuery(); + } + + private 
JdbcQueryExecution getJdbcQueryExecution(@Nullable ResultSetExtractor extractor, + Supplier> rowMapper) { + + if (getQueryMethod().isPageQuery() || getQueryMethod().isSliceQuery()) { + return collectionQuery(rowMapper.get()); + } else { + + if (getQueryMethod().isModifyingQuery()) { + return createModifyingQueryExecutor(); + } else { + return createReadingQueryExecution(extractor, rowMapper); + } + } + } + + /** + * {@link JdbcQueryExecution} returning a {@link org.springframework.data.domain.Slice}. + * + * @param + */ + static class SliceQueryExecution implements JdbcQueryExecution> { + + private final JdbcQueryExecution> delegate; + private final Pageable pageable; + + public SliceQueryExecution(JdbcQueryExecution> delegate, Pageable pageable) { + this.delegate = delegate; + this.pageable = pageable; + } + + @Override + public Slice execute(String query, SqlParameterSource parameter) { + + Collection result = delegate.execute(query, parameter); + + int pageSize = 0; + if (pageable.isPaged()) { + + pageSize = pageable.getPageSize(); + } + + List resultList = result instanceof List ? (List) result : new ArrayList<>(result); + + boolean hasNext = pageable.isPaged() && resultList.size() > pageSize; + + return new SliceImpl<>(hasNext ? resultList.subList(0, pageSize) : resultList, pageable, hasNext); + } + } + + /** + * {@link JdbcQueryExecution} returning a {@link org.springframework.data.domain.Page}. 
+ * + * @param + */ + static class PageQueryExecution implements JdbcQueryExecution> { + + private final JdbcQueryExecution> delegate; + private final Pageable pageable; + private final LongSupplier countSupplier; + + PageQueryExecution(JdbcQueryExecution> delegate, Pageable pageable, + LongSupplier countSupplier) { + this.delegate = delegate; + this.pageable = pageable; + this.countSupplier = countSupplier; + } + + @Override + public Slice execute(String query, SqlParameterSource parameter) { + + Collection result = delegate.execute(query, parameter); + + return PageableExecutionUtils.getPage(result instanceof List ? (List) result : new ArrayList<>(result), + pageable, countSupplier); + } + + } + + /** + * Cached implementation of {@link RowMapper} suppler providing either a cached variant of the RowMapper or creating a + * new one when using dynamic projections. + */ + class CachedRowMapperFactory implements Supplier> { + + private final Lazy> rowMapper; + private final Function> rowMapperFunction; + + public CachedRowMapperFactory(PartTree tree, RowMapperFactory rowMapperFactory, RelationalConverter converter, + ResultProcessor defaultResultProcessor) { + + this.rowMapperFunction = processor -> { + + if (tree.isCountProjection() || tree.isExistsProjection()) { + return rowMapperFactory.create(resolveTypeToRead(processor)); + } + Converter resultProcessingConverter = new ResultProcessingConverter(processor, + converter.getMappingContext(), converter.getEntityInstantiators()); + return new ConvertingRowMapper<>(rowMapperFactory.create(processor.getReturnedType().getDomainType()), + resultProcessingConverter); + }; + + this.rowMapper = Lazy.of(() -> this.rowMapperFunction.apply(defaultResultProcessor)); + } + + @Override + public RowMapper get() { + return getRowMapper(); + } + + public RowMapper getRowMapper() { + return rowMapper.get(); + } + + public RowMapper getRowMapper(ResultProcessor resultProcessor) { + return rowMapperFunction.apply(resultProcessor); + } 
+ + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/Query.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/Query.java new file mode 100644 index 0000000000..ec6a693dd3 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/Query.java @@ -0,0 +1,92 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.QueryAnnotation; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; + +/** + * Annotation to provide SQL statements that will get used for executing the method. The SQL statement may contain named + * parameters as supported by {@link org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate}. Those + * parameters will get bound to the arguments of the annotated method. + *

+ * You can also specify the way to extract data from {@link java.sql.ResultSet}. There are 4 attribute of this + * annotation you can set to do that: + *

+ *

    + *
  1. {@link #resultSetExtractorRef()} + *
  2. {@link #resultSetExtractorClass()} + *
  3. {@link #rowMapperRef()} + *
  4. {@link #rowMapperClass()} + *
  5. + *
+ * + * The annotation attributes above are listed in their preference order, that is - the {@link #resultSetExtractorRef()}, + * has the highest privilege and, will suppress any other 3 attribute from above, and consequently {@link #rowMapperClass()} + * has the lowest privilege and will be used if any of three above are not specified. + * + * @author Jens Schauder + * @author Moises Cisneros + * @author Hebert Coelho + * @author Mikhail Polivakha + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@QueryAnnotation +@Documented +public @interface Query { + + /** + * The SQL statement to execute when the annotated method gets invoked. + */ + String value() default ""; + + /** + * The named query to be used. If not defined, the name of + * {@code ${domainClass}.${queryMethodName}} will be used. + */ + String name() default ""; + + /** + * Optional {@link RowMapper} to use to convert the result of the query to domain class instances. + */ + Class rowMapperClass() default RowMapper.class; + + /** + * Optional name of a bean of type {@link RowMapper} to use to convert the result of the query to domain class instances. + * @since 2.1 + */ + String rowMapperRef() default ""; + + /** + * Optional {@link ResultSetExtractor} to use to convert the result of the query to domain class instances. + */ + Class resultSetExtractorClass() default ResultSetExtractor.class; + + /** + * Optional name of a bean of type {@link ResultSetExtractor} to use to convert the result of the query to domain class instances. 
+ * + * @since 2.1 + */ + String resultSetExtractorRef() default ""; +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/SqlContext.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/SqlContext.java new file mode 100644 index 0000000000..4d34666631 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/SqlContext.java @@ -0,0 +1,73 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.Table; + +/** + * Utility to get from path to SQL DSL elements. This is a temporary class and duplicates + * {@link org.springframework.data.jdbc.core.convert.SqlContext}. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Tyler Van Gorder + * @since 2.0 + */ +class SqlContext { + + private final RelationalPersistentEntity entity; + private final Table table; + + SqlContext(RelationalPersistentEntity entity) { + + this.entity = entity; + this.table = Table.create(entity.getQualifiedTableName()); + } + + Column getIdColumn() { + return table.column(entity.getIdColumn()); + } + + Column getVersionColumn() { + return table.column(entity.getRequiredVersionProperty().getColumnName()); + } + + Table getTable() { + return table; + } + + Table getTable(AggregatePath path) { + + SqlIdentifier tableAlias = path.getTableInfo().tableAlias(); + Table table = Table.create(path.getTableInfo().qualifiedTableName()); + return tableAlias == null ? table : table.as(tableAlias); + } + + Column getColumn(AggregatePath path) { + AggregatePath.ColumnInfo columnInfo = path.getColumnInfo(); + AggregatePath.ColumnInfo columnInfo1 = path.getColumnInfo(); + return getTable(path).column(columnInfo1.name()).as(columnInfo.alias()); + } + + Column getReverseColumn(AggregatePath path) { + return getTable(path).column(path.getTableInfo().reverseColumnInfo().name()) + .as(path.getTableInfo().reverseColumnInfo().alias()); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/StringBasedJdbcQuery.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/StringBasedJdbcQuery.java new file mode 100644 index 0000000000..7c4ff0d78c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/StringBasedJdbcQuery.java @@ -0,0 +1,537 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.query; + +import static org.springframework.data.jdbc.repository.query.JdbcQueryExecution.*; + +import java.lang.reflect.Array; +import java.lang.reflect.Constructor; +import java.sql.SQLType; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.beans.BeanInstantiationException; +import org.springframework.beans.BeanUtils; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.jdbc.core.convert.JdbcColumnTypes; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.repository.query.RelationalParameterAccessor; +import org.springframework.data.relational.repository.query.RelationalParametersParameterAccessor; +import org.springframework.data.repository.query.CachingValueExpressionDelegate; +import org.springframework.data.repository.query.Parameter; +import 
org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.repository.query.ValueExpressionQueryRewriter; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.TypeInformation; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; + +/** + * A query to be executed based on a repository method, it's annotated SQL query and the arguments provided to the + * method. + * + * @author Jens Schauder + * @author Kazuki Shimizu + * @author Oliver Gierke + * @author Maciej Walkowiak + * @author Mark Paluch + * @author Hebert Coelho + * @author Chirag Tailor + * @author Christopher Klein + * @author Mikhail Polivakha + * @author Marcin Grzejszczak + * @since 2.0 + */ +public class StringBasedJdbcQuery extends AbstractJdbcQuery { + + private static final String PARAMETER_NEEDS_TO_BE_NAMED = "For queries with named parameters you need to provide names for method parameters; Use @Param for query method parameters, or use the javac flag -parameters"; + private final static String LOCKING_IS_NOT_SUPPORTED = "Currently, @Lock is supported only on derived queries. 
In other words, for queries created with @Query, the locking condition specified with @Lock does nothing"; + private static final Log LOG = LogFactory.getLog(StringBasedJdbcQuery.class); + private final JdbcConverter converter; + private final RowMapperFactory rowMapperFactory; + private final ValueExpressionQueryRewriter.ParsedQuery parsedQuery; + private final String query; + + private final CachedRowMapperFactory cachedRowMapperFactory; + private final CachedResultSetExtractorFactory cachedResultSetExtractorFactory; + private final ValueExpressionDelegate delegate; + + /** + * Creates a new {@link StringBasedJdbcQuery} for the given {@link JdbcQueryMethod}, {@link RelationalMappingContext} + * and {@link RowMapper}. + * + * @param queryMethod must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param defaultRowMapper can be {@literal null} (only in case of a modifying query). + * @deprecated since 3.4, use the constructors accepting {@link ValueExpressionDelegate} instead. + */ + @Deprecated(since = "3.4") + public StringBasedJdbcQuery(JdbcQueryMethod queryMethod, NamedParameterJdbcOperations operations, + @Nullable RowMapper defaultRowMapper, JdbcConverter converter, + QueryMethodEvaluationContextProvider evaluationContextProvider) { + this(queryMethod.getRequiredQuery(), queryMethod, operations, result -> (RowMapper) defaultRowMapper, + converter, evaluationContextProvider); + } + + /** + * Creates a new {@link StringBasedJdbcQuery} for the given {@link JdbcQueryMethod}, {@link RelationalMappingContext} + * and {@link RowMapperFactory}. + * + * @param queryMethod must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param rowMapperFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. 
+ * @since 2.3 + * @deprecated use alternative constructor + */ + @Deprecated(since = "3.4") + public StringBasedJdbcQuery(JdbcQueryMethod queryMethod, NamedParameterJdbcOperations operations, + RowMapperFactory rowMapperFactory, JdbcConverter converter, + QueryMethodEvaluationContextProvider evaluationContextProvider) { + this(queryMethod.getRequiredQuery(), queryMethod, operations, rowMapperFactory, converter, + evaluationContextProvider); + } + + /** + * Creates a new {@link StringBasedJdbcQuery} for the given {@link JdbcQueryMethod}, {@link RelationalMappingContext} + * and {@link RowMapperFactory}. + * + * @param queryMethod must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param rowMapperFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 3.4 + */ + public StringBasedJdbcQuery(JdbcQueryMethod queryMethod, NamedParameterJdbcOperations operations, + RowMapperFactory rowMapperFactory, JdbcConverter converter, ValueExpressionDelegate delegate) { + this(queryMethod.getRequiredQuery(), queryMethod, operations, rowMapperFactory, converter, delegate); + } + + /** + * Creates a new {@link StringBasedJdbcQuery} for the given {@link JdbcQueryMethod}, {@link RelationalMappingContext} + * and {@link RowMapperFactory}. + * + * @param query must not be {@literal null} or empty. + * @param queryMethod must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param rowMapperFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 3.4 + */ + public StringBasedJdbcQuery(String query, JdbcQueryMethod queryMethod, NamedParameterJdbcOperations operations, + RowMapperFactory rowMapperFactory, JdbcConverter converter, ValueExpressionDelegate delegate) { + super(queryMethod, operations); + Assert.hasText(query, "Query must not be null or empty"); + Assert.notNull(rowMapperFactory, "RowMapperFactory must not be null"); + + this.converter = converter; + this.rowMapperFactory = rowMapperFactory; + + if (queryMethod.isSliceQuery()) { + throw new UnsupportedOperationException( + "Slice queries are not supported using string-based queries; Offending method: " + queryMethod); + } + + if (queryMethod.isPageQuery()) { + throw new UnsupportedOperationException( + "Page queries are not supported using string-based queries; Offending method: " + queryMethod); + } + + if (queryMethod.getParameters().hasLimitParameter()) { + throw new UnsupportedOperationException( + "Queries with Limit are not supported using string-based queries; Offending method: " + queryMethod); + } + + this.cachedRowMapperFactory = new CachedRowMapperFactory( + () -> rowMapperFactory.create(queryMethod.getResultProcessor().getReturnedType().getReturnedType())); + this.cachedResultSetExtractorFactory = new CachedResultSetExtractorFactory( + this.cachedRowMapperFactory::getRowMapper); + + ValueExpressionQueryRewriter rewriter = ValueExpressionQueryRewriter.of(delegate, + (counter, expression) -> String.format("__$synthetic$__%d", counter + 1), String::concat); + + this.query = query; + + if (queryMethod.hasLockMode()) { + LOG.warn(LOCKING_IS_NOT_SUPPORTED); + } + this.parsedQuery = rewriter.parse(this.query); + this.delegate = delegate; + } + + /** + * Creates a new {@link StringBasedJdbcQuery} for the given {@link JdbcQueryMethod}, {@link RelationalMappingContext} + * and {@link RowMapperFactory}. + * + * @param query must not be {@literal null} or empty. + * @param queryMethod must not be {@literal null}. 
+ * @param operations must not be {@literal null}. + * @param rowMapperFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @since 3.4 + * @deprecated since 3.4, use the constructors accepting {@link ValueExpressionDelegate} instead. + */ + @Deprecated(since = "3.4") + public StringBasedJdbcQuery(String query, JdbcQueryMethod queryMethod, NamedParameterJdbcOperations operations, + RowMapperFactory rowMapperFactory, JdbcConverter converter, + QueryMethodEvaluationContextProvider evaluationContextProvider) { + this(query, queryMethod, operations, rowMapperFactory, converter, new CachingValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), rootObject -> evaluationContextProvider + .getEvaluationContext(queryMethod.getParameters(), new Object[] { rootObject })), + ValueExpressionParser.create())); + } + + @Override + public Object execute(Object[] objects) { + + RelationalParameterAccessor accessor = new RelationalParametersParameterAccessor(getQueryMethod(), objects); + ResultProcessor processor = getQueryMethod().getResultProcessor().withDynamicProjection(accessor); + + JdbcQueryExecution queryExecution = createJdbcQueryExecution(accessor, processor); + MapSqlParameterSource parameterMap = this.bindParameters(accessor); + + return queryExecution.execute(evaluateExpressions(objects, accessor.getBindableParameters(), parameterMap), + parameterMap); + } + + private String evaluateExpressions(Object[] objects, Parameters bindableParameters, + MapSqlParameterSource parameterMap) { + + if (parsedQuery.hasParameterBindings()) { + + ValueEvaluationContext evaluationContext = delegate.createValueContextProvider(bindableParameters) + .getEvaluationContext(objects); + + parsedQuery.getParameterMap().forEach((paramName, valueExpression) -> { + parameterMap.addValue(paramName, valueExpression.evaluate(evaluationContext)); + }); + + 
return parsedQuery.getQueryString(); + } + + return this.query; + } + + private JdbcQueryExecution createJdbcQueryExecution(RelationalParameterAccessor accessor, + ResultProcessor processor) { + + if (getQueryMethod().isModifyingQuery()) { + return createModifyingQueryExecutor(); + } + + Supplier> rowMapper = () -> determineRowMapper(processor, accessor.findDynamicProjection() != null); + ResultSetExtractor resultSetExtractor = determineResultSetExtractor(rowMapper); + + return createReadingQueryExecution(resultSetExtractor, rowMapper); + } + + private MapSqlParameterSource bindParameters(RelationalParameterAccessor accessor) { + + Parameters bindableParameters = accessor.getBindableParameters(); + MapSqlParameterSource parameters = new MapSqlParameterSource( + new LinkedHashMap<>(bindableParameters.getNumberOfParameters(), 1.0f)); + + for (Parameter bindableParameter : bindableParameters) { + + Object value = accessor.getBindableValue(bindableParameter.getIndex()); + String parameterName = bindableParameter.getName() + .orElseThrow(() -> new IllegalStateException(PARAMETER_NEEDS_TO_BE_NAMED)); + JdbcParameters.JdbcParameter parameter = getQueryMethod().getParameters() + .getParameter(bindableParameter.getIndex()); + + JdbcValue jdbcValue = writeValue(value, parameter.getTypeInformation(), parameter); + SQLType jdbcType = jdbcValue.getJdbcType(); + + if (jdbcType == null) { + parameters.addValue(parameterName, jdbcValue.getValue()); + } else { + parameters.addValue(parameterName, jdbcValue.getValue(), jdbcType.getVendorTypeNumber()); + } + } + + return parameters; + } + + private JdbcValue writeValue(@Nullable Object value, TypeInformation typeInformation, + JdbcParameters.JdbcParameter parameter) { + + if (value == null) { + return JdbcValue.of(value, parameter.getSqlType()); + } + + if (typeInformation.isCollectionLike() && value instanceof Collection collection) { + + TypeInformation actualType = typeInformation.getActualType(); + + // allow tuple-binding for 
collection of byte arrays to be used as BINARY, + // we do not want to convert to column arrays. + if (actualType != null && actualType.getType().isArray() && !actualType.getType().equals(byte[].class)) { + + TypeInformation nestedElementType = actualType.getRequiredActualType(); + return writeCollection(collection, parameter.getActualSqlType(), + array -> writeArrayValue(parameter, array, nestedElementType)); + } + + // parameter expansion + return writeCollection(collection, parameter.getActualSqlType(), + it -> converter.writeJdbcValue(it, typeInformation.getRequiredActualType(), parameter.getActualSqlType())); + } + + SQLType sqlType = parameter.getSqlType(); + return converter.writeJdbcValue(value, typeInformation, sqlType); + } + + private JdbcValue writeCollection(Collection value, SQLType defaultType, Function mapper) { + + if (value.isEmpty()) { + return JdbcValue.of(value, defaultType); + } + + JdbcValue jdbcValue; + List mapped = new ArrayList<>(value.size()); + SQLType jdbcType = null; + + for (Object o : value) { + + Object mappedValue = mapper.apply(o); + + if (mappedValue instanceof JdbcValue jv) { + if (jdbcType == null) { + jdbcType = jv.getJdbcType(); + } + mappedValue = jv.getValue(); + } + + mapped.add(mappedValue); + } + + jdbcValue = JdbcValue.of(mapped, jdbcType == null ? 
defaultType : jdbcType); + + return jdbcValue; + } + + private JdbcValue writeArrayValue(JdbcParameters.JdbcParameter parameter, Object array, + TypeInformation nestedElementType) { + + int length = Array.getLength(array); + Object[] mappedArray = new Object[length]; + SQLType sqlType = null; + + for (int i = 0; i < length; i++) { + + Object element = Array.get(array, i); + JdbcValue converted = converter.writeJdbcValue(element, nestedElementType, parameter.getActualSqlType()); + + if (sqlType == null && converted.getJdbcType() != null) { + sqlType = converted.getJdbcType(); + } + mappedArray[i] = converted.getValue(); + } + + if (sqlType == null) { + sqlType = JdbcUtil.targetSqlTypeFor(JdbcColumnTypes.INSTANCE.resolvePrimitiveType(nestedElementType.getType())); + } + + return JdbcValue.of(mappedArray, sqlType); + } + + RowMapper determineRowMapper(ResultProcessor resultProcessor, boolean hasDynamicProjection) { + + if (cachedRowMapperFactory.isConfiguredRowMapper()) { + return cachedRowMapperFactory.getRowMapper(); + } + + if (hasDynamicProjection) { + + RowMapper rowMapperToUse = rowMapperFactory.create(resultProcessor.getReturnedType().getDomainType()); + + ResultProcessingConverter converter = new ResultProcessingConverter(resultProcessor, + this.converter.getMappingContext(), this.converter.getEntityInstantiators()); + return new ConvertingRowMapper<>(rowMapperToUse, converter); + } + + return cachedRowMapperFactory.getRowMapper(); + } + + @Nullable + ResultSetExtractor determineResultSetExtractor(Supplier> rowMapper) { + + if (cachedResultSetExtractorFactory.isConfiguredResultSetExtractor()) { + + if (cachedResultSetExtractorFactory.requiresRowMapper() && !cachedRowMapperFactory.isConfiguredRowMapper()) { + return cachedResultSetExtractorFactory.getResultSetExtractor(rowMapper); + } + + // configured ResultSetExtractor defaults to configured RowMapper in case both are configured + return cachedResultSetExtractorFactory.getResultSetExtractor(); + } + + return 
null; + } + + private static boolean isUnconfigured(@Nullable Class configuredClass, Class defaultClass) { + return configuredClass == null || configuredClass == defaultClass; + } + + @Deprecated(since = "3.4") + public void setBeanFactory(BeanFactory beanFactory) {} + + class CachedRowMapperFactory { + + private final Lazy> cachedRowMapper; + private final boolean configuredRowMapper; + private final @Nullable Constructor constructor; + + @SuppressWarnings("unchecked") + public CachedRowMapperFactory(Supplier> defaultMapper) { + + String rowMapperRef = getQueryMethod().getRowMapperRef(); + Class rowMapperClass = getQueryMethod().getRowMapperClass(); + + if (!ObjectUtils.isEmpty(rowMapperRef) && !isUnconfigured(rowMapperClass, RowMapper.class)) { + throw new IllegalArgumentException( + "Invalid RowMapper configuration. Configure either one but not both via @Query(rowMapperRef = …, rowMapperClass = …) for query method " + + getQueryMethod()); + } + + this.configuredRowMapper = !ObjectUtils.isEmpty(rowMapperRef) || !isUnconfigured(rowMapperClass, RowMapper.class); + this.constructor = rowMapperClass != null ? 
findPrimaryConstructor(rowMapperClass) : null; + this.cachedRowMapper = Lazy.of(() -> { + + if (!ObjectUtils.isEmpty(rowMapperRef)) { + return rowMapperFactory.getRowMapper(rowMapperRef); + } + + if (isUnconfigured(rowMapperClass, RowMapper.class)) { + return defaultMapper.get(); + } + + return (RowMapper) BeanUtils.instantiateClass(constructor); + }); + } + + public boolean isConfiguredRowMapper() { + return configuredRowMapper; + } + + public RowMapper getRowMapper() { + return cachedRowMapper.get(); + } + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + class CachedResultSetExtractorFactory { + + private final Lazy> cachedResultSetExtractor; + private final boolean configuredResultSetExtractor; + private final @Nullable Constructor rowMapperConstructor; + private final @Nullable Constructor constructor; + private final Function>, ResultSetExtractor> resultSetExtractorFactory; + + public CachedResultSetExtractorFactory(Supplier> resultSetExtractor) { + + String resultSetExtractorRef = getQueryMethod().getResultSetExtractorRef(); + Class resultSetExtractorClass = getQueryMethod().getResultSetExtractorClass(); + + if (!ObjectUtils.isEmpty(resultSetExtractorRef) + && !isUnconfigured(resultSetExtractorClass, ResultSetExtractor.class)) { + throw new IllegalArgumentException( + "Invalid ResultSetExtractor configuration. Configure either one but not both via @Query(resultSetExtractorRef = …, resultSetExtractorClass = …) for query method " + + getQueryMethod()); + } + + this.configuredResultSetExtractor = !ObjectUtils.isEmpty(resultSetExtractorRef) + || !isUnconfigured(resultSetExtractorClass, ResultSetExtractor.class); + + this.rowMapperConstructor = resultSetExtractorClass != null + ? ClassUtils.getConstructorIfAvailable(resultSetExtractorClass, RowMapper.class) + : null; + this.constructor = resultSetExtractorClass != null ? 
findPrimaryConstructor(resultSetExtractorClass) : null; + this.resultSetExtractorFactory = rowMapper -> { + + if (!ObjectUtils.isEmpty(resultSetExtractorRef)) { + return rowMapperFactory.getResultSetExtractor(resultSetExtractorRef); + } + + if (isUnconfigured(resultSetExtractorClass, ResultSetExtractor.class)) { + throw new UnsupportedOperationException("This should not happen"); + } + + if (rowMapperConstructor != null) { + return BeanUtils.instantiateClass(rowMapperConstructor, rowMapper.get()); + } + + return BeanUtils.instantiateClass(constructor); + }; + + this.cachedResultSetExtractor = Lazy.of(() -> resultSetExtractorFactory.apply(resultSetExtractor)); + } + + public boolean isConfiguredResultSetExtractor() { + return configuredResultSetExtractor; + } + + public ResultSetExtractor getResultSetExtractor() { + return cachedResultSetExtractor.get(); + } + + public ResultSetExtractor getResultSetExtractor(Supplier> rowMapperSupplier) { + return resultSetExtractorFactory.apply(rowMapperSupplier); + } + + public boolean requiresRowMapper() { + return rowMapperConstructor != null; + } + } + + @Nullable + static Constructor findPrimaryConstructor(Class clazz) { + try { + return clazz.getDeclaredConstructor(); + } catch (NoSuchMethodException ex) { + return BeanUtils.findPrimaryConstructor(clazz); + + } catch (LinkageError err) { + throw new BeanInstantiationException(clazz, "Unresolvable class definition", err); + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/package-info.java new file mode 100644 index 0000000000..96d41b6bb5 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/query/package-info.java @@ -0,0 +1,7 @@ +/** + * Query derivation mechanism for JDBC specific repositories. 
+ */ +@NonNullApi +package org.springframework.data.jdbc.repository.query; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/FetchableFluentQueryByExample.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/FetchableFluentQueryByExample.java new file mode 100644 index 0000000000..fe07f07588 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/FetchableFluentQueryByExample.java @@ -0,0 +1,172 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.support; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.Function; +import java.util.function.UnaryOperator; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.springframework.data.domain.Example; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.jdbc.core.JdbcAggregateOperations; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.repository.query.RelationalExampleMapper; +import org.springframework.util.Assert; + +/** + * {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} using {@link Example}. 
+ * + * @author Diego Krupitza + * @author Mark Paluch + * @since 3.0 + */ +class FetchableFluentQueryByExample extends FluentQuerySupport { + + private final RelationalExampleMapper exampleMapper; + private final JdbcAggregateOperations entityOperations; + + FetchableFluentQueryByExample(Example example, Class resultType, RelationalExampleMapper exampleMapper, + JdbcAggregateOperations entityOperations) { + this(example, Sort.unsorted(), 0, resultType, Collections.emptyList(), exampleMapper, entityOperations); + } + + FetchableFluentQueryByExample(Example example, Sort sort, int limit, Class resultType, + List fieldsToInclude, RelationalExampleMapper exampleMapper, JdbcAggregateOperations entityOperations) { + + super(example, sort, limit, resultType, fieldsToInclude); + + this.exampleMapper = exampleMapper; + this.entityOperations = entityOperations; + } + + @Override + public R oneValue() { + + return this.entityOperations.findOne(createQuery(), getExampleType()) + .map(item -> this.getConversionFunction().apply(item)).orElse(null); + } + + @Override + public R firstValue() { + + return this.getConversionFunction() + .apply(this.entityOperations.findAll(createQuery().sort(getSort()), getExampleType()).iterator().next()); + } + + @Override + public List all() { + return findAll(createQuery().sort(getSort())); + } + + private List findAll(Query query) { + + Function conversionFunction = this.getConversionFunction(); + Iterable raw = this.entityOperations.findAll(query, getExampleType()); + + List result = new ArrayList<>(raw instanceof Collection ? 
((Collection) raw).size() : 16); + + for (S s : raw) { + result.add(conversionFunction.apply(s)); + } + + return result; + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + + Assert.notNull(scrollPosition, "ScrollPosition must not be null"); + + if (scrollPosition instanceof OffsetScrollPosition osp) { + + Query query = createQuery().sort(getSort()); + + if (!osp.isInitial()) { + query = query.offset(osp.getOffset() + 1); + } + + if (getLimit() > 0) { + query = query.limit(getLimit()); + } + + return ScrollDelegate.scroll(query, this::findAll, osp); + } + + return super.scroll(scrollPosition); + } + + @Override + public Page page(Pageable pageable) { + + return this.entityOperations.findAll(createQuery(p -> p.with(pageable)), getExampleType(), pageable) + .map(item -> this.getConversionFunction().apply(item)); + } + + @Override + public Stream stream() { + + return StreamSupport + .stream(this.entityOperations.findAll(createQuery().sort(getSort()), getExampleType()).spliterator(), false) + .map(item -> this.getConversionFunction().apply(item)); + } + + @Override + public long count() { + return this.entityOperations.count(createQuery(), getExampleType()); + } + + @Override + public boolean exists() { + return this.entityOperations.exists(createQuery(), getExampleType()); + } + + private Query createQuery() { + return createQuery(UnaryOperator.identity()); + } + + private Query createQuery(UnaryOperator queryCustomizer) { + + Query query = exampleMapper.getMappedExample(getExample()); + + if (!getFieldsToInclude().isEmpty()) { + query = query.columns(getFieldsToInclude().toArray(new String[0])); + } + + query = query.limit(getLimit()); + + query = queryCustomizer.apply(query); + + return query; + } + + @Override + protected FluentQuerySupport create(Example example, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + + return new FetchableFluentQueryByExample<>(example, sort, limit, resultType, fieldsToInclude, 
this.exampleMapper, + this.entityOperations); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/FluentQuerySupport.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/FluentQuerySupport.java new file mode 100644 index 0000000000..1b96b3c668 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/FluentQuerySupport.java @@ -0,0 +1,131 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.support; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.function.Function; + +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Sort; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +/** + * Support class for {@link FluentQuery.FetchableFluentQuery} implementations. 
+ * + * @author Diego Krupitza + * @author Mark Paluch + * @since 3.0 + */ +abstract class FluentQuerySupport implements FluentQuery.FetchableFluentQuery { + + private final Example example; + private final Sort sort; + private final int limit; + private final Class resultType; + private final List fieldsToInclude; + + private final SpelAwareProxyProjectionFactory projectionFactory = new SpelAwareProxyProjectionFactory(); + + FluentQuerySupport(Example example, Sort sort, int limit, Class resultType, List fieldsToInclude) { + + this.example = example; + this.sort = sort; + this.limit = limit; + this.resultType = resultType; + this.fieldsToInclude = fieldsToInclude; + } + + @Override + public FetchableFluentQuery sortBy(Sort sort) { + + Assert.notNull(sort, "Sort must not be null!"); + + return create(example, sort, limit, resultType, fieldsToInclude); + } + + @Override + public FetchableFluentQuery limit(int limit) { + + Assert.isTrue(limit >= 0, "Limit must not be negative"); + + return create(example, sort, limit, resultType, fieldsToInclude); + } + + @Override + public FetchableFluentQuery as(Class projection) { + + Assert.notNull(projection, "Projection target type must not be null!"); + + return create(example, sort, limit, projection, fieldsToInclude); + } + + @Override + public FetchableFluentQuery project(Collection properties) { + + Assert.notNull(properties, "Projection properties must not be null!"); + + return create(example, sort, limit, resultType, new ArrayList<>(properties)); + } + + protected abstract FluentQuerySupport create(Example example, Sort sort, int limit, Class resultType, + List fieldsToInclude); + + Class getExampleType() { + return this.example.getProbeType(); + } + + Example getExample() { + return this.example; + } + + Sort getSort() { + return sort; + } + + int getLimit() { + return limit; + } + + Class getResultType() { + return resultType; + } + + List getFieldsToInclude() { + return fieldsToInclude; + } + + private Function 
getConversionFunction(Class inputType, Class targetType) { + + if (targetType.isAssignableFrom(inputType)) { + return (Function) Function.identity(); + } + + if (targetType.isInterface()) { + return o -> projectionFactory.createProjection(targetType, o); + } + + return o -> DefaultConversionService.getSharedInstance().convert(o, targetType); + } + + protected Function getConversionFunction() { + return getConversionFunction(this.example.getProbeType(), getResultType()); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategy.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategy.java new file mode 100644 index 0000000000..fee40edb19 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategy.java @@ -0,0 +1,381 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.support; + +import java.lang.reflect.Method; +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.jdbc.core.convert.EntityRowMapper; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.data.jdbc.repository.query.AbstractJdbcQuery; +import org.springframework.data.jdbc.repository.query.JdbcQueryMethod; +import org.springframework.data.jdbc.repository.query.PartTreeJdbcQuery; +import org.springframework.data.jdbc.repository.query.StringBasedJdbcQuery; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.event.AfterConvertCallback; +import org.springframework.data.relational.core.mapping.event.AfterConvertEvent; +import org.springframework.data.relational.repository.support.RelationalQueryLookupStrategy; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import 
org.springframework.jdbc.core.SingleColumnRowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Abstract {@link QueryLookupStrategy} for JDBC repositories. + * + * @author Jens Schauder + * @author Kazuki Shimizu + * @author Oliver Gierke + * @author Mark Paluch + * @author Maciej Walkowiak + * @author Moises Cisneros + * @author Hebert Coelho + * @author Diego Krupitza + * @author Christopher Klein + */ +abstract class JdbcQueryLookupStrategy extends RelationalQueryLookupStrategy { + + private static final Log LOG = LogFactory.getLog(JdbcQueryLookupStrategy.class); + + private final ApplicationEventPublisher publisher; + private final RelationalMappingContext context; + private final @Nullable EntityCallbacks callbacks; + private final JdbcConverter converter; + private final QueryMappingConfiguration queryMappingConfiguration; + private final NamedParameterJdbcOperations operations; + protected final ValueExpressionDelegate delegate; + + JdbcQueryLookupStrategy(ApplicationEventPublisher publisher, @Nullable EntityCallbacks callbacks, + RelationalMappingContext context, JdbcConverter converter, Dialect dialect, + QueryMappingConfiguration queryMappingConfiguration, NamedParameterJdbcOperations operations, + ValueExpressionDelegate delegate) { + + super(context, dialect); + + Assert.notNull(publisher, "ApplicationEventPublisher must not be null"); + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(queryMappingConfiguration, "QueryMappingConfiguration must not be null"); + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + Assert.notNull(delegate, "ValueExpressionDelegate must not be null"); + + this.context = context; + this.publisher = publisher; + this.callbacks = callbacks; + this.converter = converter; + this.queryMappingConfiguration = queryMappingConfiguration; + 
this.operations = operations; + this.delegate = delegate; + } + + public RelationalMappingContext getMappingContext() { + return context; + } + + /** + * {@link QueryLookupStrategy} to create a query from the method name. + * + * @author Diego Krupitza + * @since 2.4 + */ + static class CreateQueryLookupStrategy extends JdbcQueryLookupStrategy { + + CreateQueryLookupStrategy(ApplicationEventPublisher publisher, @Nullable EntityCallbacks callbacks, + RelationalMappingContext context, JdbcConverter converter, Dialect dialect, + QueryMappingConfiguration queryMappingConfiguration, NamedParameterJdbcOperations operations, + ValueExpressionDelegate delegate) { + + super(publisher, callbacks, context, converter, dialect, queryMappingConfiguration, operations, + delegate); + } + + @Override + public RepositoryQuery resolveQuery(Method method, RepositoryMetadata repositoryMetadata, + ProjectionFactory projectionFactory, NamedQueries namedQueries) { + + JdbcQueryMethod queryMethod = getJdbcQueryMethod(method, repositoryMetadata, projectionFactory, namedQueries); + + return new PartTreeJdbcQuery(getMappingContext(), queryMethod, getDialect(), getConverter(), getOperations(), + this::createMapper); + } + } + + /** + * {@link QueryLookupStrategy} that tries to detect a declared query declared via + * {@link org.springframework.data.jdbc.repository.query.Query} annotation followed by a JPA named query lookup. 
+ * + * @author Diego Krupitza + * @since 2.4 + */ + static class DeclaredQueryLookupStrategy extends JdbcQueryLookupStrategy { + + private final AbstractJdbcQuery.RowMapperFactory rowMapperFactory; + + DeclaredQueryLookupStrategy(ApplicationEventPublisher publisher, @Nullable EntityCallbacks callbacks, + RelationalMappingContext context, JdbcConverter converter, Dialect dialect, + QueryMappingConfiguration queryMappingConfiguration, NamedParameterJdbcOperations operations, + @Nullable BeanFactory beanfactory, ValueExpressionDelegate delegate) { + super(publisher, callbacks, context, converter, dialect, queryMappingConfiguration, operations, + delegate); + + this.rowMapperFactory = new BeanFactoryRowMapperFactory(beanfactory); + } + + @Override + public RepositoryQuery resolveQuery(Method method, RepositoryMetadata repositoryMetadata, + ProjectionFactory projectionFactory, NamedQueries namedQueries) { + + JdbcQueryMethod queryMethod = getJdbcQueryMethod(method, repositoryMetadata, projectionFactory, namedQueries); + + if (namedQueries.hasQuery(queryMethod.getNamedQueryName()) || queryMethod.hasAnnotatedQuery()) { + + if (queryMethod.hasAnnotatedQuery() && queryMethod.hasAnnotatedQueryName()) { + LOG.warn(String.format( + "Query method %s is annotated with both, a query and a query name; Using the declared query", method)); + } + + String queryString = evaluateTableExpressions(repositoryMetadata, queryMethod.getRequiredQuery()); + + return new StringBasedJdbcQuery(queryString, queryMethod, getOperations(), rowMapperFactory, getConverter(), + delegate); + } + + throw new IllegalStateException( + String.format("Did neither find a NamedQuery nor an annotated query for method %s", method)); + } + + @SuppressWarnings("unchecked") + private class BeanFactoryRowMapperFactory implements AbstractJdbcQuery.RowMapperFactory { + + private final @Nullable BeanFactory beanFactory; + + BeanFactoryRowMapperFactory(@Nullable BeanFactory beanFactory) { + this.beanFactory = 
beanFactory; + } + + @Override + public RowMapper create(Class result) { + return createMapper(result); + } + + @Override + public RowMapper getRowMapper(String reference) { + + if (beanFactory == null) { + throw new IllegalStateException( + "Cannot resolve RowMapper bean reference '" + reference + "'; BeanFactory is not configured."); + } + + return beanFactory.getBean(reference, RowMapper.class); + } + + @Override + public ResultSetExtractor getResultSetExtractor(String reference) { + + if (beanFactory == null) { + throw new IllegalStateException( + "Cannot resolve ResultSetExtractor bean reference '" + reference + "'; BeanFactory is not configured."); + } + + return beanFactory.getBean(reference, ResultSetExtractor.class); + } + } + + } + + /** + * {@link QueryLookupStrategy} to try to detect a declared query first ( + * {@link org.springframework.data.jdbc.repository.query.Query}, JDBC named query). In case none is found we fall back + * on query creation. + * + * @author Diego Krupitza + * @since 2.4 + */ + static class CreateIfNotFoundQueryLookupStrategy extends JdbcQueryLookupStrategy { + + private final DeclaredQueryLookupStrategy lookupStrategy; + private final CreateQueryLookupStrategy createStrategy; + + /** + * Creates a new {@link CreateIfNotFoundQueryLookupStrategy}. + * + * @param createStrategy must not be {@literal null}. + * @param lookupStrategy must not be {@literal null}. 
+ */ +	CreateIfNotFoundQueryLookupStrategy(ApplicationEventPublisher publisher, @Nullable EntityCallbacks callbacks, +			RelationalMappingContext context, JdbcConverter converter, Dialect dialect, +			QueryMappingConfiguration queryMappingConfiguration, NamedParameterJdbcOperations operations, +			CreateQueryLookupStrategy createStrategy, +			DeclaredQueryLookupStrategy lookupStrategy, ValueExpressionDelegate delegate) { + +		super(publisher, callbacks, context, converter, dialect, queryMappingConfiguration, operations, +				delegate); + +		Assert.notNull(createStrategy, "CreateQueryLookupStrategy must not be null"); +		Assert.notNull(lookupStrategy, "DeclaredQueryLookupStrategy must not be null"); + +		this.createStrategy = createStrategy; +		this.lookupStrategy = lookupStrategy; +	} + +	@Override +	public RepositoryQuery resolveQuery(Method method, RepositoryMetadata repositoryMetadata, +			ProjectionFactory projectionFactory, NamedQueries namedQueries) { + +		try { +			return lookupStrategy.resolveQuery(method, repositoryMetadata, projectionFactory, namedQueries); +		} catch (IllegalStateException e) { +			return createStrategy.resolveQuery(method, repositoryMetadata, projectionFactory, namedQueries); +		} +	} + } + + /** + * Creates a {@link JdbcQueryMethod} based on the parameters + */ + JdbcQueryMethod getJdbcQueryMethod(Method method, RepositoryMetadata repositoryMetadata, +		ProjectionFactory projectionFactory, NamedQueries namedQueries) { +	return new JdbcQueryMethod(method, repositoryMetadata, projectionFactory, namedQueries, getMappingContext()); + } + + /** + * Creates a {@link QueryLookupStrategy} based on the provided + * {@link org.springframework.data.repository.query.QueryLookupStrategy.Key}. + * + * @param key the key that decides what {@link QueryLookupStrategy} should be used. 
+ * @param publisher must not be {@literal null} + * @param callbacks may be {@literal null} + * @param context must not be {@literal null} + * @param converter must not be {@literal null} + * @param dialect must not be {@literal null} + * @param queryMappingConfiguration must not be {@literal null} + * @param operations must not be {@literal null} + * @param beanFactory may be {@literal null} + */ + public static QueryLookupStrategy create(@Nullable Key key, ApplicationEventPublisher publisher, + @Nullable EntityCallbacks callbacks, RelationalMappingContext context, JdbcConverter converter, Dialect dialect, + QueryMappingConfiguration queryMappingConfiguration, NamedParameterJdbcOperations operations, + @Nullable BeanFactory beanFactory, ValueExpressionDelegate delegate) { + Assert.notNull(publisher, "ApplicationEventPublisher must not be null"); + Assert.notNull(context, "RelationalMappingContextPublisher must not be null"); + Assert.notNull(converter, "JdbcConverter must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(queryMappingConfiguration, "QueryMappingConfiguration must not be null"); + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + Assert.notNull(delegate, "ValueExpressionDelegate must not be null"); + + CreateQueryLookupStrategy createQueryLookupStrategy = new CreateQueryLookupStrategy(publisher, callbacks, context, + converter, dialect, queryMappingConfiguration, operations, delegate); + + DeclaredQueryLookupStrategy declaredQueryLookupStrategy = new DeclaredQueryLookupStrategy(publisher, callbacks, + context, converter, dialect, queryMappingConfiguration, operations, beanFactory, delegate); + + Key keyToUse = key != null ? 
key : Key.CREATE_IF_NOT_FOUND; + + LOG.debug(String.format("Using the queryLookupStrategy %s", keyToUse)); + + switch (keyToUse) { + case CREATE: + return createQueryLookupStrategy; + case USE_DECLARED_QUERY: + return declaredQueryLookupStrategy; + case CREATE_IF_NOT_FOUND: + return new CreateIfNotFoundQueryLookupStrategy(publisher, callbacks, context, converter, dialect, + queryMappingConfiguration, operations, createQueryLookupStrategy, declaredQueryLookupStrategy, + delegate); + default: + throw new IllegalArgumentException(String.format("Unsupported query lookup strategy %s", key)); + } + } + + JdbcConverter getConverter() { + return converter; + } + + NamedParameterJdbcOperations getOperations() { + return operations; + } + + @SuppressWarnings("unchecked") + RowMapper createMapper(Class returnedObjectType) { + + RelationalPersistentEntity persistentEntity = getMappingContext().getPersistentEntity(returnedObjectType); + + if (persistentEntity == null) { + return (RowMapper) SingleColumnRowMapper.newInstance(returnedObjectType, + converter.getConversionService()); + } + + return (RowMapper) determineDefaultMapper(returnedObjectType); + } + + private RowMapper determineDefaultMapper(Class returnedObjectType) { + + RowMapper configuredQueryMapper = queryMappingConfiguration.getRowMapper(returnedObjectType); + + if (configuredQueryMapper != null) + return configuredQueryMapper; + + EntityRowMapper defaultEntityRowMapper = new EntityRowMapper<>( // + getMappingContext().getRequiredPersistentEntity(returnedObjectType), // + converter // + ); + + return new PostProcessingRowMapper<>(defaultEntityRowMapper); + } + + class PostProcessingRowMapper implements RowMapper { + + private final RowMapper delegate; + + PostProcessingRowMapper(RowMapper delegate) { + this.delegate = delegate; + } + + @Override + public T mapRow(ResultSet rs, int rowNum) throws SQLException { + + T entity = delegate.mapRow(rs, rowNum); + + if (entity != null) { + + publisher.publishEvent(new 
AfterConvertEvent<>(entity)); + + if (callbacks != null) { + return callbacks.callback(AfterConvertCallback.class, entity); + } + } + + return entity; + } + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactory.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactory.java new file mode 100644 index 0000000000..e687e9a149 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactory.java @@ -0,0 +1,160 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.support; + +import java.util.Optional; + +import org.springframework.beans.factory.BeanFactory; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.jdbc.core.JdbcAggregateTemplate; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.repository.core.EntityInformation; +import org.springframework.data.repository.core.RepositoryInformation; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.PersistentEntityInformation; +import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.data.repository.query.CachingValueExpressionDelegate; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Creates repository implementation based on JDBC. 
+ * + * @author Jens Schauder + * @author Greg Turnquist + * @author Christoph Strobl + * @author Mark Paluch + * @author Hebert Coelho + * @author Diego Krupitza + * @author Christopher Klein + * @author Marcin Grzejszczak + */ +public class JdbcRepositoryFactory extends RepositoryFactorySupport { + + private final RelationalMappingContext context; + private final JdbcConverter converter; + private final ApplicationEventPublisher publisher; + private final DataAccessStrategy accessStrategy; + private final NamedParameterJdbcOperations operations; + private final Dialect dialect; + private @Nullable BeanFactory beanFactory; + + private QueryMappingConfiguration queryMappingConfiguration = QueryMappingConfiguration.EMPTY; + private EntityCallbacks entityCallbacks; + + /** + * Creates a new {@link JdbcRepositoryFactory} for the given {@link DataAccessStrategy}, + * {@link RelationalMappingContext} and {@link ApplicationEventPublisher}. + * + * @param dataAccessStrategy must not be {@literal null}. + * @param context must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param dialect must not be {@literal null}. + * @param publisher must not be {@literal null}. + * @param operations must not be {@literal null}. 
+ */ + public JdbcRepositoryFactory(DataAccessStrategy dataAccessStrategy, RelationalMappingContext context, + JdbcConverter converter, Dialect dialect, ApplicationEventPublisher publisher, + NamedParameterJdbcOperations operations) { + + Assert.notNull(dataAccessStrategy, "DataAccessStrategy must not be null"); + Assert.notNull(context, "RelationalMappingContext must not be null"); + Assert.notNull(converter, "RelationalConverter must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(publisher, "ApplicationEventPublisher must not be null"); + + this.publisher = publisher; + this.context = context; + this.converter = converter; + this.dialect = dialect; + this.accessStrategy = dataAccessStrategy; + this.operations = operations; + } + + /** + * @param queryMappingConfiguration must not be {@literal null} consider {@link QueryMappingConfiguration#EMPTY} + * instead. + */ + public void setQueryMappingConfiguration(QueryMappingConfiguration queryMappingConfiguration) { + + Assert.notNull(queryMappingConfiguration, "QueryMappingConfiguration must not be null"); + + this.queryMappingConfiguration = queryMappingConfiguration; + } + + @SuppressWarnings("unchecked") + @Override + public EntityInformation getEntityInformation(Class aClass) { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(aClass); + + return (EntityInformation) new PersistentEntityInformation<>(entity); + } + + @Override + protected Object getTargetRepository(RepositoryInformation repositoryInformation) { + + JdbcAggregateTemplate template = new JdbcAggregateTemplate(publisher, context, converter, accessStrategy); + + if (entityCallbacks != null) { + template.setEntityCallbacks(entityCallbacks); + } + + RelationalPersistentEntity persistentEntity = context + .getRequiredPersistentEntity(repositoryInformation.getDomainType()); + + return getTargetRepositoryViaReflection(repositoryInformation, template, persistentEntity, + converter); + } + + 
@Override + protected Class getRepositoryBaseClass(RepositoryMetadata repositoryMetadata) { + return SimpleJdbcRepository.class; + } + + @Override + protected Optional getQueryLookupStrategy(@Nullable QueryLookupStrategy.Key key, + ValueExpressionDelegate valueExpressionDelegate) { + return Optional.of(JdbcQueryLookupStrategy.create(key, publisher, entityCallbacks, context, converter, dialect, + queryMappingConfiguration, operations, beanFactory, + new CachingValueExpressionDelegate(valueExpressionDelegate))); + } + + /** + * @param entityCallbacks + * @since 1.1 + */ + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + this.entityCallbacks = entityCallbacks; + } + + /** + * @param beanFactory the {@link BeanFactory} used for looking up {@link org.springframework.jdbc.core.RowMapper} and + * {@link org.springframework.jdbc.core.ResultSetExtractor} beans. + */ + public void setBeanFactory(@Nullable BeanFactory beanFactory) { + this.beanFactory = beanFactory; + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBean.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBean.java new file mode 100644 index 0000000000..db05fe9d85 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBean.java @@ -0,0 +1,200 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.support; + +import java.io.Serializable; + +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationEventPublisherAware; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.DataAccessStrategyFactory; +import org.springframework.data.jdbc.core.convert.InsertStrategyFactory; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.SqlGeneratorSource; +import org.springframework.data.jdbc.core.convert.SqlParametersFactory; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.data.repository.core.support.TransactionalRepositoryFactoryBeanSupport; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.util.Assert; + +/** + * Special adapter for Spring's {@link org.springframework.beans.factory.FactoryBean} interface to allow easy setup of + * repository factories via Spring configuration. 
+ * + * @author Jens Schauder + * @author Greg Turnquist + * @author Christoph Strobl + * @author Oliver Gierke + * @author Mark Paluch + * @author Hebert Coelho + * @author Chirag Tailor + */ +public class JdbcRepositoryFactoryBean, S, ID extends Serializable> + extends TransactionalRepositoryFactoryBeanSupport implements ApplicationEventPublisherAware { + + private ApplicationEventPublisher publisher; + private BeanFactory beanFactory; + private RelationalMappingContext mappingContext; + private JdbcConverter converter; + private DataAccessStrategy dataAccessStrategy; + private QueryMappingConfiguration queryMappingConfiguration = QueryMappingConfiguration.EMPTY; + private NamedParameterJdbcOperations operations; + private EntityCallbacks entityCallbacks; + private Dialect dialect; + + /** + * Creates a new {@link JdbcRepositoryFactoryBean} for the given repository interface. + * + * @param repositoryInterface must not be {@literal null}. + */ + public JdbcRepositoryFactoryBean(Class repositoryInterface) { + super(repositoryInterface); + } + + @Override + public void setApplicationEventPublisher(ApplicationEventPublisher publisher) { + + super.setApplicationEventPublisher(publisher); + + this.publisher = publisher; + } + + /** + * Creates the actual {@link RepositoryFactorySupport} instance. 
+ */ + @Override + protected RepositoryFactorySupport doCreateRepositoryFactory() { + + JdbcRepositoryFactory jdbcRepositoryFactory = new JdbcRepositoryFactory(dataAccessStrategy, mappingContext, + converter, dialect, publisher, operations); + jdbcRepositoryFactory.setQueryMappingConfiguration(queryMappingConfiguration); + jdbcRepositoryFactory.setEntityCallbacks(entityCallbacks); + jdbcRepositoryFactory.setBeanFactory(beanFactory); + + return jdbcRepositoryFactory; + } + + public void setMappingContext(RelationalMappingContext mappingContext) { + + Assert.notNull(mappingContext, "MappingContext must not be null"); + + super.setMappingContext(mappingContext); + this.mappingContext = mappingContext; + } + + public void setDialect(Dialect dialect) { + + Assert.notNull(dialect, "Dialect must not be null"); + + this.dialect = dialect; + } + + /** + * @param dataAccessStrategy can be {@literal null}. + */ + public void setDataAccessStrategy(DataAccessStrategy dataAccessStrategy) { + + Assert.notNull(dataAccessStrategy, "DataAccessStrategy must not be null"); + + this.dataAccessStrategy = dataAccessStrategy; + } + + /** + * @param queryMappingConfiguration can be {@literal null}. {@link #afterPropertiesSet()} defaults to + * {@link QueryMappingConfiguration#EMPTY} if {@literal null}. 
+ */ + @Autowired(required = false) + public void setQueryMappingConfiguration(QueryMappingConfiguration queryMappingConfiguration) { + + Assert.notNull(queryMappingConfiguration, "QueryMappingConfiguration must not be null"); + + this.queryMappingConfiguration = queryMappingConfiguration; + } + + public void setJdbcOperations(NamedParameterJdbcOperations operations) { + + Assert.notNull(operations, "NamedParameterJdbcOperations must not be null"); + + this.operations = operations; + } + + public void setConverter(JdbcConverter converter) { + + Assert.notNull(converter, "JdbcConverter must not be null"); + + this.converter = converter; + } + + @Override + public void setBeanFactory(BeanFactory beanFactory) { + + super.setBeanFactory(beanFactory); + + this.beanFactory = beanFactory; + } + + @Override + public void afterPropertiesSet() { + + Assert.state(this.mappingContext != null, "MappingContext is required and must not be null"); + Assert.state(this.converter != null, "RelationalConverter is required and must not be null"); + + if (this.operations == null) { + + Assert.state(beanFactory != null, "If no JdbcOperations are set a BeanFactory must be available"); + + this.operations = beanFactory.getBean(NamedParameterJdbcOperations.class); + } + + if (this.dataAccessStrategy == null) { + + Assert.state(beanFactory != null, "If no DataAccessStrategy is set a BeanFactory must be available"); + + this.dataAccessStrategy = this.beanFactory.getBeanProvider(DataAccessStrategy.class) // + .getIfAvailable(() -> { + + Assert.state(this.dialect != null, "Dialect is required and must not be null"); + + SqlGeneratorSource sqlGeneratorSource = new SqlGeneratorSource(this.mappingContext, this.converter, + this.dialect); + SqlParametersFactory sqlParametersFactory = new SqlParametersFactory(this.mappingContext, this.converter); + InsertStrategyFactory insertStrategyFactory = new InsertStrategyFactory(this.operations, this.dialect); + + DataAccessStrategyFactory factory = new 
DataAccessStrategyFactory(sqlGeneratorSource, this.converter, + this.operations, sqlParametersFactory, insertStrategyFactory); + + return factory.create(); + }); + } + + if (this.queryMappingConfiguration == null) { + this.queryMappingConfiguration = QueryMappingConfiguration.EMPTY; + } + + if (beanFactory != null) { + entityCallbacks = EntityCallbacks.create(beanFactory); + } + + super.afterPropertiesSet(); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/ScrollDelegate.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/ScrollDelegate.java new file mode 100644 index 0000000000..a40c79b0c9 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/ScrollDelegate.java @@ -0,0 +1,89 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.support; + +import java.util.List; +import java.util.function.Function; +import java.util.function.IntFunction; + +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.relational.core.query.Query; +import org.springframework.util.Assert; + +/** + * Delegate to run {@link ScrollPosition scroll queries} and create result {@link Window}. 
+ * + * @author Mark Paluch + * @since 3.1.4 + */ +public class ScrollDelegate { + + /** + * Run the {@link Query} and return a scroll {@link Window}. + * + * @param query must not be {@literal null}. + * @param scrollPosition must not be {@literal null}. + * @return the scroll {@link Window}. + */ + @SuppressWarnings("unchecked") + public static Window scroll(Query query, Function> queryFunction, + ScrollPosition scrollPosition) { + + Assert.notNull(scrollPosition, "ScrollPosition must not be null"); + + int limit = query.getLimit(); + if (limit > 0 && limit != Integer.MAX_VALUE) { + query = query.limit(limit + 1); + } + + List result = queryFunction.apply(query); + + if (scrollPosition instanceof OffsetScrollPosition offset) { + return createWindow(result, limit, offset.positionFunction()); + } + + throw new UnsupportedOperationException("ScrollPosition " + scrollPosition + " not supported"); + } + + private static Window createWindow(List result, int limit, + IntFunction positionFunction) { + return Window.from(getFirst(limit, result), positionFunction, hasMoreElements(result, limit)); + } + + private static boolean hasMoreElements(List result, int limit) { + return !result.isEmpty() && result.size() > limit; + } + + /** + * Return the first {@code count} items from the list. + * + * @param count the number of first elements to be included in the returned list. + * @param list must not be {@literal null} + * @return the returned sublist if the {@code list} is greater {@code count}. + * @param the element type of the lists. 
+ */ + public static List getFirst(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(0, count); + } + + return list; + } + +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/SimpleJdbcRepository.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/SimpleJdbcRepository.java new file mode 100644 index 0000000000..65eba4b02c --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/SimpleJdbcRepository.java @@ -0,0 +1,204 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.support; + +import java.util.List; +import java.util.Optional; +import java.util.function.Function; + +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.JdbcAggregateOperations; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.relational.repository.query.RelationalExampleMapper; +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.repository.PagingAndSortingRepository; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.data.repository.query.QueryByExampleExecutor; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.Assert; + +/** + * Default implementation of the {@link org.springframework.data.repository.CrudRepository} interface. 
+ * + * @author Jens Schauder + * @author Oliver Gierke + * @author Milan Milanov + * @author Chirag Tailor + * @author Diego Krupitza + * @author Dmitriy Kovalenko + */ +@Transactional(readOnly = true) +public class SimpleJdbcRepository + implements CrudRepository, PagingAndSortingRepository, QueryByExampleExecutor { + + private final JdbcAggregateOperations entityOperations; + private final PersistentEntity entity; + private final RelationalExampleMapper exampleMapper; + + public SimpleJdbcRepository(JdbcAggregateOperations entityOperations, PersistentEntity entity, + JdbcConverter converter) { + + Assert.notNull(entityOperations, "EntityOperations must not be null"); + Assert.notNull(entity, "Entity must not be null"); + + this.entityOperations = entityOperations; + this.entity = entity; + this.exampleMapper = new RelationalExampleMapper(converter.getMappingContext()); + } + + @Transactional + @Override + public S save(S instance) { + return entityOperations.save(instance); + } + + @Transactional + @Override + public List saveAll(Iterable entities) { + return entityOperations.saveAll(entities); + } + + @Override + public Optional findById(ID id) { + return Optional.ofNullable(entityOperations.findById(id, entity.getType())); + } + + @Override + public boolean existsById(ID id) { + return entityOperations.existsById(id, entity.getType()); + } + + @Override + public List findAll() { + return entityOperations.findAll(entity.getType()); + } + + @Override + public List findAllById(Iterable ids) { + return entityOperations.findAllById(ids, entity.getType()); + } + + @Override + public long count() { + return entityOperations.count(entity.getType()); + } + + @Transactional + @Override + public void deleteById(ID id) { + entityOperations.deleteById(id, entity.getType()); + } + + @Transactional + @Override + public void delete(T instance) { + entityOperations.delete(instance); + } + + @Transactional + @Override + public void deleteAllById(Iterable ids) { + 
entityOperations.deleteAllById(ids, entity.getType()); + } + + @Transactional + @Override + public void deleteAll(Iterable entities) { + entityOperations.deleteAll(entities); + } + + @Transactional + @Override + public void deleteAll() { + entityOperations.deleteAll(entity.getType()); + } + + @Override + public List findAll(Sort sort) { + return entityOperations.findAll(entity.getType(), sort); + } + + @Override + public Page findAll(Pageable pageable) { + return entityOperations.findAll(entity.getType(), pageable); + } + + @Override + public Optional findOne(Example example) { + + Assert.notNull(example, "Example must not be null"); + + return this.entityOperations.findOne(this.exampleMapper.getMappedExample(example), example.getProbeType()); + } + + @Override + public List findAll(Example example) { + + Assert.notNull(example, "Example must not be null"); + + return findAll(example, Sort.unsorted()); + } + + @Override + public List findAll(Example example, Sort sort) { + + Assert.notNull(example, "Example must not be null"); + Assert.notNull(sort, "Sort must not be null"); + + return this.entityOperations.findAll(this.exampleMapper.getMappedExample(example).sort(sort), + example.getProbeType()); + } + + @Override + public Page findAll(Example example, Pageable pageable) { + + Assert.notNull(example, "Example must not be null"); + + return this.entityOperations.findAll(this.exampleMapper.getMappedExample(example), example.getProbeType(), + pageable); + } + + @Override + public long count(Example example) { + + Assert.notNull(example, "Example must not be null"); + + return this.entityOperations.count(this.exampleMapper.getMappedExample(example), example.getProbeType()); + } + + @Override + public boolean exists(Example example) { + Assert.notNull(example, "Example must not be null"); + + return this.entityOperations.exists(this.exampleMapper.getMappedExample(example), example.getProbeType()); + } + + @Override + public R findBy(Example example, Function, R> 
queryFunction) { + + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + FluentQuery.FetchableFluentQuery fluentQuery = new FetchableFluentQueryByExample<>(example, + example.getProbeType(), this.exampleMapper, this.entityOperations); + + return queryFunction.apply(fluentQuery); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/package-info.java new file mode 100644 index 0000000000..d60ca96fde --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/repository/support/package-info.java @@ -0,0 +1,4 @@ +@NonNullApi +package org.springframework.data.jdbc.repository.support; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/support/JdbcUtil.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/support/JdbcUtil.java new file mode 100644 index 0000000000..5fec40f1a9 --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/support/JdbcUtil.java @@ -0,0 +1,109 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.support; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.sql.Date; +import java.sql.JDBCType; +import java.sql.SQLType; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.OffsetDateTime; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.util.Assert; + +/** + * Contains methods dealing with the quirks of JDBC, independent of any Entity, Aggregate or Repository abstraction. + * + * @author Jens Schauder + * @author Thomas Lang + */ +public final class JdbcUtil { + + public static final SQLType TYPE_UNKNOWN = new SQLType() { + @Override + public String getName() { + return "UNKNOWN"; + } + + @Override + public String getVendor() { + return "Spring"; + } + + @Override + public Integer getVendorTypeNumber() { + return JdbcUtils.TYPE_UNKNOWN; + } + + @Override + public String toString() { + return getName(); + } + }; + private static final Map, SQLType> sqlTypeMappings = new HashMap<>(); + + static { + + sqlTypeMappings.put(String.class, JDBCType.VARCHAR); + sqlTypeMappings.put(BigInteger.class, JDBCType.BIGINT); + sqlTypeMappings.put(BigDecimal.class, JDBCType.DECIMAL); + sqlTypeMappings.put(Byte.class, JDBCType.TINYINT); + sqlTypeMappings.put(byte.class, JDBCType.TINYINT); + sqlTypeMappings.put(Short.class, JDBCType.SMALLINT); + sqlTypeMappings.put(short.class, JDBCType.SMALLINT); + sqlTypeMappings.put(Integer.class, JDBCType.INTEGER); + sqlTypeMappings.put(int.class, JDBCType.INTEGER); + sqlTypeMappings.put(Long.class, JDBCType.BIGINT); + sqlTypeMappings.put(long.class, JDBCType.BIGINT); + sqlTypeMappings.put(Double.class, JDBCType.DOUBLE); + sqlTypeMappings.put(double.class, JDBCType.DOUBLE); + sqlTypeMappings.put(Float.class, JDBCType.REAL); + sqlTypeMappings.put(float.class, JDBCType.REAL); + sqlTypeMappings.put(Boolean.class, JDBCType.BIT); + sqlTypeMappings.put(boolean.class, JDBCType.BIT); + 
sqlTypeMappings.put(byte[].class, JDBCType.VARBINARY); + sqlTypeMappings.put(Date.class, JDBCType.DATE); + sqlTypeMappings.put(Time.class, JDBCType.TIME); + sqlTypeMappings.put(Timestamp.class, JDBCType.TIMESTAMP); + sqlTypeMappings.put(OffsetDateTime.class, JDBCType.TIMESTAMP_WITH_TIMEZONE); + } + + private JdbcUtil() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + /** + * Returns the {@link SQLType} value suitable for passing a value of the provided type to JDBC driver. + * + * @param type The type of value to be bound to a {@link java.sql.PreparedStatement}. + * @return a matching {@link SQLType} or {@link #TYPE_UNKNOWN}. + */ + public static SQLType targetSqlTypeFor(Class type) { + + Assert.notNull(type, "Type must not be null"); + + return sqlTypeMappings.keySet().stream() // + .filter(k -> k.isAssignableFrom(type)) // + .findFirst() // + .map(sqlTypeMappings::get) // + .orElse(JdbcUtil.TYPE_UNKNOWN); + } +} diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/support/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/support/package-info.java new file mode 100644 index 0000000000..8ff6810baa --- /dev/null +++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/support/package-info.java @@ -0,0 +1,4 @@ +@NonNullApi +package org.springframework.data.jdbc.support; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-jdbc/src/main/kotlin/org/springframework/data/jdbc/core/JdbcAggregateOperationsExtensions.kt b/spring-data-jdbc/src/main/kotlin/org/springframework/data/jdbc/core/JdbcAggregateOperationsExtensions.kt new file mode 100644 index 0000000000..b1b7fcd26d --- /dev/null +++ b/spring-data-jdbc/src/main/kotlin/org/springframework/data/jdbc/core/JdbcAggregateOperationsExtensions.kt @@ -0,0 +1,120 @@ +/* + * Copyright 2024-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core + +import org.springframework.data.domain.Page +import org.springframework.data.domain.Pageable +import org.springframework.data.domain.Sort +import org.springframework.data.relational.core.query.Query +import java.util.Optional + +/** + * Kotlin extensions for [JdbcAggregateOperations]. + * + * @author Felix Desyatirikov + * @since 3.5 + */ + +/** + * Extension for [JdbcAggregateOperations.count]. + */ +inline fun JdbcAggregateOperations.count(): Long = + count(T::class.java) + +/** + * Extension for [JdbcAggregateOperations.count] with a query. + */ +inline fun JdbcAggregateOperations.count(query: Query): Long = + count(query, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.exists]. + */ +inline fun JdbcAggregateOperations.exists(query: Query): Boolean = + exists(query, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.existsById]. + */ +inline fun JdbcAggregateOperations.existsById(id: Any): Boolean = + existsById(id, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.findById]. + */ +inline fun JdbcAggregateOperations.findById(id: Any): T? = + findById(id, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.findAllById]. + */ +inline fun JdbcAggregateOperations.findAllById(ids: Iterable<*>): List = + findAllById(ids, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.findAll]. 
+ */ +inline fun JdbcAggregateOperations.findAll(): List = + findAll(T::class.java) + +/** + * Extension for [JdbcAggregateOperations.findAll] with sorting. + */ +inline fun JdbcAggregateOperations.findAll(sort: Sort): List = + findAll(T::class.java, sort) + +/** + * Extension for [JdbcAggregateOperations.findAll] with pagination. + */ +inline fun JdbcAggregateOperations.findAll(pageable: Pageable): Page = + findAll(T::class.java, pageable) + +/** + * Extension for [JdbcAggregateOperations.findOne] with a query. + */ +inline fun JdbcAggregateOperations.findOne(query: Query): Optional = + findOne(query, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.findAll] with a query. + */ +inline fun JdbcAggregateOperations.findAll(query: Query): List = + findAll(query, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.findAll] with query and pagination. + */ +inline fun JdbcAggregateOperations.findAll(query: Query, pageable: Pageable): Page = + findAll(query, T::class.java, pageable) + +/** + * Extension for [JdbcAggregateOperations.deleteById]. + */ +inline fun JdbcAggregateOperations.deleteById(id: Any): Unit = + deleteById(id, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.deleteAllById]. + */ +inline fun JdbcAggregateOperations.deleteAllById(ids: Iterable<*>): Unit = + deleteAllById(ids, T::class.java) + +/** + * Extension for [JdbcAggregateOperations.deleteAll]. 
+ */ +inline fun JdbcAggregateOperations.deleteAll(): Unit = + deleteAll(T::class.java) \ No newline at end of file diff --git a/spring-data-jdbc/src/main/resources/META-INF/spring.factories b/spring-data-jdbc/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000..dedc6fdf90 --- /dev/null +++ b/spring-data-jdbc/src/main/resources/META-INF/spring.factories @@ -0,0 +1,2 @@ +org.springframework.data.repository.core.support.RepositoryFactorySupport=org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory +org.springframework.data.jdbc.core.dialect.DialectResolver$JdbcDialectProvider=org.springframework.data.jdbc.core.dialect.DialectResolver.DefaultDialectProvider diff --git a/spring-data-jdbc/src/main/resources/META-INF/spring/aot.factories b/spring-data-jdbc/src/main/resources/META-INF/spring/aot.factories new file mode 100644 index 0000000000..719661141b --- /dev/null +++ b/spring-data-jdbc/src/main/resources/META-INF/spring/aot.factories @@ -0,0 +1,2 @@ +org.springframework.aot.hint.RuntimeHintsRegistrar=\ + org.springframework.data.jdbc.aot.JdbcRuntimeHints diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/DependencyTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/DependencyTests.java new file mode 100644 index 0000000000..d7d142b4a8 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/DependencyTests.java @@ -0,0 +1,181 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.data.auditing.config.AuditingHandlerBeanDefinitionParser; + +import com.tngtech.archunit.base.DescribedPredicate; +import com.tngtech.archunit.core.domain.JavaClass; +import com.tngtech.archunit.core.domain.JavaClasses; +import com.tngtech.archunit.core.importer.ClassFileImporter; +import com.tngtech.archunit.core.importer.ImportOption; +import com.tngtech.archunit.lang.ArchRule; +import com.tngtech.archunit.library.dependencies.SliceAssignment; +import com.tngtech.archunit.library.dependencies.SliceIdentifier; +import com.tngtech.archunit.library.dependencies.SlicesRuleDefinition; + +/** + * Test package dependencies for violations. + * + * @author Jens Schauder + */ +@Disabled("Disabled because of JdbcArrayColumns and Dialect cycle to be resolved in 4.0") +public class DependencyTests { + + @Test + void cycleFree() { + + JavaClasses importedClasses = new ClassFileImporter() // + .withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_TESTS) // + .withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_JARS) // we just analyze the code of this module. 
+ .importPackages("org.springframework.data.jdbc").that( // + onlySpringData() // + ); + + ArchRule rule = SlicesRuleDefinition.slices() // + .matching("org.springframework.data.jdbc.(**)") // + .should() // + .beFreeOfCycles(); + + rule.check(importedClasses); + } + + @Test + void acrossModules() { + + JavaClasses importedClasses = new ClassFileImporter().withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_TESTS) + .importPackages( // + "org.springframework.data.jdbc", // Spring Data Relational + "org.springframework.data.relational", // Spring Data Relational + "org.springframework.data" // Spring Data Commons + ).that(onlySpringData()) // + .that(ignore(AuditingHandlerBeanDefinitionParser.class)) // + .that(ignorePackage("org.springframework.data.aot.hint")) // ignoring aot, since it causes cycles in commons + .that(ignorePackage("org.springframework.data.aot")); // ignoring aot, since it causes cycles in commons + + ArchRule rule = SlicesRuleDefinition.slices() // + .assignedFrom(subModuleSlicing()) // + .should().beFreeOfCycles(); + + rule.check(importedClasses); + } + + @Test // GH-1058 + void testGetFirstPackagePart() { + + SoftAssertions.assertSoftly(softly -> { + softly.assertThat(getFirstPackagePart("a.b.c")).isEqualTo("a"); + softly.assertThat(getFirstPackagePart("a")).isEqualTo("a"); + }); + } + + @Test // GH-1058 + void testSubModule() { + + SoftAssertions.assertSoftly(softly -> { + softly.assertThat(subModule("a.b", "a.b.c.d")).isEqualTo("c"); + softly.assertThat(subModule("a.b", "a.b.c")).isEqualTo("c"); + softly.assertThat(subModule("a.b", "a.b")).isEqualTo(""); + }); + } + + private DescribedPredicate onlySpringData() { + + return new DescribedPredicate<>("Spring Data Classes") { + @Override + public boolean test(JavaClass input) { + return input.getPackageName().startsWith("org.springframework.data"); + } + }; + } + + private DescribedPredicate ignore(Class type) { + + return new DescribedPredicate<>("ignored class " + type.getName()) { + 
@Override + public boolean test(JavaClass input) { + return !input.getFullName().startsWith(type.getName()); + } + }; + } + + private DescribedPredicate ignorePackage(String type) { + + return new DescribedPredicate<>("ignored class " + type) { + @Override + public boolean test(JavaClass input) { + return !input.getPackageName().equals(type); + } + }; + } + + private String getFirstPackagePart(String subpackage) { + + int index = subpackage.indexOf("."); + if (index < 0) { + return subpackage; + } + return subpackage.substring(0, index); + } + + private String subModule(String basePackage, String packageName) { + + if (packageName.startsWith(basePackage) && packageName.length() > basePackage.length()) { + + final int index = basePackage.length() + 1; + String subpackage = packageName.substring(index); + return getFirstPackagePart(subpackage); + } + return ""; + } + + private SliceAssignment subModuleSlicing() { + return new SliceAssignment() { + + @Override + public SliceIdentifier getIdentifierOf(JavaClass javaClass) { + + String packageName = javaClass.getPackageName(); + + String subModule = subModule("org.springframework.data.jdbc", packageName); + if (!subModule.isEmpty()) { + return SliceIdentifier.of(subModule); + } + + subModule = subModule("org.springframework.data.relational", packageName); + if (!subModule.isEmpty()) { + return SliceIdentifier.of(subModule); + } + + subModule = subModule("org.springframework.data", packageName); + if (!subModule.isEmpty()) { + return SliceIdentifier.of(subModule); + } + + return SliceIdentifier.ignore(); + } + + @Override + public String getDescription() { + return "Submodule"; + } + }; + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AbstractJdbcAggregateTemplateIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AbstractJdbcAggregateTemplateIntegrationTests.java new file mode 100644 index 0000000000..4f047f8406 --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AbstractJdbcAggregateTemplateIntegrationTests.java @@ -0,0 +1,2289 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.springframework.data.jdbc.testing.TestConfiguration.*; +import static org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature.*; + +import java.time.LocalDateTime; +import java.util.*; +import java.util.ArrayList; +import java.util.function.Function; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.IncorrectUpdateSemanticsDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.annotation.Id; +import 
org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Persistable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.jdbc.testing.TestDatabaseFeatures; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.relational.core.conversion.DbActionExecutionException; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.InsertOnlyProperty; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.query.CriteriaDefinition; +import org.springframework.data.relational.core.query.Query; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.ContextConfiguration; + +/** + * Integration tests for {@link JdbcAggregateTemplate}. 
+ * + * @author Jens Schauder + * @author Thomas Lang + * @author Mark Paluch + * @author Myeonghyeon Lee + * @author Tom Hombergs + * @author Tyler Van Gorder + * @author Clemens Hahn + * @author Milan Milanov + * @author Mikhail Polivakha + * @author Chirag Tailor + * @author Vincent Galloy + * @author Sergey Korotaev + */ +@IntegrationTest +abstract class AbstractJdbcAggregateTemplateIntegrationTests { + + @Autowired JdbcAggregateOperations template; + @Autowired NamedParameterJdbcOperations jdbcTemplate; + @Autowired RelationalMappingContext mappingContext; + @Autowired NamedParameterJdbcOperations jdbc; + + LegoSet legoSet = createLegoSet("Star Destroyer"); + + /** + * creates an instance of {@link NoIdListChain4} with the following properties: + *
+	 * <ul>
+	 * <li>Each element has two children with indices 0 and 1.</li>
+	 * <li>the xxxValue of each element is a {@literal v} followed by the indices used to navigate to the given
+	 * instance.</li>
+	 * </ul>
+ */ + private static NoIdListChain4 createNoIdTree() { + + NoIdListChain4 chain4 = new NoIdListChain4(); + chain4.fourValue = "v"; + + IntStream.of(0, 1).forEach(i -> { + + NoIdListChain3 c3 = new NoIdListChain3(); + c3.threeValue = chain4.fourValue + i; + chain4.chain3.add(c3); + + IntStream.of(0, 1).forEach(j -> { + + NoIdListChain2 c2 = new NoIdListChain2(); + c2.twoValue = c3.threeValue + j; + c3.chain2.add(c2); + + IntStream.of(0, 1).forEach(k -> { + + NoIdListChain1 c1 = new NoIdListChain1(); + c1.oneValue = c2.twoValue + k; + c2.chain1.add(c1); + + IntStream.of(0, 1).forEach(m -> { + + NoIdListChain0 c0 = new NoIdListChain0(); + c0.zeroValue = c1.oneValue + m; + c1.chain0.add(c0); + }); + }); + }); + }); + + return chain4; + } + + private static NoIdMapChain4 createNoIdMapTree() { + + NoIdMapChain4 chain4 = new NoIdMapChain4(); + chain4.fourValue = "v"; + + IntStream.of(0, 1).forEach(i -> { + + NoIdMapChain3 c3 = new NoIdMapChain3(); + c3.threeValue = chain4.fourValue + i; + chain4.chain3.put(asString(i), c3); + + IntStream.of(0, 1).forEach(j -> { + + NoIdMapChain2 c2 = new NoIdMapChain2(); + c2.twoValue = c3.threeValue + j; + c3.chain2.put(asString(j), c2); + + IntStream.of(0, 1).forEach(k -> { + + NoIdMapChain1 c1 = new NoIdMapChain1(); + c1.oneValue = c2.twoValue + k; + c2.chain1.put(asString(k), c1); + + IntStream.of(0, 1).forEach(it -> { + + NoIdMapChain0 c0 = new NoIdMapChain0(); + c0.zeroValue = c1.oneValue + it; + c1.chain0.put(asString(it), c0); + }); + }); + }); + }); + + return chain4; + } + + private static String asString(int i) { + return "_" + i; + } + + private static LegoSet createLegoSet(String name) { + + LegoSet entity = new LegoSet(); + entity.name = name; + + Manual manual = new Manual(); + manual.content = "Accelerates to 99% of light speed; Destroys almost everything. 
See https://what-if.xkcd.com/1/"; + entity.manual = manual; + + return entity; + } + + @Test // GH-1446 + void findById() { + + WithInsertOnly entity = new WithInsertOnly(); + entity.insertOnly = "entity"; + entity = template.save(entity); + + WithInsertOnly other = new WithInsertOnly(); + other.insertOnly = "other"; + other = template.save(other); + + assertThat(template.findById(entity.id, WithInsertOnly.class).insertOnly).isEqualTo("entity"); + assertThat(template.findById(other.id, WithInsertOnly.class).insertOnly).isEqualTo("other"); + } + + @Test // GH-1446 + void findAllById() { + + WithInsertOnly entity = new WithInsertOnly(); + entity.insertOnly = "entity"; + entity = template.save(entity); + + WithInsertOnly other = new WithInsertOnly(); + other.insertOnly = "other"; + other = template.save(other); + + WithInsertOnly yetAnother = new WithInsertOnly(); + yetAnother.insertOnly = "yetAnother"; + yetAnother = template.save(yetAnother); + + Iterable reloadedById = template.findAllById(asList(entity.id, yetAnother.id), + WithInsertOnly.class); + assertThat(reloadedById).extracting(e -> e.id, e -> e.insertOnly) + .containsExactlyInAnyOrder(tuple(entity.id, "entity"), tuple(yetAnother.id, "yetAnother")); + } + + @Test // GH-1601 + void findAllByQuery() { + + template.save(SimpleListParent.of("one", "one_1")); + SimpleListParent two = template.save(SimpleListParent.of("two", "two_1", "two_2")); + template.save(SimpleListParent.of("three", "three_1", "three_2", "three_3")); + + CriteriaDefinition criteria = CriteriaDefinition.from(Criteria.where("id").is(two.id)); + Query query = Query.query(criteria); + Iterable reloadedById = template.findAll(query, SimpleListParent.class); + + assertThat(reloadedById).extracting(e -> e.id, e -> e.content.size()).containsExactly(tuple(two.id, 2)); + } + + @Test // GH-1601 + void findOneByQuery() { + + template.save(SimpleListParent.of("one", "one_1")); + SimpleListParent two = template.save(SimpleListParent.of("two", "two_1", 
"two_2")); + template.save(SimpleListParent.of("three", "three_1", "three_2", "three_3")); + + CriteriaDefinition criteria = CriteriaDefinition.from(Criteria.where("id").is(two.id)); + Query query = Query.query(criteria); + Optional reloadedById = template.findOne(query, SimpleListParent.class); + + assertThat(reloadedById).get().extracting(e -> e.id, e -> e.content.size()).containsExactly(two.id, 2); + } + + @Test // GH-1601 + void findOneByQueryNothingFound() { + + template.save(SimpleListParent.of("one", "one_1")); + SimpleListParent two = template.save(SimpleListParent.of("two", "two_1", "two_2")); + template.save(SimpleListParent.of("three", "three_1", "three_2", "three_3")); + + CriteriaDefinition criteria = CriteriaDefinition.from(Criteria.where("id").is(4711)); + Query query = Query.query(criteria); + Optional reloadedById = template.findOne(query, SimpleListParent.class); + + assertThat(reloadedById).isEmpty(); + } + + @Test // GH-1601 + void findOneByQueryToManyResults() { + + template.save(SimpleListParent.of("one", "one_1")); + SimpleListParent two = template.save(SimpleListParent.of("two", "two_1", "two_2")); + template.save(SimpleListParent.of("three", "three_1", "three_2", "three_3")); + + CriteriaDefinition criteria = CriteriaDefinition.from(Criteria.where("id").not(two.id)); + Query query = Query.query(criteria); + + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class) + .isThrownBy(() -> template.findOne(query, SimpleListParent.class)); + } + + @Test // DATAJDBC-112 + void saveAndLoadAnEntityWithReferencedEntityById() { + + template.save(legoSet); + + assertThat(legoSet.manual.id).describedAs("id of stored manual").isNotNull(); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.manual).isNotNull(); + + assertThat(reloadedLegoSet.manual.id) // + .isEqualTo(legoSet.manual.id) // + .isNotNull(); + assertThat(reloadedLegoSet.manual.content).isEqualTo(legoSet.manual.content); + 
} + + @Test // DATAJDBC-112 + void saveAndLoadManyEntitiesWithReferencedEntity() { + + template.save(legoSet); + + Iterable reloadedLegoSets = template.findAll(LegoSet.class); + + assertThat(reloadedLegoSets) // + .extracting("id", "manual.id", "manual.content") // + .containsExactly(tuple(legoSet.id, legoSet.manual.id, legoSet.manual.content)); + } + + @Test // GH-1714 + void saveAndLoadManeEntitiesWithReferenceEntityLikeStream() { + + template.save(legoSet); + + Stream streamable = template.streamAll(LegoSet.class); + + assertThat(streamable) + .extracting("id", "manual.id", "manual.content") // + .containsExactly(tuple(legoSet.id, legoSet.manual.id, legoSet.manual.content)); + } + + @Test // DATAJDBC-101 + void saveAndLoadManyEntitiesWithReferencedEntitySorted() { + + template.save(createLegoSet("Lava")); + template.save(createLegoSet("Star")); + template.save(createLegoSet("Frozen")); + + Iterable reloadedLegoSets = template.findAll(LegoSet.class, Sort.by("name")); + + assertThat(reloadedLegoSets) // + .extracting("name") // + .containsExactly("Frozen", "Lava", "Star"); + } + + @Test // GH-1714 + void saveAndLoadManyEntitiesWithReferencedEntitySortedLikeStream() { + + template.save(createLegoSet("Lava")); + template.save(createLegoSet("Star")); + template.save(createLegoSet("Frozen")); + + Stream reloadedLegoSets = template.streamAll(LegoSet.class, Sort.by("name")); + + assertThat(reloadedLegoSets) // + .extracting("name") // + .containsExactly("Frozen", "Lava", "Star"); + } + + @Test // DATAJDBC-101 + void saveAndLoadManyEntitiesWithReferencedEntitySortedAndPaged() { + + template.save(createLegoSet("Lava")); + template.save(createLegoSet("Star")); + template.save(createLegoSet("Frozen")); + + Iterable reloadedLegoSets = template.findAll(LegoSet.class, PageRequest.of(1, 2, Sort.by("name"))); + + assertThat(reloadedLegoSets) // + .extracting("name") // + .containsExactly("Star"); + } + + @Test // GH-821 + @EnabledOnFeature(SUPPORTS_NULL_PRECEDENCE) + void 
saveAndLoadManyEntitiesWithReferencedEntitySortedWithNullPrecedence() { + + template.save(createLegoSet(null)); + template.save(createLegoSet("Star")); + template.save(createLegoSet("Frozen")); + + Iterable reloadedLegoSets = template.findAll(LegoSet.class, + Sort.by(new Sort.Order(Sort.Direction.ASC, "name", Sort.NullHandling.NULLS_LAST))); + + assertThat(reloadedLegoSets) // + .extracting("name") // + .containsExactly("Frozen", "Star", null); + } + + @Test // + void findByNonPropertySortFails() { + + assertThatThrownBy(() -> template.findAll(LegoSet.class, Sort.by("somethingNotExistant"))) + .isInstanceOf(InvalidPersistentPropertyPath.class); + } + + @Test // GH-1714 + void findByNonPropertySortLikeStreamFails() { + + assertThatThrownBy(() -> template.streamAll(LegoSet.class, Sort.by("somethingNotExistant"))) + .isInstanceOf(InvalidPersistentPropertyPath.class); + } + + @Test // DATAJDBC-112 + void saveAndLoadManyEntitiesByIdWithReferencedEntity() { + + template.save(legoSet); + + Iterable reloadedLegoSets = template.findAllById(singletonList(legoSet.id), LegoSet.class); + + assertThat(reloadedLegoSets).hasSize(1).extracting("id", "manual.id", "manual.content") + .contains(tuple(legoSet.id, legoSet.manual.id, legoSet.manual.content)); + } + + @Test // GH-1714 + void saveAndLoadManyEntitiesByIdWithReferencedEntityLikeStream() { + + template.save(legoSet); + + Stream reloadedLegoSets = template.streamAllByIds(singletonList(legoSet.id), LegoSet.class); + + assertThat(reloadedLegoSets).hasSize(1).extracting("id", "manual.id", "manual.content") + .contains(tuple(legoSet.id, legoSet.manual.id, legoSet.manual.content)); + } + + @Test // DATAJDBC-112 + void saveAndLoadAnEntityWithReferencedNullEntity() { + + legoSet.manual = null; + + template.save(legoSet); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.manual).isNull(); + } + + @Test // DATAJDBC-112 + void saveAndDeleteAnEntityWithReferencedEntity() { + + 
template.save(legoSet); + + template.delete(legoSet); + + assertThat(template.findAll(LegoSet.class)).isEmpty(); + assertThat(template.findAll(Manual.class)).isEmpty(); + } + + @Test // DATAJDBC-112 + void saveAndDeleteAllWithReferencedEntity() { + + template.save(legoSet); + + template.deleteAll(LegoSet.class); + + assertThat(template.findAll(LegoSet.class)).isEmpty(); + assertThat(template.findAll(Manual.class)).isEmpty(); + } + + @Test // GH-537 + void saveAndDeleteAllByAggregateRootsWithReferencedEntity() { + + LegoSet legoSet1 = template.save(legoSet); + LegoSet legoSet2 = template.save(createLegoSet("Some Name")); + template.save(createLegoSet("Some other Name")); + + template.deleteAll(List.of(legoSet1, legoSet2)); + + assertThat(template.findAll(LegoSet.class)).extracting(l -> l.name).containsExactly("Some other Name"); + assertThat(template.findAll(Manual.class)).hasSize(1); + } + + @Test // GH-537 + void saveAndDeleteAllByIdsWithReferencedEntity() { + + LegoSet legoSet1 = template.save(legoSet); + LegoSet legoSet2 = template.save(createLegoSet("Some Name")); + template.save(createLegoSet("Some other Name")); + + template.deleteAllById(List.of(legoSet1.id, legoSet2.id), LegoSet.class); + + assertThat(template.findAll(LegoSet.class)).extracting(l -> l.name).containsExactly("Some other Name"); + assertThat(template.findAll(Manual.class)).hasSize(1); + } + + @Test + // GH-537 + void saveAndDeleteAllByAggregateRootsWithVersion() { + + AggregateWithImmutableVersion aggregate1 = new AggregateWithImmutableVersion(null, null); + AggregateWithImmutableVersion aggregate2 = new AggregateWithImmutableVersion(null, null); + AggregateWithImmutableVersion aggregate3 = new AggregateWithImmutableVersion(null, null); + Iterator savedAggregatesIterator = template + .saveAll(List.of(aggregate1, aggregate2, aggregate3)).iterator(); + AggregateWithImmutableVersion savedAggregate1 = savedAggregatesIterator.next(); + AggregateWithImmutableVersion twiceSavedAggregate2 = 
template.save(savedAggregatesIterator.next()); + AggregateWithImmutableVersion twiceSavedAggregate3 = template.save(savedAggregatesIterator.next()); + + assertThat(template.count(AggregateWithImmutableVersion.class)).isEqualTo(3); + + template.deleteAll(List.of(savedAggregate1, twiceSavedAggregate2, twiceSavedAggregate3)); + + assertThat(template.count(AggregateWithImmutableVersion.class)).isEqualTo(0); + } + + @Test + // GH-1395 + void insertAndUpdateAllByAggregateRootsWithVersion() { + + AggregateWithImmutableVersion aggregate1 = new AggregateWithImmutableVersion(null, null); + AggregateWithImmutableVersion aggregate2 = new AggregateWithImmutableVersion(null, null); + AggregateWithImmutableVersion aggregate3 = new AggregateWithImmutableVersion(null, null); + + Iterator savedAggregatesIterator = template + .insertAll(List.of(aggregate1, aggregate2, aggregate3)).iterator(); + assertThat(template.count(AggregateWithImmutableVersion.class)).isEqualTo(3); + + AggregateWithImmutableVersion savedAggregate1 = savedAggregatesIterator.next(); + AggregateWithImmutableVersion twiceSavedAggregate2 = template.save(savedAggregatesIterator.next()); + AggregateWithImmutableVersion twiceSavedAggregate3 = template.save(savedAggregatesIterator.next()); + + savedAggregatesIterator = template.updateAll(List.of(savedAggregate1, twiceSavedAggregate2, twiceSavedAggregate3)) + .iterator(); + + assertThat(savedAggregatesIterator.next().version).isEqualTo(1); + assertThat(savedAggregatesIterator.next().version).isEqualTo(2); + assertThat(savedAggregatesIterator.next().version).isEqualTo(2); + + AggregateWithImmutableVersion.clearConstructorInvocationData(); + } + + @Test // DATAJDBC-112 + @EnabledOnFeature(SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) + void updateReferencedEntityFromNull() { + + legoSet.manual = (null); + template.save(legoSet); + + Manual manual = new Manual(); + manual.id = 23L; + manual.content = "Some content"; + legoSet.manual = manual; + + template.save(legoSet); + 
+ LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.manual.content).isEqualTo("Some content"); + } + + @Test // DATAJDBC-112 + void updateReferencedEntityToNull() { + + template.save(legoSet); + + legoSet.manual = null; + + template.save(legoSet); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.manual).isNull(); + assertThat(template.findAll(Manual.class)).describedAs("Manuals failed to delete").isEmpty(); + } + + @Test + // DATAJDBC-438 + void updateFailedRootDoesNotExist() { + + LegoSet entity = new LegoSet(); + entity.id = 100L; // does not exist in the database + + assertThatExceptionOfType(DbActionExecutionException.class) // + .isThrownBy(() -> template.save(entity)) // + .withCauseInstanceOf(IncorrectUpdateSemanticsDataAccessException.class); + } + + @Test // DATAJDBC-112 + void replaceReferencedEntity() { + + template.save(legoSet); + + Manual manual = new Manual(); + manual.content = "other content"; + legoSet.manual = manual; + + template.save(legoSet); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.manual.content).isEqualTo("other content"); + assertThat(template.findAll(Manual.class)).describedAs("There should be only one manual").hasSize(1); + } + + @Test // DATAJDBC-112 + @EnabledOnFeature(TestDatabaseFeatures.Feature.SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) + void changeReferencedEntity() { + + template.save(legoSet); + + legoSet.manual.content = "new content"; + + template.save(legoSet); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.manual.content).isEqualTo("new content"); + } + + @Test // DATAJDBC-266 + void oneToOneChildWithoutId() { + + OneToOneParent parent = new OneToOneParent(); + + parent.content = "parent content"; + parent.child = new ChildNoId(); + parent.child.content = "child content"; + + 
template.save(parent); + + OneToOneParent reloaded = template.findById(parent.id, OneToOneParent.class); + + assertThat(reloaded.child.content).isEqualTo("child content"); + } + + @Test // DATAJDBC-266 + void oneToOneNullChildWithoutId() { + + OneToOneParent parent = new OneToOneParent(); + + parent.content = "parent content"; + parent.child = null; + + template.save(parent); + + OneToOneParent reloaded = template.findById(parent.id, OneToOneParent.class); + + assertThat(reloaded.child).isNull(); + } + + @Test // DATAJDBC-266 + void oneToOneNullAttributes() { + + OneToOneParent parent = new OneToOneParent(); + + parent.content = "parent content"; + parent.child = new ChildNoId(); + + template.save(parent); + + OneToOneParent reloaded = template.findById(parent.id, OneToOneParent.class); + + assertThat(reloaded.child).isNotNull(); + } + + @Test // DATAJDBC-125 + void saveAndLoadAnEntityWithSecondaryReferenceNull() { + + template.save(legoSet); + + assertThat(legoSet.manual.id).describedAs("id of stored manual").isNotNull(); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.alternativeInstructions).isNull(); + } + + @Test // DATAJDBC-125 + void saveAndLoadAnEntityWithSecondaryReferenceNotNull() { + + legoSet.alternativeInstructions = new Manual(); + legoSet.alternativeInstructions.content = "alternative content"; + template.save(legoSet); + + assertThat(legoSet.manual.id).describedAs("id of stored manual").isNotNull(); + + LegoSet reloadedLegoSet = template.findById(legoSet.id, LegoSet.class); + + assertThat(reloadedLegoSet.alternativeInstructions).isNotNull(); + assertThat(reloadedLegoSet.alternativeInstructions.id).isNotNull(); + assertThat(reloadedLegoSet.alternativeInstructions.id).isNotEqualTo(reloadedLegoSet.manual.id); + assertThat(reloadedLegoSet.alternativeInstructions.content) + .isEqualTo(reloadedLegoSet.alternativeInstructions.content); + } + + @Test // DATAJDBC-276 + void 
saveAndLoadAnEntityWithListOfElementsWithoutId() { + + ListParent entity = new ListParent(); + entity.name = "name"; + + ElementNoId element = new ElementNoId(); + element.content = "content"; + + entity.content.add(element); + + template.save(entity); + + ListParent reloaded = template.findById(entity.id, ListParent.class); + + assertThat(reloaded.content).extracting(e -> e.content).containsExactly("content"); + } + + @Test // GH-498 DATAJDBC-273 + void saveAndLoadAnEntityWithListOfElementsInConstructor() { + + ElementNoId element = new ElementNoId(); + element.content = "content"; + ListParentAllArgs entity = new ListParentAllArgs("name", singletonList(element)); + + entity = template.save(entity); + + ListParentAllArgs reloaded = template.findById(entity.id, ListParentAllArgs.class); + + assertThat(reloaded.content).extracting(e -> e.content).containsExactly("content"); + } + + @Test // DATAJDBC-259 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithArray() { + + ArrayOwner arrayOwner = new ArrayOwner(); + arrayOwner.digits = new String[] { "one", "two", "three" }; + + ArrayOwner saved = template.save(arrayOwner); + + assertThat(saved.id).isNotNull(); + + ArrayOwner reloaded = template.findById(saved.id, ArrayOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits).isEqualTo(new String[] { "one", "two", "three" }); + } + + @Test // GH-1826 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithEmptyArray() { + + ArrayOwner arrayOwner = new ArrayOwner(); + arrayOwner.digits = new String[] { }; + + ArrayOwner saved = template.save(arrayOwner); + + assertThat(saved.id).isNotNull(); + + ArrayOwner reloaded = template.findById(saved.id, ArrayOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits) // + .isNotNull() // + .isEmpty(); + } + + @Test // DATAJDBC-259, DATAJDBC-512 + 
@EnabledOnFeature(SUPPORTS_MULTIDIMENSIONAL_ARRAYS) + void saveAndLoadAnEntityWithMultidimensionalArray() { + + ArrayOwner arrayOwner = new ArrayOwner(); + arrayOwner.multidimensional = new String[][] { { "one-a", "two-a", "three-a" }, { "one-b", "two-b", "three-b" } }; + + ArrayOwner saved = template.save(arrayOwner); + + assertThat(saved.id).isNotNull(); + + ArrayOwner reloaded = template.findById(saved.id, ArrayOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.multidimensional) + .isEqualTo(new String[][] { { "one-a", "two-a", "three-a" }, { "one-b", "two-b", "three-b" } }); + } + + @Test // DATAJDBC-259 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithList() { + + ListOwner arrayOwner = new ListOwner(); + arrayOwner.digits.addAll(asList("one", "two", "three")); + + ListOwner saved = template.save(arrayOwner); + + assertThat(saved.id).isNotNull(); + + ListOwner reloaded = template.findById(saved.id, ListOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits).isEqualTo(asList("one", "two", "three")); + } + + @Test // DATAJDBC-1826 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithEmptyList() { + + ListOwner arrayOwner = new ListOwner(); + + ListOwner saved = template.save(arrayOwner); + + assertThat(saved.id).isNotNull(); + + ListOwner reloaded = template.findById(saved.id, ListOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits).isNotNull().isEmpty(); + } + + @Test // GH-1033 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithListOfDouble() { + + DoubleListOwner doubleListOwner = new DoubleListOwner(); + doubleListOwner.digits.addAll(asList(1.2, 1.3, 1.4)); + + DoubleListOwner saved = template.save(doubleListOwner); + + assertThat(saved.id).isNotNull(); + + DoubleListOwner reloaded = 
template.findById(saved.id, DoubleListOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits).isEqualTo(asList(1.2, 1.3, 1.4)); + } + + @Test // GH-1033, GH-1046 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithListOfFloat() { + + FloatListOwner floatListOwner = new FloatListOwner(); + final List values = asList(1.2f, 1.3f, 1.4f); + floatListOwner.digits.addAll(values); + + FloatListOwner saved = template.save(floatListOwner); + + assertThat(saved.id).isNotNull(); + + FloatListOwner reloaded = template.findById(saved.id, FloatListOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits).isEqualTo(values); + } + + @Test // DATAJDBC-259 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadAnEntityWithSet() { + + SetOwner setOwner = new SetOwner(); + setOwner.digits.addAll(asList("one", "two", "three")); + + SetOwner saved = template.save(setOwner); + + assertThat(saved.id).isNotNull(); + + SetOwner reloaded = template.findById(saved.id, SetOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.digits).isEqualTo(new HashSet<>(asList("one", "two", "three"))); + } + + @Test //GH-1737 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void saveAndLoadEmbeddedArray() { + + EmbeddedStringListOwner embeddedStringListOwner = new EmbeddedStringListOwner(); + embeddedStringListOwner.embeddedStringList = new EmbeddedStringList(); + embeddedStringListOwner.embeddedStringList.digits = List.of("one", "two", "three"); + + EmbeddedStringListOwner saved = template.save(embeddedStringListOwner); + + EmbeddedStringListOwner reloaded = template.findById(saved.id, EmbeddedStringListOwner.class); + + assertThat(reloaded.embeddedStringList.digits).containsExactly("one", "two", "three"); + } + + @Test //GH-1737 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void 
saveAndLoadEmptyEmbeddedArray() { + + EmbeddedStringListOwner embeddedStringListOwner = new EmbeddedStringListOwner(); + embeddedStringListOwner.embeddedStringList = new EmbeddedStringList(); + embeddedStringListOwner.embeddedStringList.digits = emptyList(); + + EmbeddedStringListOwner saved = template.save(embeddedStringListOwner); + + EmbeddedStringListOwner reloaded = template.findById(saved.id, EmbeddedStringListOwner.class); + + assertThat(reloaded.embeddedStringList).isNull(); + } + + @Test + // DATAJDBC-327 + void saveAndLoadAnEntityWithByteArray() { + + ByteArrayOwner owner = new ByteArrayOwner(); + owner.binaryData = new byte[] { 1, 23, 42 }; + + ByteArrayOwner saved = template.save(owner); + + ByteArrayOwner reloaded = template.findById(saved.id, ByteArrayOwner.class); + + assertThat(reloaded).isNotNull(); + assertThat(reloaded.id).isEqualTo(saved.id); + assertThat(reloaded.binaryData).isEqualTo(new byte[] { 1, 23, 42 }); + } + + @Test // DATAJDBC-340 + void saveAndLoadLongChain() { + + Chain4 chain4 = new Chain4(); + chain4.fourValue = "omega"; + chain4.chain3 = new Chain3(); + chain4.chain3.threeValue = "delta"; + chain4.chain3.chain2 = new Chain2(); + chain4.chain3.chain2.twoValue = "gamma"; + chain4.chain3.chain2.chain1 = new Chain1(); + chain4.chain3.chain2.chain1.oneValue = "beta"; + chain4.chain3.chain2.chain1.chain0 = new Chain0(); + chain4.chain3.chain2.chain1.chain0.zeroValue = "alpha"; + + template.save(chain4); + + Chain4 reloaded = template.findById(chain4.four, Chain4.class); + + assertThat(reloaded).isNotNull(); + + assertThat(reloaded.four).isEqualTo(chain4.four); + assertThat(reloaded.chain3.chain2.chain1.chain0.zeroValue).isEqualTo(chain4.chain3.chain2.chain1.chain0.zeroValue); + + template.delete(chain4); + + assertThat(count("CHAIN0")).isEqualTo(0); + } + + @Test // DATAJDBC-359 + void saveAndLoadLongChainWithoutIds() { + + NoIdChain4 chain4 = new NoIdChain4(); + chain4.fourValue = "omega"; + chain4.chain3 = new NoIdChain3(); + 
chain4.chain3.threeValue = "delta"; + chain4.chain3.chain2 = new NoIdChain2(); + chain4.chain3.chain2.twoValue = "gamma"; + chain4.chain3.chain2.chain1 = new NoIdChain1(); + chain4.chain3.chain2.chain1.oneValue = "beta"; + chain4.chain3.chain2.chain1.chain0 = new NoIdChain0(); + chain4.chain3.chain2.chain1.chain0.zeroValue = "alpha"; + + template.save(chain4); + + assertThat(chain4.four).isNotNull(); + + NoIdChain4 reloaded = template.findById(chain4.four, NoIdChain4.class); + + assertThat(reloaded).isNotNull(); + + assertThat(reloaded.four).isEqualTo(chain4.four); + assertThat(reloaded.chain3.chain2.chain1.chain0.zeroValue).isEqualTo(chain4.chain3.chain2.chain1.chain0.zeroValue); + + template.delete(chain4); + + assertThat(count("CHAIN0")).isEqualTo(0); + } + + @Test + // DATAJDBC-223 + void saveAndLoadLongChainOfListsWithoutIds() { + + NoIdListChain4 saved = template.save(createNoIdTree()); + + assertThat(saved.four).describedAs("Something went wrong during saving").isNotNull(); + + NoIdListChain4 reloaded = template.findById(saved.four, NoIdListChain4.class); + + assertThat(reloaded.chain3).hasSameSizeAs(saved.chain3); + assertThat(reloaded.chain3.get(0).chain2).hasSameSizeAs(saved.chain3.get(0).chain2); + assertThat(reloaded).isEqualTo(saved); + } + + @Test + // DATAJDBC-223 + void shouldDeleteChainOfListsWithoutIds() { + + NoIdListChain4 saved = template.save(createNoIdTree()); + template.deleteById(saved.four, NoIdListChain4.class); + + assertThat(count("NO_ID_LIST_CHAIN4")).describedAs("Chain4 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_LIST_CHAIN3")).describedAs("Chain3 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_LIST_CHAIN2")).describedAs("Chain2 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_LIST_CHAIN1")).describedAs("Chain1 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_LIST_CHAIN0")).describedAs("Chain0 elements got deleted").isEqualTo(0); + } + + @Test + // DATAJDBC-223 + void 
saveAndLoadLongChainOfMapsWithoutIds() { + + NoIdMapChain4 saved = template.save(createNoIdMapTree()); + + assertThat(saved.four).isNotNull(); + + NoIdMapChain4 reloaded = template.findById(saved.four, NoIdMapChain4.class); + assertThat(reloaded).isEqualTo(saved); + } + + @Test + // DATAJDBC-223 + void shouldDeleteChainOfMapsWithoutIds() { + + NoIdMapChain4 saved = template.save(createNoIdMapTree()); + template.deleteById(saved.four, NoIdMapChain4.class); + + assertThat(count("NO_ID_MAP_CHAIN4")).describedAs("Chain4 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_MAP_CHAIN3")).describedAs("Chain3 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_MAP_CHAIN2")).describedAs("Chain2 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_MAP_CHAIN1")).describedAs("Chain1 elements got deleted").isEqualTo(0); + assertThat(count("NO_ID_MAP_CHAIN0")).describedAs("Chain0 elements got deleted").isEqualTo(0); + } + + @Test // DATAJDBC-431 + @EnabledOnFeature(IS_HSQL) + void readOnlyGetsLoadedButNotWritten() { + + WithReadOnly entity = new WithReadOnly(); + entity.name = "Alfred"; + entity.readOnly = "not used"; + + template.save(entity); + + assertThat( + jdbcTemplate.queryForObject("SELECT read_only FROM with_read_only", Collections.emptyMap(), String.class)) + .isEqualTo("from-db"); + } + + @Test + // DATAJDBC-219 Test that immutable version attribute works as expected. 
+ void saveAndUpdateAggregateWithImmutableVersion() { + + AggregateWithImmutableVersion aggregate = new AggregateWithImmutableVersion(null, null); + aggregate = template.save(aggregate); + assertThat(aggregate.version).isEqualTo(0L); + + Long id = aggregate.id; + + AggregateWithImmutableVersion reloadedAggregate = template.findById(id, aggregate.getClass()); + assertThat(reloadedAggregate.getVersion()).describedAs("version field should initially have the value 0") + .isEqualTo(0L); + + AggregateWithImmutableVersion savedAgain = template.save(reloadedAggregate); + AggregateWithImmutableVersion reloadedAgain = template.findById(id, aggregate.getClass()); + + assertThat(savedAgain.version).describedAs("The object returned by save should have an increased version") + .isEqualTo(1L); + + assertThat(reloadedAgain.getVersion()).describedAs("version field should increment by one with each save") + .isEqualTo(1L); + + assertThatThrownBy(() -> template.save(new AggregateWithImmutableVersion(id, 0L))) + .describedAs("saving an aggregate with an outdated version should raise an exception") + .isInstanceOf(OptimisticLockingFailureException.class); + + assertThatThrownBy(() -> template.save(new AggregateWithImmutableVersion(id, 2L))) + .describedAs("saving an aggregate with a future version should raise an exception") + .isInstanceOf(OptimisticLockingFailureException.class); + } + + @Test + // GH-1137 + void testUpdateEntityWithVersionDoesNotTriggerAnewConstructorInvocation() { + + AggregateWithImmutableVersion aggregateWithImmutableVersion = new AggregateWithImmutableVersion(null, null); + + AggregateWithImmutableVersion savedRoot = template.save(aggregateWithImmutableVersion); + + assertThat(savedRoot).isNotNull(); + assertThat(savedRoot.version).isEqualTo(0L); + + assertThat(AggregateWithImmutableVersion.constructorInvocations).containsExactly( + new ConstructorInvocation(null, null), // Initial invocation, done by client + new ConstructorInvocation(null, savedRoot.version), 
// Assigning the version + new ConstructorInvocation(savedRoot.id, savedRoot.version) // Assigning the id + ); + + AggregateWithImmutableVersion.clearConstructorInvocationData(); + + AggregateWithImmutableVersion updatedRoot = template.save(savedRoot); + + assertThat(updatedRoot).isNotNull(); + assertThat(updatedRoot.version).isEqualTo(1L); + + // Expect only one assignment of the version to AggregateWithImmutableVersion + assertThat(AggregateWithImmutableVersion.constructorInvocations) + .containsOnly(new ConstructorInvocation(savedRoot.id, updatedRoot.version)); + } + + @Test + // DATAJDBC-219 Test that a delete with a version attribute works as expected. + void deleteAggregateWithVersion() { + + AggregateWithImmutableVersion aggregate = new AggregateWithImmutableVersion(null, null); + aggregate = template.save(aggregate); + // as non-primitive versions start from 0, we need to save one more time to make version equal 1 + aggregate = template.save(aggregate); + + // Should have an ID and a version of 1. + final Long id = aggregate.id; + + assertThatThrownBy(() -> template.delete(new AggregateWithImmutableVersion(id, 0L))) + .describedAs("deleting an aggregate with an outdated version should raise an exception") + .isInstanceOf(OptimisticLockingFailureException.class); + + assertThatThrownBy(() -> template.delete(new AggregateWithImmutableVersion(id, 2L))) + .describedAs("deleting an aggregate with a future version should raise an exception") + .isInstanceOf(OptimisticLockingFailureException.class); + + // This should succeed + template.delete(aggregate); + + aggregate = new AggregateWithImmutableVersion(null, null); + aggregate = template.save(aggregate); + + // This should succeed, as version will not be used. 
+ template.deleteById(aggregate.id, AggregateWithImmutableVersion.class); + + } + + @Test + // DATAJDBC-219 + void saveAndUpdateAggregateWithLongVersion() { + saveAndUpdateAggregateWithVersion(new AggregateWithLongVersion(), Number::longValue); + } + + @Test + // DATAJDBC-219 + void saveAndUpdateAggregateWithPrimitiveLongVersion() { + saveAndUpdateAggregateWithPrimitiveVersion(new AggregateWithPrimitiveLongVersion(), Number::longValue); + } + + @Test + // DATAJDBC-219 + void saveAndUpdateAggregateWithIntegerVersion() { + saveAndUpdateAggregateWithVersion(new AggregateWithIntegerVersion(), Number::intValue); + } + + @Test + // DATAJDBC-219 + void saveAndUpdateAggregateWithPrimitiveIntegerVersion() { + saveAndUpdateAggregateWithPrimitiveVersion(new AggregateWithPrimitiveIntegerVersion(), Number::intValue); + } + + @Test + // DATAJDBC-219 + void saveAndUpdateAggregateWithShortVersion() { + saveAndUpdateAggregateWithVersion(new AggregateWithShortVersion(), Number::shortValue); + } + + @Test + // DATAJDBC-219 + void saveAndUpdateAggregateWithPrimitiveShortVersion() { + saveAndUpdateAggregateWithPrimitiveVersion(new AggregateWithPrimitiveShortVersion(), Number::shortValue); + } + + @Test + // GH-1254 + void saveAndUpdateAggregateWithIdAndNullVersion() { + + PersistableVersionedAggregate aggregate = new PersistableVersionedAggregate(); + aggregate.setVersion(null); + aggregate.setId(23L); + + assertThatThrownBy(() -> template.save(aggregate)).isInstanceOf(DbActionExecutionException.class); + } + + @Test // DATAJDBC-462 + void resavingAnUnversionedEntity() { + + LegoSet legoSet = new LegoSet(); + + LegoSet saved = template.save(legoSet); + + template.save(saved); + } + + @Test // DATAJDBC-637 + @EnabledOnFeature(SUPPORTS_NANOSECOND_PRECISION) + void saveAndLoadDateTimeWithFullPrecision() { + + WithLocalDateTime entity = new WithLocalDateTime(); + entity.id = 23L; + entity.testTime = LocalDateTime.of(2005, 5, 5, 5, 5, 5, 123456789); + + template.insert(entity); + + 
WithLocalDateTime loaded = template.findById(23L, WithLocalDateTime.class); + + assertThat(loaded.testTime).isEqualTo(entity.testTime); + } + + @Test + // DATAJDBC-637 + void saveAndLoadDateTimeWithMicrosecondPrecision() { + + WithLocalDateTime entity = new WithLocalDateTime(); + entity.id = 23L; + entity.testTime = LocalDateTime.of(2005, 5, 5, 5, 5, 5, 123456000); + + template.insert(entity); + + WithLocalDateTime loaded = template.findById(23L, WithLocalDateTime.class); + + assertThat(loaded.testTime).isEqualTo(entity.testTime); + } + + @Test + // GH-777 + void insertWithIdOnly() { + + WithIdOnly entity = new WithIdOnly(); + + assertThat(template.save(entity).id).isNotNull(); + } + + @Test + // GH-1309 + void updateIdOnlyAggregate() { + + WithIdOnly entity = new WithIdOnly(); + + assertThat(template.save(entity).id).isNotNull(); + + template.save(entity); + } + + @Test + // GH-637 + void insertOnlyPropertyDoesNotGetUpdated() { + + WithInsertOnly entity = new WithInsertOnly(); + entity.insertOnly = "first value"; + + assertThat(template.save(entity).id).isNotNull(); + + entity.insertOnly = "second value"; + template.save(entity); + + assertThat(template.findById(entity.id, WithInsertOnly.class).insertOnly).isEqualTo("first value"); + } + + @Test // GH-1460 + @EnabledOnFeature(SUPPORTS_ARRAYS) + void readEnumArray() { + + EnumArrayOwner entity = new EnumArrayOwner(); + entity.digits = new Color[] { Color.BLUE }; + + template.save(entity); + + assertThat(template.findById(entity.id, EnumArrayOwner.class).digits).isEqualTo(new Color[] { Color.BLUE }); + } + + @Test // GH-1448 + void multipleCollections() { + + MultipleCollections aggregate = new MultipleCollections(); + aggregate.name = "aggregate"; + + aggregate.listElements.add(new ListElement("one")); + aggregate.listElements.add(new ListElement("two")); + aggregate.listElements.add(new ListElement("three")); + + aggregate.setElements.add(new SetElement("one")); + aggregate.setElements.add(new SetElement("two")); 
+ + aggregate.mapElements.put("alpha", new MapElement("one")); + aggregate.mapElements.put("beta", new MapElement("two")); + aggregate.mapElements.put("gamma", new MapElement("three")); + aggregate.mapElements.put("delta", new MapElement("four")); + + template.save(aggregate); + + MultipleCollections reloaded = template.findById(aggregate.id, MultipleCollections.class); + + assertThat(reloaded.name).isEqualTo(aggregate.name); + + assertThat(reloaded.listElements).containsExactly(aggregate.listElements.get(0), aggregate.listElements.get(1), + aggregate.listElements.get(2)); + + assertThat(reloaded.setElements).containsExactlyInAnyOrder(aggregate.setElements.toArray(new SetElement[0])); + + assertThat(reloaded.mapElements.get("alpha")).isEqualTo(new MapElement("one")); + assertThat(reloaded.mapElements.get("beta")).isEqualTo(new MapElement("two")); + assertThat(reloaded.mapElements.get("gamma")).isEqualTo(new MapElement("three")); + assertThat(reloaded.mapElements.get("delta")).isEqualTo(new MapElement("four")); + } + + @Test // GH-1448 + void multipleCollectionsWithEmptySet() { + + MultipleCollections aggregate = new MultipleCollections(); + aggregate.name = "aggregate"; + + aggregate.listElements.add(new ListElement("one")); + aggregate.listElements.add(new ListElement("two")); + aggregate.listElements.add(new ListElement("three")); + + aggregate.mapElements.put("alpha", new MapElement("one")); + aggregate.mapElements.put("beta", new MapElement("two")); + aggregate.mapElements.put("gamma", new MapElement("three")); + aggregate.mapElements.put("delta", new MapElement("four")); + + template.save(aggregate); + + MultipleCollections reloaded = template.findById(aggregate.id, MultipleCollections.class); + + assertThat(reloaded.name).isEqualTo(aggregate.name); + + assertThat(reloaded.listElements).containsExactly(aggregate.listElements.get(0), aggregate.listElements.get(1), + aggregate.listElements.get(2)); + + 
assertThat(reloaded.setElements).containsExactlyInAnyOrder(aggregate.setElements.toArray(new SetElement[0])); + + assertThat(reloaded.mapElements.get("alpha")).isEqualTo(new MapElement("one")); + assertThat(reloaded.mapElements.get("beta")).isEqualTo(new MapElement("two")); + assertThat(reloaded.mapElements.get("gamma")).isEqualTo(new MapElement("three")); + assertThat(reloaded.mapElements.get("delta")).isEqualTo(new MapElement("four")); + } + + @Test // GH-1448 + void multipleCollectionsWithEmptyList() { + + MultipleCollections aggregate = new MultipleCollections(); + aggregate.name = "aggregate"; + + aggregate.setElements.add(new SetElement("one")); + aggregate.setElements.add(new SetElement("two")); + + aggregate.mapElements.put("alpha", new MapElement("one")); + aggregate.mapElements.put("beta", new MapElement("two")); + aggregate.mapElements.put("gamma", new MapElement("three")); + aggregate.mapElements.put("delta", new MapElement("four")); + + template.save(aggregate); + + MultipleCollections reloaded = template.findById(aggregate.id, MultipleCollections.class); + + assertThat(reloaded.name).isEqualTo(aggregate.name); + + assertThat(reloaded.listElements).containsExactly(); + + assertThat(reloaded.setElements).containsExactlyInAnyOrder(aggregate.setElements.toArray(new SetElement[0])); + + assertThat(reloaded.mapElements.get("alpha")).isEqualTo(new MapElement("one")); + assertThat(reloaded.mapElements.get("beta")).isEqualTo(new MapElement("two")); + assertThat(reloaded.mapElements.get("gamma")).isEqualTo(new MapElement("three")); + assertThat(reloaded.mapElements.get("delta")).isEqualTo(new MapElement("four")); + } + + @Test // GH-1646 + void recordOfSet() { + + Author tolkien = template.save(new Author(null, Set.of(new Book("Lord of the Rings")))); + + Iterable authors = template.findAll(Author.class); + + assertThat(authors).containsExactly(tolkien); + } + + @Test // GH-1656 + void mapWithEnumKey() { + + EnumMapOwner enumMapOwner = template + .save(new 
EnumMapOwner(null, "OwnerName", Map.of(Color.BLUE, new MapElement("Element")))); + + Iterable enumMapOwners = template.findAll(EnumMapOwner.class); + + assertThat(enumMapOwners).containsExactly(enumMapOwner); + } + + @Test // GH-1684 + void oneToOneWithIdenticalIdColumnName() { + + WithOneToOne saved = template.insert(new WithOneToOne("one", new Referenced(23L))); + + WithOneToOne reloaded = template.findById(saved.id, WithOneToOne.class); + + assertThat(reloaded).isEqualTo(saved); + } + + @Test // GH-1802 + void singleEntitySetChain() { + + First first1 = template.insert( // + new First(1L, "first-1", // + new Sec(2L, "second-1-2", Set.of( // + new Third("third-1-2-0"), // + new Third("third-1-2-1"), // + new Third("third-1-2-3")) // + ) // + ) // + ); + First first2 = template.insert( // + new First(2L, "first-2", // + new Sec(3L, "second-2-3", Set.of( // + new Third("third-2-3-0"), // + new Third("third-2-3-1"), // + new Third("third-2-3-3")) // + ) // + ) // + ); + + First first1Reloaded = template.findById(first1.id, First.class); + First first2Reloaded = template.findById(first2.id, First.class); + + assertSoftly(softly -> { + softly.assertThat(first1Reloaded).isEqualTo(first1); + softly.assertThat(first2Reloaded).isEqualTo(first2); + }); + } + + private void saveAndUpdateAggregateWithVersion(VersionedAggregate aggregate, + Function toConcreteNumber) { + saveAndUpdateAggregateWithVersion(aggregate, toConcreteNumber, 0); + } + + private void saveAndUpdateAggregateWithPrimitiveVersion(VersionedAggregate aggregate, + Function toConcreteNumber) { + saveAndUpdateAggregateWithVersion(aggregate, toConcreteNumber, 1); + } + + private void saveAndUpdateAggregateWithVersion(VersionedAggregate aggregate, + Function toConcreteNumber, int initialId) { + + template.save(aggregate); + + VersionedAggregate reloadedAggregate = template.findById(aggregate.id, aggregate.getClass()); + assertThat(reloadedAggregate.getVersion()) // + .withFailMessage("version field should 
initially have the value 0") + .isEqualTo(toConcreteNumber.apply(initialId)); + template.save(reloadedAggregate); + + VersionedAggregate updatedAggregate = template.findById(aggregate.id, aggregate.getClass()); + assertThat(updatedAggregate.getVersion()) // + .withFailMessage("version field should increment by one with each save") + .isEqualTo(toConcreteNumber.apply(initialId + 1)); + + reloadedAggregate.setVersion(toConcreteNumber.apply(initialId)); + assertThatThrownBy(() -> template.save(reloadedAggregate)) + .withFailMessage("saving an aggregate with an outdated version should raise an exception") + .isInstanceOf(OptimisticLockingFailureException.class); + + reloadedAggregate.setVersion(toConcreteNumber.apply(initialId + 2)); + assertThatThrownBy(() -> template.save(reloadedAggregate)) + .withFailMessage("saving an aggregate with a future version should raise an exception") + .isInstanceOf(OptimisticLockingFailureException.class); + } + + private Long count(String tableName) { + return jdbcTemplate.queryForObject("SELECT COUNT(*) FROM " + tableName, emptyMap(), Long.class); + } + + enum Color { + BLUE + } + + @Table("ARRAY_OWNER") + private static class EnumArrayOwner { + @Id Long id; + + Color[] digits; + } + + @Table("ARRAY_OWNER") + private static class ArrayOwner { + @Id Long id; + + String[] digits; + String[][] multidimensional; + } + + private static class ByteArrayOwner { + @Id Long id; + + byte[] binaryData; + } + + @Table("ARRAY_OWNER") + private static class ListOwner { + @Id Long id; + + List digits = new ArrayList<>(); + } + + @Table("ARRAY_OWNER") + private static class SetOwner { + @Id Long id; + + Set digits = new HashSet<>(); + } + + private static class DoubleListOwner { + + @Id Long id; + + List digits = new ArrayList<>(); + } + + private static class FloatListOwner { + + @Id Long id; + + List digits = new ArrayList<>(); + } + + @Table("ARRAY_OWNER") + private static class EmbeddedStringListOwner { + @Id Long id; + + @Embedded(onEmpty = 
Embedded.OnEmpty.USE_NULL, prefix = "") EmbeddedStringList embeddedStringList; + } + + private static class EmbeddedStringList { + List digits = new ArrayList<>(); + } + + static class LegoSet { + + @Column("id1") + @Id private Long id; + + private String name; + + private Manual manual; + @Column("alternative") private Manual alternativeInstructions; + } + + static class Manual { + + @Column("id2") + @Id private Long id; + private String content; + + } + + @SuppressWarnings("unused") + static class OneToOneParent { + + @Column("id3") + @Id private Long id; + private String content; + + private ChildNoId child; + } + + static class ChildNoId { + private String content; + } + + @SuppressWarnings("unused") + static class SimpleListParent { + + @Id private Long id; + String name; + List content = new ArrayList<>(); + + static SimpleListParent of(String name, String... contents) { + + SimpleListParent parent = new SimpleListParent(); + parent.name = name; + + for (String content : contents) { + + ElementNoId element = new ElementNoId(); + element.content = content; + parent.content.add(element); + } + + return parent; + } + } + + @Table("LIST_PARENT") + @SuppressWarnings("unused") + static class ListParent { + + @Column("id4") + @Id private Long id; + String name; + @MappedCollection(idColumn = "LIST_PARENT") List content = new ArrayList<>(); + } + + @Table("LIST_PARENT") + static class ListParentAllArgs { + + @Column("id4") + @Id private final Long id; + private final String name; + @MappedCollection(idColumn = "LIST_PARENT") private final List content = new ArrayList<>(); + + @PersistenceCreator + ListParentAllArgs(Long id, String name, List content) { + + this.id = id; + this.name = name; + this.content.addAll(content); + } + + ListParentAllArgs(String name, List content) { + this(null, name, content); + } + } + + static class ElementNoId { + private String content; + } + + /** + * One may think of ChainN as a chain with N further elements + */ + 
@SuppressWarnings("unused") + static class Chain0 { + @Id Long zero; + String zeroValue; + } + + @SuppressWarnings("unused") + static class Chain1 { + @Id Long one; + String oneValue; + Chain0 chain0; + } + + @SuppressWarnings("unused") + static class Chain2 { + @Id Long two; + String twoValue; + Chain1 chain1; + } + + @SuppressWarnings("unused") + static class Chain3 { + @Id Long three; + String threeValue; + Chain2 chain2; + } + + static class Chain4 { + @Id Long four; + String fourValue; + Chain3 chain3; + } + + /** + * One may think of ChainN as a chain with N further elements + */ + static class NoIdChain0 { + String zeroValue; + } + + static class NoIdChain1 { + String oneValue; + NoIdChain0 chain0; + } + + static class NoIdChain2 { + String twoValue; + NoIdChain1 chain1; + } + + static class NoIdChain3 { + String threeValue; + NoIdChain2 chain2; + } + + static class NoIdChain4 { + @Id Long four; + String fourValue; + NoIdChain3 chain3; + } + + /** + * One may think of ChainN as a chain with N further elements + */ + static class NoIdListChain0 { + String zeroValue; + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdListChain0 that = (NoIdListChain0) o; + return Objects.equals(zeroValue, that.zeroValue); + } + + @Override + public int hashCode() { + return Objects.hash(zeroValue); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [zeroValue='" + zeroValue + '\'' + ']'; + return sb; + } + } + + static class NoIdListChain1 { + String oneValue; + List chain0 = new ArrayList<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdListChain1 that = (NoIdListChain1) o; + return Objects.equals(oneValue, that.oneValue) && Objects.equals(chain0, that.chain0); + } + + @Override + public int hashCode() { + return 
Objects.hash(oneValue, chain0); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [oneValue='" + oneValue + '\'' + ", chain0=" + chain0 + ']'; + return sb; + } + } + + static class NoIdListChain2 { + String twoValue; + List chain1 = new ArrayList<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdListChain2 that = (NoIdListChain2) o; + return Objects.equals(twoValue, that.twoValue) && Objects.equals(chain1, that.chain1); + } + + @Override + public int hashCode() { + return Objects.hash(twoValue, chain1); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [twoValue='" + twoValue + '\'' + ", chain1=" + chain1 + ']'; + return sb; + } + } + + static class NoIdListChain3 { + String threeValue; + List chain2 = new ArrayList<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdListChain3 that = (NoIdListChain3) o; + return Objects.equals(threeValue, that.threeValue) && Objects.equals(chain2, that.chain2); + } + + @Override + public int hashCode() { + return Objects.hash(threeValue, chain2); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [threeValue='" + threeValue + '\'' + ", chain2=" + chain2 + ']'; + return sb; + } + } + + static class NoIdListChain4 { + @Id Long four; + String fourValue; + List chain3 = new ArrayList<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdListChain4 that = (NoIdListChain4) o; + return Objects.equals(four, that.four) && Objects.equals(fourValue, that.fourValue) + && Objects.equals(chain3, that.chain3); + } + + @Override + public int hashCode() { + return Objects.hash(four, fourValue, chain3); + } + + 
@Override + public String toString() { + String sb = getClass().getSimpleName() + " [four=" + four + ", fourValue='" + fourValue + '\'' + ", chain3=" + + chain3 + ']'; + return sb; + } + + } + + /** + * One may think of ChainN as a chain with N further elements + */ + static class NoIdMapChain0 { + String zeroValue; + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdMapChain0 that = (NoIdMapChain0) o; + return Objects.equals(zeroValue, that.zeroValue); + } + + @Override + public int hashCode() { + return Objects.hash(zeroValue); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [zeroValue='" + zeroValue + '\'' + ']'; + return sb; + } + } + + static class NoIdMapChain1 { + String oneValue; + Map chain0 = new HashMap<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdMapChain1 that = (NoIdMapChain1) o; + return Objects.equals(oneValue, that.oneValue) && Objects.equals(chain0, that.chain0); + } + + @Override + public int hashCode() { + return Objects.hash(oneValue, chain0); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [oneValue='" + oneValue + '\'' + ", chain0=" + chain0 + ']'; + return sb; + } + } + + static class NoIdMapChain2 { + String twoValue; + Map chain1 = new HashMap<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdMapChain2 that = (NoIdMapChain2) o; + return Objects.equals(twoValue, that.twoValue) && Objects.equals(chain1, that.chain1); + } + + @Override + public int hashCode() { + return Objects.hash(twoValue, chain1); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [twoValue='" + twoValue + '\'' + ", chain1=" + chain1 + 
']'; + return sb; + } + } + + static class NoIdMapChain3 { + String threeValue; + Map chain2 = new HashMap<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdMapChain3 that = (NoIdMapChain3) o; + return Objects.equals(threeValue, that.threeValue) && Objects.equals(chain2, that.chain2); + } + + @Override + public int hashCode() { + return Objects.hash(threeValue, chain2); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [threeValue='" + threeValue + '\'' + ", chain2=" + chain2 + ']'; + return sb; + } + } + + static class NoIdMapChain4 { + @Id Long four; + String fourValue; + Map chain3 = new HashMap<>(); + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + NoIdMapChain4 that = (NoIdMapChain4) o; + return Objects.equals(four, that.four) && Objects.equals(fourValue, that.fourValue) + && Objects.equals(chain3, that.chain3); + } + + @Override + public int hashCode() { + return Objects.hash(four, fourValue, chain3); + } + + @Override + public String toString() { + String sb = getClass().getSimpleName() + " [four=" + four + ", fourValue='" + fourValue + '\'' + ", chain3=" + + chain3 + ']'; + return sb; + } + } + + @SuppressWarnings("unused") + static class WithReadOnly { + @Id Long id; + String name; + @ReadOnlyProperty String readOnly; + } + + static abstract class VersionedAggregate { + + @Id private Long id; + + abstract Number getVersion(); + + abstract void setVersion(Number newVersion); + } + + @Table("VERSIONED_AGGREGATE") + static class PersistableVersionedAggregate implements Persistable { + + @Id private Long id; + + @Version Long version; + + @Override + public boolean isNew() { + return getId() == null; + } + + @Override + public Long getId() { + return this.id; + } + + public Long getVersion() { + return this.version; + } 
+ + public void setId(Long id) { + this.id = id; + } + + public void setVersion(Long version) { + this.version = version; + } + } + + @Table("VERSIONED_AGGREGATE") + static final class AggregateWithImmutableVersion { + + @Id private final Long id; + @Version private final Long version; + + private final static List constructorInvocations = new ArrayList<>(); + + public static void clearConstructorInvocationData() { + constructorInvocations.clear(); + } + + public AggregateWithImmutableVersion(Long id, Long version) { + + constructorInvocations.add(new ConstructorInvocation(id, version)); + this.id = id; + this.version = version; + } + + public Long getId() { + return this.id; + } + + public Long getVersion() { + return this.version; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final AggregateWithImmutableVersion other)) + return false; + final Object this$id = this.id; + final Object other$id = other.id; + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$version = this.getVersion(); + final Object other$version = other.getVersion(); + return Objects.equals(this$version, other$version); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.id; + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $version = this.getVersion(); + result = result * PRIME + ($version == null ? 43 : $version.hashCode()); + return result; + } + + public String toString() { + return "JdbcAggregateTemplateIntegrationTests.AggregateWithImmutableVersion(id=" + this.id + ", version=" + + this.getVersion() + ")"; + } + + public AggregateWithImmutableVersion withId(Long id) { + return this.id == id ? this : new AggregateWithImmutableVersion(id, this.version); + } + + public AggregateWithImmutableVersion withVersion(Long version) { + return this.version == version ? 
this : new AggregateWithImmutableVersion(this.id, version); + } + } + + private static final class ConstructorInvocation { + + private final Long id; + private final Long version; + + public ConstructorInvocation(Long id, Long version) { + this.id = id; + this.version = version; + } + + public Long getId() { + return this.id; + } + + public Long getVersion() { + return this.version; + } + + public String toString() { + return "JdbcAggregateTemplateIntegrationTests.ConstructorInvocation(id=" + this.id + ", version=" + + this.getVersion() + ")"; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final ConstructorInvocation other)) + return false; + final Object this$id = this.id; + final Object other$id = other.id; + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$version = this.getVersion(); + final Object other$version = other.getVersion(); + return Objects.equals(this$version, other$version); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.id; + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $version = this.getVersion(); + result = result * PRIME + ($version == null ? 
43 : $version.hashCode()); + return result; + } + } + + @Table("VERSIONED_AGGREGATE") + static class AggregateWithLongVersion extends VersionedAggregate { + + @Version private Long version; + + @Override + void setVersion(Number newVersion) { + this.version = (Long) newVersion; + } + + @Override + public Long getVersion() { + return this.version; + } + } + + @Table("VERSIONED_AGGREGATE") + static class AggregateWithPrimitiveLongVersion extends VersionedAggregate { + + @Version private long version; + + @Override + Number getVersion() { + return this.version; + } + + @Override + void setVersion(Number newVersion) { + this.version = (long) newVersion; + } + } + + @Table("VERSIONED_AGGREGATE") + static class AggregateWithIntegerVersion extends VersionedAggregate { + + @Version private Integer version; + + @Override + void setVersion(Number newVersion) { + this.version = (Integer) newVersion; + } + + @Override + public Integer getVersion() { + return this.version; + } + } + + @Table("VERSIONED_AGGREGATE") + static class AggregateWithPrimitiveIntegerVersion extends VersionedAggregate { + + @Version private int version; + + @Override + Number getVersion() { + return this.version; + } + + @Override + void setVersion(Number newVersion) { + this.version = (int) newVersion; + } + } + + @Table("VERSIONED_AGGREGATE") + static class AggregateWithShortVersion extends VersionedAggregate { + + @Version private Short version; + + @Override + void setVersion(Number newVersion) { + this.version = (Short) newVersion; + } + + @Override + public Short getVersion() { + return this.version; + } + } + + @Table("VERSIONED_AGGREGATE") + static class AggregateWithPrimitiveShortVersion extends VersionedAggregate { + + @Version private short version; + + @Override + Number getVersion() { + return this.version; + } + + @Override + void setVersion(Number newVersion) { + this.version = (short) newVersion; + } + } + + @Table + static class WithLocalDateTime { + + @Id Long id; + LocalDateTime 
testTime; + } + + @Table + static class WithIdOnly { + @Id Long id; + } + + @Table + static class WithInsertOnly { + @Id Long id; + @InsertOnlyProperty String insertOnly; + } + + @Table + static class MultipleCollections { + @Id Long id; + String name; + List listElements = new ArrayList<>(); + Set setElements = new HashSet<>(); + Map mapElements = new HashMap<>(); + } + + record ListElement(String name) { + } + + record SetElement(String name) { + } + + record MapElement(String name) { + } + + record Author(@Id Long id, Set books) { + } + + record Book(String name) { + + } + + record EnumMapOwner(@Id Long id, String name, Map map) { + } + + record WithOneToOne(@Id String id, @MappedCollection(idColumn = "renamed") Referenced referenced) { + } + + record Referenced(@Id Long id) { + } + + record First(@Id Long id, String name, Sec sec) { + } + + record Sec(@Id Long id, String name, Set thirds) { + } + + record Third(String name) { + } + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + TestClass testClass() { + return TestClass.of(JdbcAggregateTemplateIntegrationTests.class); + } + + @Bean + JdbcAggregateOperations operations(ApplicationEventPublisher publisher, RelationalMappingContext context, + DataAccessStrategy dataAccessStrategy, JdbcConverter converter) { + return new JdbcAggregateTemplate(publisher, context, converter, dataAccessStrategy); + } + } + + @ContextConfiguration(classes = Config.class) + static class JdbcAggregateTemplateIntegrationTests extends AbstractJdbcAggregateTemplateIntegrationTests {} + + @ActiveProfiles(value = PROFILE_SINGLE_QUERY_LOADING) + @ContextConfiguration(classes = Config.class) + static class JdbcAggregateTemplateSingleQueryLoadingIntegrationTests + extends AbstractJdbcAggregateTemplateIntegrationTests { + + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AggregateChangeIdGenerationImmutableUnitTests.java 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AggregateChangeIdGenerationImmutableUnitTests.java new file mode 100644 index 0000000000..ba958af4d8 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AggregateChangeIdGenerationImmutableUnitTests.java @@ -0,0 +1,837 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.conversion.DbAction; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.conversion.RootAggregateChange; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import 
org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.lang.Nullable; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.mockito.Mockito.*; + +/** + * Unit tests for the {@link MutableAggregateChange} testing the setting of generated ids in aggregates consisting of + * immutable entities. + * + * @author Jens Schauder + * @author Myeonghyeon-Lee + * @author Chirag Tailor + */ +@Disabled +public class AggregateChangeIdGenerationImmutableUnitTests { + + DummyEntity entity = new DummyEntity(); + Content content = new Content(); + Content content2 = new Content(); + Tag tag1 = new Tag("tag1"); + Tag tag2 = new Tag("tag2"); + Tag tag3 = new Tag("tag3"); + ContentNoId contentNoId = new ContentNoId(); + ContentNoId contentNoId2 = new ContentNoId(); + + RelationalMappingContext context = new RelationalMappingContext(); + JdbcConverter converter = mock(JdbcConverter.class); + DbAction.WithRoot rootInsert = new DbAction.InsertRoot<>(entity, IdValueSource.GENERATED); + + DataAccessStrategy accessStrategy = mock(DataAccessStrategy.class); + + AggregateChangeExecutor executor = new AggregateChangeExecutor(converter, accessStrategy); + + @Test // DATAJDBC-291 + public void singleRoot() { + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertThat(entity.rootId).isEqualTo(1); + } + + @Test // DATAJDBC-291 + public void simpleReference() { + + entity = entity.withSingle(content); + + RootAggregateChange aggregateChange = 
MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(createInsert("single", content, null)); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.single.id).isEqualTo(2); + }); + } + + @Test // DATAJDBC-291 + public void listReference() { + + entity = entity.withContentList(asList(content, content2)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(createInsert("contentList", content, 0)); + aggregateChange.addAction(createInsert("contentList", content2, 1)); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentList).extracting(c -> c.id).containsExactly(2, 3); + }); + } + + @Test // DATAJDBC-291 + public void mapReference() { + + entity = entity.withContentMap(createContentMap("a", content, "b", content2)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(createInsert("contentMap", content, "a")); + aggregateChange.addAction(createInsert("contentMap", content2, "b")); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertThat(entity.rootId).isEqualTo(1); + assertThat(entity.contentMap.values()).extracting(c -> c.id).containsExactly(2, 3); + } + + @Test // DATAJDBC-291 + public void setIdForDeepReference() { + + content = content.withSingle(tag1); + entity = entity.withSingle(content); + + DbAction.Insert parentInsert = createInsert("single", content, null); + DbAction.Insert insert = createDeepInsert("single", tag1, null, parentInsert); + + RootAggregateChange aggregateChange = 
MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + aggregateChange.addAction(insert); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertThat(entity.rootId).isEqualTo(1); + assertThat(entity.single.id).isEqualTo(2); + assertThat(entity.single.single.id).isEqualTo(3); + } + + @Test // DATAJDBC-291 + public void setIdForDeepReferenceElementList() { + + content = content.withTagList(asList(tag1, tag2)); + entity = entity.withSingle(content); + + DbAction.Insert parentInsert = createInsert("single", content, null); + DbAction.Insert insert1 = createDeepInsert("tagList", tag1, 0, parentInsert); + DbAction.Insert insert2 = createDeepInsert("tagList", tag2, 1, parentInsert); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.single.id).isEqualTo(2); + softly.assertThat(entity.single.tagList).extracting(t -> t.id).containsExactly(3, 4); + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementSetElementSet() { + + content = content.withTagSet(Stream.of(tag1, tag2).collect(Collectors.toSet())); + entity = entity.withContentSet(singleton(content)); + + DbAction.Insert parentInsert = createInsert("contentSet", content, null); + DbAction.Insert insert1 = createDeepInsert("tagSet", tag1, null, parentInsert); + DbAction.Insert insert2 = createDeepInsert("tagSet", tag2, null, parentInsert); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + 
aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentSet) // + .extracting(c -> c.id) // + .containsExactly(2); // + softly.assertThat(entity.contentSet.stream() // + .flatMap(c -> c.tagSet.stream())) // + .extracting(t -> t.id) // + .containsExactlyInAnyOrder(3, 4); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementListSingleReference() { + + content = content.withSingle(tag1); + content2 = content2.withSingle(tag2); + entity = entity.withContentList(asList(content, content2)); + + DbAction.Insert parentInsert1 = createInsert("contentList", content, 0); + DbAction.Insert parentInsert2 = createInsert("contentList", content2, 1); + DbAction.Insert insert1 = createDeepInsert("single", tag1, null, parentInsert1); + DbAction.Insert insert2 = createDeepInsert("single", tag2, null, parentInsert2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentList) // + .extracting(c -> c.id, c -> c.single.id) // + .containsExactly(tuple(2, 4), tuple(3, 5)); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementListElementList() { + + content = content.withTagList(singletonList(tag1)); + content2 = content2.withTagList(asList(tag2, tag3)); + entity = entity.withContentList(asList(content, content2)); + + DbAction.Insert parentInsert1 = createInsert("contentList", content, 0); + DbAction.Insert 
parentInsert2 = createInsert("contentList", content2, 1); + DbAction.Insert insert1 = createDeepInsert("tagList", tag1, 0, parentInsert1); + DbAction.Insert insert2 = createDeepInsert("tagList", tag2, 0, parentInsert2); + DbAction.Insert insert3 = createDeepInsert("tagList", tag3, 1, parentInsert2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + aggregateChange.addAction(insert3); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentList) // + .extracting(c -> c.id) // + .containsExactly(2, 3); // + softly.assertThat(entity.contentList.stream() // + .flatMap(c -> c.tagList.stream()) // + ).extracting(t -> t.id) // + .containsExactly(4, 5, 6); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementMapElementMap() { + + content = content.withTagMap(createTagMap("111", tag1, "222", tag2, "333", tag3)); + entity = entity.withContentMap(createContentMap("one", content, "two", content2)); + + DbAction.Insert parentInsert1 = createInsert("contentMap", content, "one"); + DbAction.Insert parentInsert2 = createInsert("contentMap", content2, "two"); + DbAction.Insert insert1 = createDeepInsert("tagMap", tag1, "111", parentInsert1); + DbAction.Insert insert2 = createDeepInsert("tagMap", tag2, "222", parentInsert2); + DbAction.Insert insert3 = createDeepInsert("tagMap", tag3, "333", parentInsert2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + 
aggregateChange.addAction(insert2); + aggregateChange.addAction(insert3); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentMap.entrySet()) // + .extracting(Map.Entry::getKey, e -> e.getValue().id) // + .containsExactly(tuple("one", 2), tuple("two", 3)); // + softly.assertThat(entity.contentMap.values().stream() // + .flatMap(c -> c.tagMap.entrySet().stream())) // + .extracting(Map.Entry::getKey, e -> e.getValue().id) // + .containsExactly( // + tuple("111", 4), // + tuple("222", 5), // + tuple("333", 6) // + ); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementListSingleReferenceWithIntermittentNoId() { + + contentNoId = contentNoId.withSingle(tag1); + contentNoId2 = contentNoId2.withSingle(tag2); + entity = entity.withContentNoIdList(asList(contentNoId, contentNoId2)); + + DbAction.Insert parentInsert1 = createInsert("contentNoIdList", contentNoId, 0); + DbAction.Insert parentInsert2 = createInsert("contentNoIdList", contentNoId2, 1); + DbAction.Insert insert1 = createDeepInsert("single", tag1, null, parentInsert1); + DbAction.Insert insert2 = createDeepInsert("single", tag2, null, parentInsert2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentNoIdList) // + .extracting(c -> c.single.id) // + .containsExactly(2, 3); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForEmbeddedDeepReference() { + + contentNoId = contentNoId2.withSingle(tag1); + entity = 
entity.withEmbedded(contentNoId); + + DbAction.Insert parentInsert = createInsert("embedded.single", tag1, null); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + + List result = executor.executeSave(aggregateChange); + entity = result.get(0); + + assertThat(entity.rootId).isEqualTo(1); + assertThat(entity.embedded.single.id).isEqualTo(2); + } + + private static Map createContentMap(Object... keysAndValues) { + + Map contentMap = new HashMap<>(); + + for (int i = 0; i < keysAndValues.length; i += 2) { + contentMap.put((String) keysAndValues[i], (Content) keysAndValues[i + 1]); + } + return unmodifiableMap(contentMap); + } + + private static Map createTagMap(Object... keysAndValues) { + + Map contentMap = new HashMap<>(); + + for (int i = 0; i < keysAndValues.length; i += 2) { + contentMap.put((String) keysAndValues[i], (Tag) keysAndValues[i + 1]); + } + return unmodifiableMap(contentMap); + } + + DbAction.Insert createInsert(String propertyName, Object value, @Nullable Object key) { + + return new DbAction.Insert<>(value, + context.getPersistentPropertyPath(propertyName, DummyEntity.class), rootInsert, + singletonMap(toPath(propertyName), key), IdValueSource.GENERATED); + } + + DbAction.Insert createDeepInsert(String propertyName, Object value, Object key, + @Nullable DbAction.Insert parentInsert) { + + PersistentPropertyPath propertyPath = toPath( + parentInsert.getPropertyPath().toDotPath() + "." 
+ propertyName); + + return new DbAction.Insert<>(value, propertyPath, parentInsert, + singletonMap(propertyPath, key), IdValueSource.GENERATED); + } + + PersistentPropertyPath toPath(String path) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(DummyEntity.class, p -> true); + + return persistentPropertyPaths.filter(p -> p.toDotPath().equals(path)).stream().findFirst() + .orElseThrow(() -> new IllegalArgumentException("No matching path found")); + } + + private static final class DummyEntity { + + @Id + private final + Integer rootId; + private final Content single; + private final Set contentSet; + private final List contentList; + private final Map contentMap; + private final List contentNoIdList; + @Embedded(onEmpty = Embedded.OnEmpty.USE_NULL, prefix = "fooBar") + private final + ContentNoId embedded; + + DummyEntity() { + + rootId = null; + single = null; + contentSet = emptySet(); + contentList = emptyList(); + contentMap = emptyMap(); + contentNoIdList = emptyList(); + embedded = new ContentNoId(); + } + + public DummyEntity(Integer rootId, Content single, Set contentSet, List contentList, Map contentMap, List contentNoIdList, ContentNoId embedded) { + this.rootId = rootId; + this.single = single; + this.contentSet = contentSet; + this.contentList = contentList; + this.contentMap = contentMap; + this.contentNoIdList = contentNoIdList; + this.embedded = embedded; + } + + public Integer getRootId() { + return this.rootId; + } + + public Content getSingle() { + return this.single; + } + + public Set getContentSet() { + return this.contentSet; + } + + public List getContentList() { + return this.contentList; + } + + public Map getContentMap() { + return this.contentMap; + } + + public List getContentNoIdList() { + return this.contentNoIdList; + } + + public ContentNoId getEmbedded() { + return this.embedded; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof 
DummyEntity other)) return false; + final Object this$rootId = this.getRootId(); + final Object other$rootId = other.getRootId(); + if (this$rootId == null ? other$rootId != null : !this$rootId.equals(other$rootId)) return false; + final Object this$single = this.getSingle(); + final Object other$single = other.getSingle(); + if (this$single == null ? other$single != null : !this$single.equals(other$single)) return false; + final Object this$contentSet = this.getContentSet(); + final Object other$contentSet = other.getContentSet(); + if (this$contentSet == null ? other$contentSet != null : !this$contentSet.equals(other$contentSet)) + return false; + final Object this$contentList = this.getContentList(); + final Object other$contentList = other.getContentList(); + if (this$contentList == null ? other$contentList != null : !this$contentList.equals(other$contentList)) + return false; + final Object this$contentMap = this.getContentMap(); + final Object other$contentMap = other.getContentMap(); + if (this$contentMap == null ? other$contentMap != null : !this$contentMap.equals(other$contentMap)) + return false; + final Object this$contentNoIdList = this.getContentNoIdList(); + final Object other$contentNoIdList = other.getContentNoIdList(); + if (this$contentNoIdList == null ? other$contentNoIdList != null : !this$contentNoIdList.equals(other$contentNoIdList)) + return false; + final Object this$embedded = this.getEmbedded(); + final Object other$embedded = other.getEmbedded(); + if (this$embedded == null ? other$embedded != null : !this$embedded.equals(other$embedded)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $rootId = this.getRootId(); + result = result * PRIME + ($rootId == null ? 43 : $rootId.hashCode()); + final Object $single = this.getSingle(); + result = result * PRIME + ($single == null ? 
43 : $single.hashCode()); + final Object $contentSet = this.getContentSet(); + result = result * PRIME + ($contentSet == null ? 43 : $contentSet.hashCode()); + final Object $contentList = this.getContentList(); + result = result * PRIME + ($contentList == null ? 43 : $contentList.hashCode()); + final Object $contentMap = this.getContentMap(); + result = result * PRIME + ($contentMap == null ? 43 : $contentMap.hashCode()); + final Object $contentNoIdList = this.getContentNoIdList(); + result = result * PRIME + ($contentNoIdList == null ? 43 : $contentNoIdList.hashCode()); + final Object $embedded = this.getEmbedded(); + result = result * PRIME + ($embedded == null ? 43 : $embedded.hashCode()); + return result; + } + + public String toString() { + return "AggregateChangeIdGenerationImmutableUnitTests.DummyEntity(rootId=" + this.getRootId() + ", single=" + this.getSingle() + ", contentSet=" + this.getContentSet() + ", contentList=" + this.getContentList() + ", contentMap=" + this.getContentMap() + ", contentNoIdList=" + this.getContentNoIdList() + ", embedded=" + this.getEmbedded() + ")"; + } + + public DummyEntity withRootId(Integer rootId) { + return this.rootId == rootId ? this : new DummyEntity(rootId, this.single, this.contentSet, this.contentList, this.contentMap, this.contentNoIdList, this.embedded); + } + + public DummyEntity withSingle(Content single) { + return this.single == single ? this : new DummyEntity(this.rootId, single, this.contentSet, this.contentList, this.contentMap, this.contentNoIdList, this.embedded); + } + + public DummyEntity withContentSet(Set contentSet) { + return this.contentSet == contentSet ? this : new DummyEntity(this.rootId, this.single, contentSet, this.contentList, this.contentMap, this.contentNoIdList, this.embedded); + } + + public DummyEntity withContentList(List contentList) { + return this.contentList == contentList ? 
this : new DummyEntity(this.rootId, this.single, this.contentSet, contentList, this.contentMap, this.contentNoIdList, this.embedded); + } + + public DummyEntity withContentMap(Map contentMap) { + return this.contentMap == contentMap ? this : new DummyEntity(this.rootId, this.single, this.contentSet, this.contentList, contentMap, this.contentNoIdList, this.embedded); + } + + public DummyEntity withContentNoIdList(List contentNoIdList) { + return this.contentNoIdList == contentNoIdList ? this : new DummyEntity(this.rootId, this.single, this.contentSet, this.contentList, this.contentMap, contentNoIdList, this.embedded); + } + + public DummyEntity withEmbedded(ContentNoId embedded) { + return this.embedded == embedded ? this : new DummyEntity(this.rootId, this.single, this.contentSet, this.contentList, this.contentMap, this.contentNoIdList, embedded); + } + } + + private static final class Content { + + @Id + private final + Integer id; + private final Tag single; + private final Set tagSet; + private final List tagList; + private final Map tagMap; + + Content() { + + id = null; + single = null; + tagSet = emptySet(); + tagList = emptyList(); + tagMap = emptyMap(); + } + + public Content(Integer id, Tag single, Set tagSet, List tagList, Map tagMap) { + this.id = id; + this.single = single; + this.tagSet = tagSet; + this.tagList = tagList; + this.tagMap = tagMap; + } + + public Integer getId() { + return this.id; + } + + public Tag getSingle() { + return this.single; + } + + public Set getTagSet() { + return this.tagSet; + } + + public List getTagList() { + return this.tagList; + } + + public Map getTagMap() { + return this.tagMap; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof Content other)) return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? 
other$id != null : !this$id.equals(other$id)) return false; + final Object this$single = this.getSingle(); + final Object other$single = other.getSingle(); + if (this$single == null ? other$single != null : !this$single.equals(other$single)) return false; + final Object this$tagSet = this.getTagSet(); + final Object other$tagSet = other.getTagSet(); + if (this$tagSet == null ? other$tagSet != null : !this$tagSet.equals(other$tagSet)) return false; + final Object this$tagList = this.getTagList(); + final Object other$tagList = other.getTagList(); + if (this$tagList == null ? other$tagList != null : !this$tagList.equals(other$tagList)) return false; + final Object this$tagMap = this.getTagMap(); + final Object other$tagMap = other.getTagMap(); + if (this$tagMap == null ? other$tagMap != null : !this$tagMap.equals(other$tagMap)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $single = this.getSingle(); + result = result * PRIME + ($single == null ? 43 : $single.hashCode()); + final Object $tagSet = this.getTagSet(); + result = result * PRIME + ($tagSet == null ? 43 : $tagSet.hashCode()); + final Object $tagList = this.getTagList(); + result = result * PRIME + ($tagList == null ? 43 : $tagList.hashCode()); + final Object $tagMap = this.getTagMap(); + result = result * PRIME + ($tagMap == null ? 43 : $tagMap.hashCode()); + return result; + } + + public String toString() { + return "AggregateChangeIdGenerationImmutableUnitTests.Content(id=" + this.getId() + ", single=" + this.getSingle() + ", tagSet=" + this.getTagSet() + ", tagList=" + this.getTagList() + ", tagMap=" + this.getTagMap() + ")"; + } + + public Content withId(Integer id) { + return this.id == id ? 
this : new Content(id, this.single, this.tagSet, this.tagList, this.tagMap); + } + + public Content withSingle(Tag single) { + return this.single == single ? this : new Content(this.id, single, this.tagSet, this.tagList, this.tagMap); + } + + public Content withTagSet(Set tagSet) { + return this.tagSet == tagSet ? this : new Content(this.id, this.single, tagSet, this.tagList, this.tagMap); + } + + public Content withTagList(List tagList) { + return this.tagList == tagList ? this : new Content(this.id, this.single, this.tagSet, tagList, this.tagMap); + } + + public Content withTagMap(Map tagMap) { + return this.tagMap == tagMap ? this : new Content(this.id, this.single, this.tagSet, this.tagList, tagMap); + } + } + + private static final class ContentNoId { + @Column("single") + private final + Tag single; + private final Set tagSet; + private final List tagList; + private final Map tagMap; + + ContentNoId() { + + single = null; + tagSet = emptySet(); + tagList = emptyList(); + tagMap = emptyMap(); + } + + public ContentNoId(Tag single, Set tagSet, List tagList, Map tagMap) { + this.single = single; + this.tagSet = tagSet; + this.tagList = tagList; + this.tagMap = tagMap; + } + + public Tag getSingle() { + return this.single; + } + + public Set getTagSet() { + return this.tagSet; + } + + public List getTagList() { + return this.tagList; + } + + public Map getTagMap() { + return this.tagMap; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof ContentNoId other)) return false; + final Object this$single = this.getSingle(); + final Object other$single = other.getSingle(); + if (this$single == null ? other$single != null : !this$single.equals(other$single)) return false; + final Object this$tagSet = this.getTagSet(); + final Object other$tagSet = other.getTagSet(); + if (this$tagSet == null ? 
other$tagSet != null : !this$tagSet.equals(other$tagSet)) return false; + final Object this$tagList = this.getTagList(); + final Object other$tagList = other.getTagList(); + if (this$tagList == null ? other$tagList != null : !this$tagList.equals(other$tagList)) return false; + final Object this$tagMap = this.getTagMap(); + final Object other$tagMap = other.getTagMap(); + if (this$tagMap == null ? other$tagMap != null : !this$tagMap.equals(other$tagMap)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $single = this.getSingle(); + result = result * PRIME + ($single == null ? 43 : $single.hashCode()); + final Object $tagSet = this.getTagSet(); + result = result * PRIME + ($tagSet == null ? 43 : $tagSet.hashCode()); + final Object $tagList = this.getTagList(); + result = result * PRIME + ($tagList == null ? 43 : $tagList.hashCode()); + final Object $tagMap = this.getTagMap(); + result = result * PRIME + ($tagMap == null ? 43 : $tagMap.hashCode()); + return result; + } + + public String toString() { + return "AggregateChangeIdGenerationImmutableUnitTests.ContentNoId(single=" + this.getSingle() + ", tagSet=" + this.getTagSet() + ", tagList=" + this.getTagList() + ", tagMap=" + this.getTagMap() + ")"; + } + + public ContentNoId withSingle(Tag single) { + return this.single == single ? this : new ContentNoId(single, this.tagSet, this.tagList, this.tagMap); + } + + public ContentNoId withTagSet(Set tagSet) { + return this.tagSet == tagSet ? this : new ContentNoId(this.single, tagSet, this.tagList, this.tagMap); + } + + public ContentNoId withTagList(List tagList) { + return this.tagList == tagList ? this : new ContentNoId(this.single, this.tagSet, tagList, this.tagMap); + } + + public ContentNoId withTagMap(Map tagMap) { + return this.tagMap == tagMap ? 
this : new ContentNoId(this.single, this.tagSet, this.tagList, tagMap); + } + } + + private static final class Tag { + + @Id + private final + Integer id; + + private final String name; + + Tag(String name) { + id = null; + this.name = name; + } + + public Tag(Integer id, String name) { + this.id = id; + this.name = name; + } + + public Integer getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof Tag other)) return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? other$id != null : !this$id.equals(other$id)) return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (this$name == null ? other$name != null : !this$name.equals(other$name)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + return result; + } + + public String toString() { + return "AggregateChangeIdGenerationImmutableUnitTests.Tag(id=" + this.getId() + ", name=" + this.getName() + ")"; + } + + public Tag withId(Integer id) { + return this.id == id ? this : new Tag(id, this.name); + } + + public Tag withName(String name) { + return this.name == name ? 
this : new Tag(this.id, name); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AggregateChangeIdGenerationUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AggregateChangeIdGenerationUnitTests.java new file mode 100644 index 0000000000..ce695532c1 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AggregateChangeIdGenerationUnitTests.java @@ -0,0 +1,396 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.mockito.Mockito.*; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.junit.jupiter.api.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.conversion.DbAction; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.conversion.RootAggregateChange; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.lang.Nullable; + +/** + * Unit tests for the {@link MutableAggregateChange}. 
+ * + * @author Jens Schauder + * @author Myeonghyeon-Lee + * @author Chirag Tailor + */ +public class AggregateChangeIdGenerationUnitTests { + + DummyEntity entity = new DummyEntity(); + Content content = new Content(); + Content content2 = new Content(); + Tag tag1 = new Tag(); + Tag tag2 = new Tag(); + Tag tag3 = new Tag(); + + RelationalMappingContext context = new RelationalMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + DbAction.WithRoot rootInsert = new DbAction.InsertRoot<>(entity, IdValueSource.GENERATED); + DataAccessStrategy accessStrategy = mock(DataAccessStrategy.class, new IncrementingIds()); + AggregateChangeExecutor executor = new AggregateChangeExecutor(converter, accessStrategy); + + @Test // DATAJDBC-291 + public void singleRoot() { + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + + executor.executeSave(aggregateChange); + + assertThat(entity.rootId).isEqualTo(1); + } + + @Test // DATAJDBC-291 + public void simpleReference() { + + entity.single = content; + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(createInsert("single", content, null)); + + executor.executeSave(aggregateChange); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.single.id).isEqualTo(2); + }); + } + + @Test // DATAJDBC-291 + public void listReference() { + + entity.contentList.add(content); + entity.contentList.add(content2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(createInsert("contentList", content, 0)); + aggregateChange.addAction(createInsert("contentList", content2, 1)); + + executor.executeSave(aggregateChange); 
+ + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentList).extracting(c -> c.id).containsExactly(2, 3); + }); + } + + @Test // DATAJDBC-291 + public void mapReference() { + + entity.contentMap.put("a", content); + entity.contentMap.put("b", content2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(createInsert("contentMap", content, "a")); + aggregateChange.addAction(createInsert("contentMap", content2, "b")); + + executor.executeSave(aggregateChange); + + assertThat(entity.rootId).isEqualTo(1); + assertThat(entity.contentMap.values()).extracting(c -> c.id).containsExactly(2, 3); + } + + @Test // DATAJDBC-291 + public void setIdForDeepReference() { + + content.single = tag1; + entity.single = content; + + DbAction.Insert parentInsert = createInsert("single", content, null); + DbAction.Insert insert = createDeepInsert("single", tag1, null, parentInsert); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + aggregateChange.addAction(insert); + + executor.executeSave(aggregateChange); + + assertThat(entity.rootId).isEqualTo(1); + assertThat(entity.single.id).isEqualTo(2); + assertThat(entity.single.single.id).isEqualTo(3); + } + + @Test // DATAJDBC-291 + public void setIdForDeepReferenceElementList() { + + content.tagList.add(tag1); + content.tagList.add(tag2); + entity.single = content; + + DbAction.Insert parentInsert = createInsert("single", content, null); + DbAction.Insert insert1 = createDeepInsert("tagList", tag1, 0, parentInsert); + DbAction.Insert insert2 = createDeepInsert("tagList", tag2, 1, parentInsert); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + 
aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + executor.executeSave(aggregateChange); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.single.id).isEqualTo(2); + softly.assertThat(entity.single.tagList).extracting(t -> t.id).containsExactly(3, 4); + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementSetElementSet() { + + content.tagSet.add(tag1); + content.tagSet.add(tag2); + entity.contentSet.add(content); + + DbAction.Insert parentInsert = createInsert("contentSet", content, null); + DbAction.Insert insert1 = createDeepInsert("tagSet", tag1, null, parentInsert); + DbAction.Insert insert2 = createDeepInsert("tagSet", tag2, null, parentInsert); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + executor.executeSave(aggregateChange); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentSet) // + .extracting(c -> content.id) // + .containsExactly(2); // + softly.assertThat(entity.contentSet.stream() // + .flatMap(c -> c.tagSet.stream())) // + .extracting(t -> t.id) // + .containsExactlyInAnyOrder(3, 4); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementListSingleReference() { + + content.single = tag1; + content2.single = tag2; + entity.contentList.add(content); + entity.contentList.add(content2); + + DbAction.Insert parentInsert1 = createInsert("contentList", content, 0); + DbAction.Insert parentInsert2 = createInsert("contentList", content2, 1); + DbAction.Insert insert1 = createDeepInsert("single", tag1, null, parentInsert1); + DbAction.Insert insert2 = createDeepInsert("single", tag2, null, parentInsert2); + + RootAggregateChange aggregateChange = 
MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + + executor.executeSave(aggregateChange); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentList) // + .extracting(c -> c.id, c -> c.single.id) // + .containsExactly(tuple(2, 4), tuple(3, 5)); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementListElementList() { + + content.tagList.add(tag1); + content2.tagList.add(tag2); + content2.tagList.add(tag3); + entity.contentList.add(content); + entity.contentList.add(content2); + + DbAction.Insert parentInsert1 = createInsert("contentList", content, 0); + DbAction.Insert parentInsert2 = createInsert("contentList", content2, 1); + DbAction.Insert insert1 = createDeepInsert("tagList", tag1, 0, parentInsert1); + DbAction.Insert insert2 = createDeepInsert("tagList", tag2, 0, parentInsert2); + DbAction.Insert insert3 = createDeepInsert("tagList", tag3, 1, parentInsert2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + aggregateChange.addAction(insert3); + + executor.executeSave(aggregateChange); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentList) // + .extracting(c -> c.id) // + .containsExactly(2, 3); // + softly.assertThat(entity.contentList.stream() // + .flatMap(c -> c.tagList.stream()) // + ).extracting(t -> t.id) // + .containsExactly(4, 5, 6); // + }); + } + + @Test // DATAJDBC-291 + public void setIdForDeepElementMapElementMap() { + + content.tagMap.put("111", tag1); + 
content2.tagMap.put("222", tag2); + content2.tagMap.put("333", tag3); + entity.contentMap.put("one", content); + entity.contentMap.put("two", content2); + + DbAction.Insert parentInsert1 = createInsert("contentMap", content, "one"); + DbAction.Insert parentInsert2 = createInsert("contentMap", content2, "two"); + DbAction.Insert insert1 = createDeepInsert("tagMap", tag1, "111", parentInsert1); + DbAction.Insert insert2 = createDeepInsert("tagMap", tag2, "222", parentInsert2); + DbAction.Insert insert3 = createDeepInsert("tagMap", tag3, "333", parentInsert2); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + aggregateChange.setRootAction(rootInsert); + aggregateChange.addAction(parentInsert1); + aggregateChange.addAction(parentInsert2); + aggregateChange.addAction(insert1); + aggregateChange.addAction(insert2); + aggregateChange.addAction(insert3); + + executor.executeSave(aggregateChange); + + assertSoftly(softly -> { + + softly.assertThat(entity.rootId).isEqualTo(1); + softly.assertThat(entity.contentMap.entrySet()) // + .extracting(Map.Entry::getKey, e -> e.getValue().id) // + .containsExactly(tuple("one", 2), tuple("two", 3)); // + softly.assertThat(entity.contentMap.values().stream() // + .flatMap(c -> c.tagMap.entrySet().stream())) // + .extracting(Map.Entry::getKey, e -> e.getValue().id) // + .containsExactly( // + tuple("111", 4), // + tuple("222", 5), // + tuple("333", 6) // + ); // + }); + } + + DbAction.Insert createInsert(String propertyName, Object value, @Nullable Object key) { + + return new DbAction.Insert<>(value, context.getPersistentPropertyPath(propertyName, DummyEntity.class), rootInsert, + key == null ? emptyMap() : singletonMap(toPath(propertyName), key), IdValueSource.GENERATED); + } + + DbAction.Insert createDeepInsert(String propertyName, Object value, @Nullable Object key, + DbAction.Insert parentInsert) { + + PersistentPropertyPath propertyPath = toPath( + parentInsert.getPropertyPath().toDotPath() + "." 
+ propertyName); + + return new DbAction.Insert<>(value, propertyPath, parentInsert, + key == null ? emptyMap() : singletonMap(propertyPath, key), IdValueSource.GENERATED); + } + + PersistentPropertyPath toPath(String path) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(DummyEntity.class, p -> true); + + return persistentPropertyPaths.filter(p -> path.equals(p.toDotPath())).stream().findFirst() + .orElseThrow(() -> new IllegalArgumentException("No matching path found")); + } + + private static class DummyEntity { + + @Id Integer rootId; + + Content single; + + Set contentSet = new HashSet<>(); + + List contentList = new ArrayList<>(); + + Map contentMap = new HashMap<>(); + } + + private static class Content { + + @Id Integer id; + + Tag single; + + Set tagSet = new HashSet<>(); + + List tagList = new ArrayList<>(); + + Map tagMap = new HashMap<>(); + } + + private static class Tag { + @Id Integer id; + } + + private static class IncrementingIds implements Answer { + long id = 1; + + @Override + public Object answer(InvocationOnMock invocation) { + + if (!invocation.getMethod().getReturnType().equals(Object.class)) { + throw new UnsupportedOperationException("This mock does not support this invocation: " + invocation); + } + + return id++; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/ImmutableAggregateTemplateHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/ImmutableAggregateTemplateHsqlIntegrationTests.java new file mode 100644 index 0000000000..e9a4a7ee1a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/ImmutableAggregateTemplateHsqlIntegrationTests.java @@ -0,0 +1,612 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.Objects; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Integration tests for {@link JdbcAggregateTemplate} and it's handling of immutable entities. 
+ * + * @author Jens Schauder + * @author Salim Achouche + * @author Chirag Tailor + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class ImmutableAggregateTemplateHsqlIntegrationTests { + + @Autowired JdbcAggregateOperations template; + + @Test // DATAJDBC-241 + public void saveWithGeneratedIdCreatesNewInstance() { + + LegoSet legoSet = createLegoSet(createManual()); + + LegoSet saved = template.save(legoSet); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(legoSet).isNotSameAs(saved); + softly.assertThat(legoSet.getId()).isNull(); + + softly.assertThat(saved.getId()).isNotNull(); + softly.assertThat(saved.name).isNotNull(); + softly.assertThat(saved.manual).isNotNull(); + softly.assertThat(saved.manual.content).isNotNull(); + + softly.assertAll(); + } + + @Test // DATAJDBC-241 + public void saveAndLoadAnEntityWithReferencedEntityById() { + + LegoSet saved = template.save(createLegoSet(createManual())); + + assertThat(saved.manual.id).describedAs("id of stored manual").isNotNull(); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + + assertThat(reloadedLegoSet.manual).isNotNull(); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(reloadedLegoSet.manual.getId()) // + .isEqualTo(saved.getManual().getId()) // + .isNotNull(); + softly.assertThat(reloadedLegoSet.manual.getContent()).isEqualTo(saved.getManual().getContent()); + + softly.assertAll(); + } + + @Test // DATAJDBC-291 + public void saveAndLoadAnEntityWithTwoReferencedEntitiesById() { + + LegoSet saved = template.save(createLegoSet(createManual(), new Author(null, "Alfred E. 
Neumann"))); + + assertThat(saved.manual.id).describedAs("id of stored manual").isNotNull(); + assertThat(saved.author.id).describedAs("id of stored author").isNotNull(); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + + assertThat(reloadedLegoSet.manual).isNotNull(); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(reloadedLegoSet.manual.getId()) // + .isEqualTo(saved.getManual().getId()) // + .isNotNull(); + softly.assertThat(reloadedLegoSet.manual.getContent()).isEqualTo(saved.getManual().getContent()); + softly.assertThat(reloadedLegoSet.author.getName()).isEqualTo(saved.getAuthor().getName()); + + softly.assertAll(); + } + + @Test // DATAJDBC-241 + public void saveAndLoadManyEntitiesWithReferencedEntity() { + + LegoSet legoSet = createLegoSet(createManual()); + + LegoSet savedLegoSet = template.save(legoSet); + + Iterable reloadedLegoSets = template.findAll(LegoSet.class); + + assertThat(reloadedLegoSets).hasSize(1).extracting("id", "manual.id", "manual.content") + .contains(tuple(savedLegoSet.getId(), savedLegoSet.getManual().getId(), savedLegoSet.getManual().getContent())); + } + + @Test // DATAJDBC-241 + public void saveAndLoadManyEntitiesByIdWithReferencedEntity() { + + LegoSet saved = template.save(createLegoSet(createManual())); + + Iterable reloadedLegoSets = template.findAllById(singletonList(saved.getId()), LegoSet.class); + + assertThat(reloadedLegoSets).hasSize(1).extracting("id", "manual.id", "manual.content") + .contains(tuple(saved.getId(), saved.getManual().getId(), saved.getManual().getContent())); + } + + @Test // DATAJDBC-241 + public void saveAndLoadAnEntityWithReferencedNullEntity() { + + LegoSet saved = template.save(createLegoSet(null)); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + + assertThat(reloadedLegoSet.manual).isNull(); + } + + @Test // DATAJDBC-241 + public void saveAndDeleteAnEntityWithReferencedEntity() { + + LegoSet legoSet = 
createLegoSet(createManual()); + + LegoSet saved = template.save(legoSet); + + template.delete(saved); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(template.findAll(LegoSet.class)).isEmpty(); + softly.assertThat(template.findAll(Manual.class)).isEmpty(); + + softly.assertAll(); + } + + @Test // DATAJDBC-241 + public void saveAndDeleteAllWithReferencedEntity() { + + template.save(createLegoSet(createManual())); + + template.deleteAll(LegoSet.class); + + SoftAssertions softly = new SoftAssertions(); + + assertThat(template.findAll(LegoSet.class)).isEmpty(); + assertThat(template.findAll(Manual.class)).isEmpty(); + + softly.assertAll(); + } + + @Test // DATAJDBC-241 + public void updateReferencedEntityFromNull() { + + LegoSet saved = template.save(createLegoSet(null)); + + LegoSet changedLegoSet = new LegoSet(saved.id, saved.name, new Manual(23L, "Some content"), null); + + template.save(changedLegoSet); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + + assertThat(reloadedLegoSet.manual.content).isEqualTo("Some content"); + } + + @Test // DATAJDBC-241 + public void updateReferencedEntityToNull() { + + LegoSet saved = template.save(createLegoSet(null)); + + LegoSet changedLegoSet = new LegoSet(saved.id, saved.name, null, null); + + template.save(changedLegoSet); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(reloadedLegoSet.manual).isNull(); + softly.assertThat(template.findAll(Manual.class)).describedAs("Manuals failed to delete").isEmpty(); + + softly.assertAll(); + } + + @Test // DATAJDBC-241 + public void replaceReferencedEntity() { + + LegoSet saved = template.save(createLegoSet(null)); + + LegoSet changedLegoSet = new LegoSet(saved.id, saved.name, new Manual(null, "other content"), null); + + template.save(changedLegoSet); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + 
+ SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(reloadedLegoSet.manual.content).isEqualTo("other content"); + softly.assertThat(template.findAll(Manual.class)).describedAs("There should be only one manual").hasSize(1); + + softly.assertAll(); + } + + @Test // GH-1201 + void replaceReferencedEntity_saveResult() { + + Root root = new Root(null, "originalRoot", new NonRoot(null, "originalNonRoot")); + Root originalSavedRoot = template.save(root); + + assertThat(originalSavedRoot.id).isNotNull(); + assertThat(originalSavedRoot.name).isEqualTo("originalRoot"); + assertThat(originalSavedRoot.reference.id).isNotNull(); + assertThat(originalSavedRoot.reference.name).isEqualTo("originalNonRoot"); + + Root updatedRoot = new Root(originalSavedRoot.id, "updatedRoot", new NonRoot(null, "updatedNonRoot")); + Root updatedSavedRoot = template.save(updatedRoot); + + assertThat(updatedSavedRoot.id).isNotNull(); + assertThat(updatedSavedRoot.name).isEqualTo("updatedRoot"); + assertThat(updatedSavedRoot.reference.id).isNotNull().isNotEqualTo(originalSavedRoot.reference.id); + assertThat(updatedSavedRoot.reference.name).isEqualTo("updatedNonRoot"); + } + + @Test // DATAJDBC-241 + public void changeReferencedEntity() { + + LegoSet saved = template.save(createLegoSet(createManual())); + + LegoSet changedLegoSet = saved.withManual(saved.manual.withContent("new content")); + + template.save(changedLegoSet); + + LegoSet reloadedLegoSet = template.findById(saved.getId(), LegoSet.class); + + Manual manual = reloadedLegoSet.manual; + assertThat(manual).isNotNull(); + assertThat(manual.content).isEqualTo("new content"); + } + + @Test // DATAJDBC-545 + public void setIdViaConstructor() { + + WithCopyConstructor entity = new WithCopyConstructor(null, "Alfred"); + + WithCopyConstructor saved = template.save(entity); + + assertThat(saved).isNotEqualTo(entity); + assertThat(saved.id).isNotNull(); + } + + private static LegoSet createLegoSet(Manual manual) { + + return new 
LegoSet(null, "Star Destroyer", manual, null); + } + + private static LegoSet createLegoSet(Manual manual, Author author) { + + return new LegoSet(null, "Star Destroyer", manual, author); + } + + private static Manual createManual() { + return new Manual(null, + "Accelerates to 99% of light speed. Destroys almost everything. See https://what-if.xkcd.com/1/"); + } + + static final class LegoSet { + + @Id private final Long id; + private final String name; + private final Manual manual; + private final Author author; + + public LegoSet(Long id, String name, Manual manual, Author author) { + this.id = id; + this.name = name; + this.manual = manual; + this.author = author; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Manual getManual() { + return this.manual; + } + + public Author getAuthor() { + return this.author; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final LegoSet other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (!Objects.equals(this$name, other$name)) + return false; + final Object this$manual = this.getManual(); + final Object other$manual = other.getManual(); + if (!Objects.equals(this$manual, other$manual)) + return false; + final Object this$author = this.getAuthor(); + final Object other$author = other.getAuthor(); + return Objects.equals(this$author, other$author); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + final Object $manual = this.getManual(); + result = result * PRIME + ($manual == null ? 43 : $manual.hashCode()); + final Object $author = this.getAuthor(); + result = result * PRIME + ($author == null ? 43 : $author.hashCode()); + return result; + } + + public String toString() { + return "ImmutableAggregateTemplateHsqlIntegrationTests.LegoSet(id=" + this.getId() + ", name=" + this.getName() + + ", manual=" + this.getManual() + ", author=" + this.getAuthor() + ")"; + } + + public LegoSet withId(Long id) { + return this.id == id ? this : new LegoSet(id, this.name, this.manual, this.author); + } + + public LegoSet withName(String name) { + return this.name == name ? this : new LegoSet(this.id, name, this.manual, this.author); + } + + public LegoSet withManual(Manual manual) { + return this.manual == manual ? this : new LegoSet(this.id, this.name, manual, this.author); + } + + public LegoSet withAuthor(Author author) { + return this.author == author ? this : new LegoSet(this.id, this.name, this.manual, author); + } + } + + static final class Manual { + + @Id private final Long id; + private final String content; + + public Manual(Long id, String content) { + this.id = id; + this.content = content; + } + + public Long getId() { + return this.id; + } + + public String getContent() { + return this.content; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Manual other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$content = this.getContent(); + final Object other$content = other.getContent(); + return Objects.equals(this$content, other$content); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 
43 : $id.hashCode()); + final Object $content = this.getContent(); + result = result * PRIME + ($content == null ? 43 : $content.hashCode()); + return result; + } + + public String toString() { + return "ImmutableAggregateTemplateHsqlIntegrationTests.Manual(id=" + this.getId() + ", content=" + + this.getContent() + ")"; + } + + public Manual withId(Long id) { + return this.id == id ? this : new Manual(id, this.content); + } + + public Manual withContent(String content) { + return this.content == content ? this : new Manual(this.id, content); + } + } + + static final class Author { + + @Id private final Long id; + private final String name; + + public Author(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Author other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + return result; + } + + public String toString() { + return "ImmutableAggregateTemplateHsqlIntegrationTests.Author(id=" + this.getId() + ", name=" + this.getName() + + ")"; + } + + public Author withId(Long id) { + return this.id == id ? this : new Author(id, this.name); + } + + public Author withName(String name) { + return this.name == name ? 
this : new Author(this.id, name); + } + } + + static class Root { + @Id private Long id; + private String name; + private NonRoot reference; + + public Root(Long id, String name, NonRoot reference) { + this.id = id; + this.name = name; + this.reference = reference; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public NonRoot getReference() { + return this.reference; + } + + public void setId(Long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setReference(NonRoot reference) { + this.reference = reference; + } + } + + static final class NonRoot { + @Id private final Long id; + private final String name; + + public NonRoot(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final NonRoot other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + return result; + } + + public String toString() { + return "ImmutableAggregateTemplateHsqlIntegrationTests.NonRoot(id=" + this.getId() + ", name=" + this.getName() + + ")"; + } + + public NonRoot withId(Long id) { + return this.id == id ? this : new NonRoot(id, this.name); + } + + public NonRoot withName(String name) { + return this.name == name ? 
this : new NonRoot(this.id, name); + } + } + + static class WithCopyConstructor { + @Id private final Long id; + private final String name; + + WithCopyConstructor(Long id, String name) { + this.id = id; + this.name = name; + } + } + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + Class testClass() { + return ImmutableAggregateTemplateHsqlIntegrationTests.class; + } + + @Bean + JdbcAggregateOperations operations(ApplicationEventPublisher publisher, RelationalMappingContext context, + DataAccessStrategy dataAccessStrategy, JdbcConverter converter) { + return new JdbcAggregateTemplate(publisher, context, converter, dataAccessStrategy); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutorContextImmutableUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutorContextImmutableUnitTests.java new file mode 100644 index 0000000000..78d05c03dc --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutorContextImmutableUnitTests.java @@ -0,0 +1,318 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.JdbcIdentifierBuilder; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.conversion.DbAction; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.lang.Nullable; + +/** + * Test for the {@link JdbcAggregateChangeExecutionContext} when operating on immutable classes. 
+ * + * @author Jens Schauder + * @author Chirag Tailor + */ +public class JdbcAggregateChangeExecutorContextImmutableUnitTests { + + RelationalMappingContext context = new RelationalMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + DataAccessStrategy accessStrategy = mock(DataAccessStrategy.class); + + JdbcAggregateChangeExecutionContext executionContext = new JdbcAggregateChangeExecutionContext(converter, + accessStrategy); + + DummyEntity root = new DummyEntity(); + + @Test // DATAJDBC-453 + public void afterInsertRootIdMaybeUpdated() { + + // note that the root entity isn't the original one, but a new instance with the version set. + when(accessStrategy.insert(any(DummyEntity.class), eq(DummyEntity.class), eq(Identifier.empty()), + eq(IdValueSource.GENERATED))).thenReturn(23L); + + executionContext.executeInsertRoot(new DbAction.InsertRoot<>(root, IdValueSource.GENERATED)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).hasSize(1); + DummyEntity newRoot = newRoots.get(0); + assertThat(newRoot.id).isEqualTo(23L); + } + + @Test // DATAJDBC-453 + public void idGenerationOfChild() { + + Content content = new Content(); + + when(accessStrategy.insert(any(DummyEntity.class), eq(DummyEntity.class), eq(Identifier.empty()), + eq(IdValueSource.GENERATED))).thenReturn(23L); + when(accessStrategy.insert(any(Content.class), eq(Content.class), eq(createBackRef(23L)), + eq(IdValueSource.GENERATED))).thenReturn(24L); + + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + executionContext.executeInsertRoot(rootInsert); + executionContext.executeInsert(createInsert(rootInsert, "content", content, null)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).hasSize(1); + DummyEntity newRoot = newRoots.get(0); + assertThat(newRoot.id).isEqualTo(23L); + + 
assertThat(newRoot.content.id).isEqualTo(24L); + } + + @Test // DATAJDBC-453 + public void idGenerationOfChildInList() { + + Content content = new Content(); + + when(accessStrategy.insert(any(DummyEntity.class), eq(DummyEntity.class), eq(Identifier.empty()), + eq(IdValueSource.GENERATED))).thenReturn(23L); + when(accessStrategy.insert(eq(content), eq(Content.class), any(Identifier.class), eq(IdValueSource.GENERATED))) + .thenReturn(24L); + + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + executionContext.executeInsertRoot(rootInsert); + executionContext.executeInsert(createInsert(rootInsert, "list", content, 1)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).hasSize(1); + DummyEntity newRoot = newRoots.get(0); + assertThat(newRoot.id).isEqualTo(23L); + + assertThat(newRoot.list.get(0).id).isEqualTo(24L); + } + + @Test // GH-537 + void populatesIdsIfNecessaryForAllRootsThatWereProcessed() { + + DummyEntity root1 = new DummyEntity().withId(123L); + when(accessStrategy.update(root1, DummyEntity.class)).thenReturn(true); + DbAction.UpdateRoot rootUpdate1 = new DbAction.UpdateRoot<>(root1, null); + executionContext.executeUpdateRoot(rootUpdate1); + Content content1 = new Content(); + when(accessStrategy.insert(content1, Content.class, createBackRef(123L), IdValueSource.GENERATED)).thenReturn(11L); + executionContext.executeInsert(createInsert(rootUpdate1, "content", content1, null)); + + DummyEntity root2 = new DummyEntity(); + DbAction.InsertRoot rootInsert2 = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + when(accessStrategy.insert(root2, DummyEntity.class, Identifier.empty(), IdValueSource.GENERATED)).thenReturn(456L); + executionContext.executeInsertRoot(rootInsert2); + Content content2 = new Content(); + when(accessStrategy.insert(content2, Content.class, createBackRef(456L), IdValueSource.GENERATED)).thenReturn(12L); + 
executionContext.executeInsert(createInsert(rootInsert2, "content", content2, null)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).hasSize(2); + DummyEntity newRoot1 = newRoots.get(0); + assertThat(newRoot1.id).isEqualTo(123L); + assertThat(newRoot1.content.id).isEqualTo(11L); + DummyEntity newRoot2 = newRoots.get(1); + assertThat(newRoot2.id).isEqualTo(456L); + assertThat(newRoot2.content.id).isEqualTo(12L); + } + + DbAction.Insert createInsert(DbAction.WithEntity parent, String propertyName, Object value, + @Nullable Object key) { + + return new DbAction.Insert<>(value, getPersistentPropertyPath(propertyName), parent, + key == null ? emptyMap() : singletonMap(toPath(propertyName), key), IdValueSource.GENERATED); + } + + AggregatePath toAggregatePath(String path) { + return context.getAggregatePath(getPersistentPropertyPath(path)); + } + + PersistentPropertyPath getPersistentPropertyPath(String propertyName) { + return context.getPersistentPropertyPath(propertyName, DummyEntity.class); + } + + Identifier createBackRef(long value) { + return JdbcIdentifierBuilder.forBackReferences(converter, toAggregatePath("content"), value).build(); + } + + PersistentPropertyPath toPath(String path) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(DummyEntity.class, p -> true); + + return persistentPropertyPaths.filter(p -> p.toDotPath().equals(path)).stream().findFirst() + .orElseThrow(() -> new IllegalArgumentException("No matching path found")); + } + + private static final class DummyEntity { + + @Id + private final Long id; + @Version + private final long version; + + private final Content content; + + private final List list; + + DummyEntity() { + + id = null; + version = 0; + content = null; + list = null; + } + + public DummyEntity(Long id, long version, Content content, List list) { + this.id = id; + this.version = version; + this.content = content; + this.list = list; + } + + public 
Long getId() { + return this.id; + } + + public long getVersion() { + return this.version; + } + + public Content getContent() { + return this.content; + } + + public List getList() { + return this.list; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final DummyEntity other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + if (this.getVersion() != other.getVersion()) return false; + final Object this$content = this.getContent(); + final Object other$content = other.getContent(); + if (!Objects.equals(this$content, other$content)) + return false; + final Object this$list = this.getList(); + final Object other$list = other.getList(); + return Objects.equals(this$list, other$list); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final long $version = this.getVersion(); + result = result * PRIME + (int) ($version >>> 32 ^ $version); + final Object $content = this.getContent(); + result = result * PRIME + ($content == null ? 43 : $content.hashCode()); + final Object $list = this.getList(); + result = result * PRIME + ($list == null ? 43 : $list.hashCode()); + return result; + } + + public String toString() { + return "JdbcAggregateChangeExecutorContextImmutableUnitTests.DummyEntity(id=" + this.getId() + ", version=" + this.getVersion() + ", content=" + this.getContent() + ", list=" + this.getList() + ")"; + } + + public DummyEntity withId(Long id) { + return this.id == id ? this : new DummyEntity(id, this.version, this.content, this.list); + } + + public DummyEntity withVersion(long version) { + return this.version == version ? this : new DummyEntity(this.id, version, this.content, this.list); + } + + public DummyEntity withContent(Content content) { + return this.content == content ? 
this : new DummyEntity(this.id, this.version, content, this.list); + } + + public DummyEntity withList(List list) { + return this.list == list ? this : new DummyEntity(this.id, this.version, this.content, list); + } + } + + private static final class Content { + @Id + private final Long id; + + Content() { + id = null; + } + + public Content(Long id) { + this.id = id; + } + + public Long getId() { + return this.id; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final Content other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + return Objects.equals(this$id, other$id); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + return result; + } + + public String toString() { + return "JdbcAggregateChangeExecutorContextImmutableUnitTests.Content(id=" + this.getId() + ")"; + } + + public Content withId(Long id) { + return this.id == id ? this : new Content(id); + } + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutorContextUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutorContextUnitTests.java new file mode 100644 index 0000000000..eef22d5c94 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateChangeExecutorContextUnitTests.java @@ -0,0 +1,291 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.jdbc.core.convert.JdbcIdentifierBuilder.*; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.data.jdbc.core.convert.InsertSubject; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.conversion.DbAction; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link JdbcAggregateChangeExecutionContext}. 
+ * + * @author Jens Schauder + * @author Umut Erturk + * @author Chirag Tailor + */ +public class JdbcAggregateChangeExecutorContextUnitTests { + + RelationalMappingContext context = new RelationalMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + DataAccessStrategy accessStrategy = mock(DataAccessStrategy.class); + + JdbcAggregateChangeExecutionContext executionContext = new JdbcAggregateChangeExecutionContext(converter, + accessStrategy); + + DummyEntity root = new DummyEntity(); + + @Test // DATAJDBC-453 + public void afterInsertRootIdMaybeUpdated() { + + when(accessStrategy.insert(root, DummyEntity.class, Identifier.empty(), IdValueSource.GENERATED)).thenReturn(23L); + + executionContext.executeInsertRoot(new DbAction.InsertRoot<>(root, IdValueSource.GENERATED)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(23L); + } + + @Test // DATAJDBC-453 + public void idGenerationOfChild() { + + Content content = new Content(); + + when(accessStrategy.insert(root, DummyEntity.class, Identifier.empty(), IdValueSource.GENERATED)).thenReturn(23L); + when(accessStrategy.insert(content, Content.class, createBackRef(23L), IdValueSource.GENERATED)).thenReturn(24L); + + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + executionContext.executeInsertRoot(rootInsert); + executionContext.executeInsert(createInsert(rootInsert, "content", content, null, IdValueSource.GENERATED)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(23L); + + assertThat(content.id).isEqualTo(24L); + } + + @Test // DATAJDBC-453 + public void idGenerationOfChildInList() { + + Content content = new Content(); + + when(accessStrategy.insert(root, DummyEntity.class, 
Identifier.empty(), IdValueSource.GENERATED)).thenReturn(23L); + when(accessStrategy.insert(eq(content), eq(Content.class), any(Identifier.class), eq(IdValueSource.GENERATED))) + .thenReturn(24L); + + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + executionContext.executeInsertRoot(rootInsert); + executionContext.executeInsert(createInsert(rootInsert, "list", content, 1, IdValueSource.GENERATED)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(23L); + + assertThat(content.id).isEqualTo(24L); + } + + @Test // GH-1159 + void batchInsertOperation_withGeneratedIds() { + + when(accessStrategy.insert(root, DummyEntity.class, Identifier.empty(), IdValueSource.GENERATED)).thenReturn(123L); + + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + executionContext.executeInsertRoot(rootInsert); + + Content content = new Content(); + Identifier identifier = Identifier.empty().withPart(SqlIdentifier.quoted("DUMMY_ENTITY"), 123L, Long.class) + .withPart(SqlIdentifier.quoted("DUMMY_ENTITY_KEY"), 0, Integer.class); + when(accessStrategy.insert(singletonList(InsertSubject.describedBy(content, identifier)), Content.class, + IdValueSource.GENERATED)).thenReturn(new Object[] { 456L }); + DbAction.BatchInsert batchInsert = new DbAction.BatchInsert<>( + singletonList(createInsert(rootInsert, "list", content, 0, IdValueSource.GENERATED))); + executionContext.executeBatchInsert(batchInsert); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(123L); + assertThat(content.id).isEqualTo(456L); + } + + @Test // GH-1159 + void batchInsertOperation_withoutGeneratedIds() { + + when(accessStrategy.insert(root, DummyEntity.class, Identifier.empty(), IdValueSource.GENERATED)).thenReturn(123L); + + DbAction.InsertRoot rootInsert = 
new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + executionContext.executeInsertRoot(rootInsert); + + Content content = new Content(); + Identifier identifier = Identifier.empty().withPart(SqlIdentifier.quoted("DUMMY_ENTITY"), 123L, Long.class) + .withPart(SqlIdentifier.quoted("DUMMY_ENTITY_KEY"), 0, Integer.class); + when(accessStrategy.insert(singletonList(InsertSubject.describedBy(content, identifier)), Content.class, + IdValueSource.PROVIDED)).thenReturn(new Object[] { null }); + DbAction.BatchInsert batchInsert = new DbAction.BatchInsert<>( + singletonList(createInsert(rootInsert, "list", content, 0, IdValueSource.PROVIDED))); + executionContext.executeBatchInsert(batchInsert); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(123L); + assertThat(content.id).isNull(); + } + + @Test // GH-537 + void batchInsertRootOperation_withGeneratedIds() { + + when(accessStrategy.insert(singletonList(InsertSubject.describedBy(root, Identifier.empty())), DummyEntity.class, + IdValueSource.GENERATED)).thenReturn(new Object[] { 123L }); + executionContext.executeBatchInsertRoot( + new DbAction.BatchInsertRoot<>(singletonList(new DbAction.InsertRoot<>(root, IdValueSource.GENERATED)))); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(123L); + } + + @Test // GH-537 + void batchInsertRootOperation_withoutGeneratedIds() { + + when(accessStrategy.insert(singletonList(InsertSubject.describedBy(root, Identifier.empty())), DummyEntity.class, + IdValueSource.PROVIDED)).thenReturn(new Object[] { null }); + executionContext.executeBatchInsertRoot( + new DbAction.BatchInsertRoot<>(singletonList(new DbAction.InsertRoot<>(root, IdValueSource.PROVIDED)))); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isNull(); 
+ } + + @Test // GH-1201 + void updates_whenReferencesWithImmutableIdAreInserted() { + + root.id = 123L; + when(accessStrategy.update(root, DummyEntity.class)).thenReturn(true); + DbAction.UpdateRoot rootUpdate = new DbAction.UpdateRoot<>(root, null); + executionContext.executeUpdateRoot(rootUpdate); + + ContentImmutableId contentImmutableId = new ContentImmutableId(null); + root.contentImmutableId = contentImmutableId; + Identifier identifier = Identifier.empty().withPart(SqlIdentifier.quoted("DUMMY_ENTITY"), 123L, Long.class); + when(accessStrategy.insert(contentImmutableId, ContentImmutableId.class, identifier, IdValueSource.GENERATED)) + .thenReturn(456L); + executionContext.executeInsert( + createInsert(rootUpdate, "contentImmutableId", contentImmutableId, null, IdValueSource.GENERATED)); + + List newRoots = executionContext.populateIdsIfNecessary(); + assertThat(newRoots).containsExactly(root); + assertThat(root.id).isEqualTo(123L); + assertThat(root.contentImmutableId.id).isEqualTo(456L); + } + + @Test // GH-537 + void populatesIdsIfNecessaryForAllRootsThatWereProcessed() { + + DummyEntity root1 = new DummyEntity(); + root1.id = 123L; + when(accessStrategy.update(root1, DummyEntity.class)).thenReturn(true); + DbAction.UpdateRoot rootUpdate1 = new DbAction.UpdateRoot<>(root1, null); + executionContext.executeUpdateRoot(rootUpdate1); + Content content1 = new Content(); + when(accessStrategy.insert(content1, Content.class, createBackRef(123L), IdValueSource.GENERATED)).thenReturn(11L); + executionContext.executeInsert(createInsert(rootUpdate1, "content", content1, null, IdValueSource.GENERATED)); + + DummyEntity root2 = new DummyEntity(); + DbAction.InsertRoot rootInsert2 = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + when(accessStrategy.insert(root2, DummyEntity.class, Identifier.empty(), IdValueSource.GENERATED)).thenReturn(456L); + executionContext.executeInsertRoot(rootInsert2); + Content content2 = new Content(); + 
when(accessStrategy.insert(content2, Content.class, createBackRef(456L), IdValueSource.GENERATED)).thenReturn(12L); + executionContext.executeInsert(createInsert(rootInsert2, "content", content2, null, IdValueSource.GENERATED)); + + List newRoots = executionContext.populateIdsIfNecessary(); + + assertThat(newRoots).containsExactly(root1, root2); + assertThat(root1.id).isEqualTo(123L); + assertThat(content1.id).isEqualTo(11L); + assertThat(root2.id).isEqualTo(456L); + assertThat(content2.id).isEqualTo(12L); + } + + DbAction.Insert createInsert(DbAction.WithEntity parent, String propertyName, Object value, + @Nullable Object key, IdValueSource idValueSource) { + + return new DbAction.Insert<>(value, getPersistentPropertyPath(propertyName), parent, + key == null ? emptyMap() : singletonMap(toPath(propertyName), key), idValueSource); + } + + AggregatePath toAggregatePath(String path) { + return context.getAggregatePath(getPersistentPropertyPath(path)); + } + + PersistentPropertyPath getPersistentPropertyPath(String propertyName) { + return context.getPersistentPropertyPath(propertyName, DummyEntity.class); + } + + Identifier createBackRef(long value) { + return forBackReferences(converter, toAggregatePath("content"), value).build(); + } + + PersistentPropertyPath toPath(String path) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(DummyEntity.class, p -> true); + + return persistentPropertyPaths.filter(p -> p.toDotPath().equals(path)).stream().findFirst() + .orElseThrow(() -> new IllegalArgumentException("No matching path found")); + } + + @SuppressWarnings("unused") + private static class DummyEntity { + + @Id Long id; + + Content content; + + ContentImmutableId contentImmutableId; + + List list = new ArrayList<>(); + } + + private static class Content { + @Id Long id; + } + + record ContentImmutableId(@Id Long id) { + } + +} diff --git 
a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateTemplateSchemaIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateTemplateSchemaIntegrationTests.java new file mode 100644 index 0000000000..b104935530 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateTemplateSchemaIntegrationTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature.*; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; + +/** + * Integration tests for {@link JdbcAggregateTemplate} using an entity mapped with an explicit schema. + * + * @author Jens Schauder + */ +@IntegrationTest +public class JdbcAggregateTemplateSchemaIntegrationTests { + + @Autowired JdbcAggregateOperations template; + @Autowired NamedParameterJdbcOperations jdbcTemplate; + + @Test + public void insertFindUpdateDelete() { + + DummyEntity entity = new DummyEntity(); + entity.name = "Alfred"; + entity.reference = new Referenced(); + entity.reference.name = "Peter"; + + template.save(entity); + + DummyEntity reloaded = template.findById(entity.id, DummyEntity.class); + + assertThat(reloaded).isNotNull(); + + reloaded.name += " E. 
Neumann"; + + template.save(reloaded); + + template.deleteById(reloaded.id, DummyEntity.class); + } + + static class DummyEntity { + + @Id Long id; + String name; + Referenced reference; + } + + static class Referenced { + String name; + } + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + Class testClass() { + return JdbcAggregateTemplateSchemaIntegrationTests.class; + } + + @Bean + JdbcAggregateOperations operations(ApplicationEventPublisher publisher, RelationalMappingContext context, + DataAccessStrategy dataAccessStrategy, JdbcConverter converter) { + return new JdbcAggregateTemplate(publisher, context, converter, dataAccessStrategy); + } + + @Bean + NamingStrategy namingStrategy() { + return new NamingStrategy() { + @Override + public String getSchema() { + return "other"; + } + }; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateTemplateUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateTemplateUnitTests.java new file mode 100644 index 0000000000..f5af0c6ba9 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/JdbcAggregateTemplateUnitTests.java @@ -0,0 +1,540 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.jdbc.core.convert.RelationResolver; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.event.AfterConvertCallback; +import org.springframework.data.relational.core.mapping.event.AfterDeleteCallback; +import org.springframework.data.relational.core.mapping.event.AfterSaveCallback; +import org.springframework.data.relational.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.relational.core.mapping.event.BeforeDeleteCallback; +import org.springframework.data.relational.core.mapping.event.BeforeSaveCallback; +import 
org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.query.Query; + +import java.util.List; +import java.util.Optional; + +/** + * Unit tests for {@link JdbcAggregateTemplate}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Milan Milanov + * @author Chirag Tailor + */ +@ExtendWith(MockitoExtension.class) +public class JdbcAggregateTemplateUnitTests { + + JdbcAggregateTemplate template; + + @Mock DataAccessStrategy dataAccessStrategy; + @Mock ApplicationEventPublisher eventPublisher; + @Mock RelationResolver relationResolver; + @Mock EntityCallbacks callbacks; + + @BeforeEach + void setUp() { + + RelationalMappingContext mappingContext = new RelationalMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(mappingContext, relationResolver); + + template = new JdbcAggregateTemplate(eventPublisher, mappingContext, converter, dataAccessStrategy); + template.setEntityCallbacks(callbacks); + + } + + @Test // DATAJDBC-378 + void findAllByIdMustNotAcceptNullArgumentForType() { + assertThatThrownBy(() -> template.findAllById(singleton(23L), null)).isInstanceOf(IllegalArgumentException.class); + } + + @Test // DATAJDBC-378 + void findAllByIdMustNotAcceptNullArgumentForIds() { + + assertThatThrownBy(() -> template.findAllById(null, SampleEntity.class)) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test // DATAJDBC-378 + void findAllByIdWithEmptyListMustReturnEmptyResult() { + assertThat(template.findAllById(emptyList(), SampleEntity.class)).isEmpty(); + } + + @Test // DATAJDBC-393, GH-1291 + void callbackOnSave() { + + SampleEntity first = new SampleEntity(null, "Alfred"); + SampleEntity second = new SampleEntity(23L, "Alfred E."); + SampleEntity third = new SampleEntity(23L, "Neumann"); + + when(callbacks.callback(any(Class.class), any(), any(Object[].class))).thenReturn(second, third); + + SampleEntity last = template.save(first); + + 
verify(callbacks).callback(BeforeConvertCallback.class, first); + verify(callbacks).callback(eq(BeforeSaveCallback.class), eq(second), any(MutableAggregateChange.class)); + verify(callbacks).callback(AfterSaveCallback.class, third); + assertThat(last).isEqualTo(third); + verify(eventPublisher, times(3)).publishEvent(any(Object.class)); + } + + @Test // GH-1291 + void doesNotEmitEvents() { + + SampleEntity first = new SampleEntity(null, "Alfred"); + SampleEntity second = new SampleEntity(23L, "Alfred E."); + SampleEntity third = new SampleEntity(23L, "Neumann"); + + when(callbacks.callback(any(Class.class), any(), any(Object[].class))).thenReturn(second, third); + + template.setEntityLifecycleEventsEnabled(false); + template.save(first); + + verifyNoInteractions(eventPublisher); + } + + @Test // GH-1137 + void savePreparesInstanceWithInitialVersion_onInsert() { + + EntityWithVersion entity = new EntityWithVersion(1L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateRootCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), aggregateRootCaptor.capture(), any()); + + EntityWithVersion afterConvert = (EntityWithVersion) aggregateRootCaptor.getValue(); + assertThat(afterConvert.getVersion()).isEqualTo(0L); + } + + @Test // GH-1137 + void savePreparesInstanceWithInitialVersion_onInsert_whenVersionPropertyIsImmutable() { + + EntityWithImmutableVersion entity = new EntityWithImmutableVersion(1L, null); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateRootCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), aggregateRootCaptor.capture(), any()); + + EntityWithImmutableVersion afterConvert = (EntityWithImmutableVersion) aggregateRootCaptor.getValue(); + 
assertThat(afterConvert.getVersion()).isEqualTo(0L); + } + + @Test // GH-1137 + void savePreparesInstanceWithInitialVersion_onInsert_whenVersionPropertyIsPrimitiveType() { + + EntityWithPrimitiveVersion entity = new EntityWithPrimitiveVersion(1L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateRootCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), aggregateRootCaptor.capture(), any()); + + EntityWithPrimitiveVersion afterConvert = (EntityWithPrimitiveVersion) aggregateRootCaptor.getValue(); + assertThat(afterConvert.getVersion()).isEqualTo(1L); + } + + @Test // GH-1137 + void savePreparesInstanceWithInitialVersion_onInsert__whenVersionPropertyIsImmutableAndPrimitiveType() { + + EntityWithImmutablePrimitiveVersion entity = new EntityWithImmutablePrimitiveVersion(1L, 0L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateRootCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), aggregateRootCaptor.capture(), any()); + + EntityWithImmutablePrimitiveVersion afterConvert = (EntityWithImmutablePrimitiveVersion) aggregateRootCaptor + .getValue(); + assertThat(afterConvert.getVersion()).isEqualTo(1L); + } + + @Test // GH-1137 + void savePreparesChangeWithPreviousVersion_onUpdate() { + + when(dataAccessStrategy.updateWithVersion(any(), any(), any())).thenReturn(true); + EntityWithVersion entity = new EntityWithVersion(1L); + entity.setVersion(1L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateChangeCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), any(), aggregateChangeCaptor.capture()); + + MutableAggregateChange 
aggregateChange = (MutableAggregateChange) aggregateChangeCaptor.getValue(); + assertThat(aggregateChange.getPreviousVersion()).isEqualTo(1L); + } + + @Test // GH-1137 + void savePreparesInstanceWithNextVersion_onUpdate() { + + when(dataAccessStrategy.updateWithVersion(any(), any(), any())).thenReturn(true); + EntityWithVersion entity = new EntityWithVersion(1L); + entity.setVersion(1L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateRootCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), aggregateRootCaptor.capture(), any()); + + EntityWithVersion afterConvert = (EntityWithVersion) aggregateRootCaptor.getValue(); + assertThat(afterConvert.getVersion()).isEqualTo(2L); + } + + @Test // GH-1137 + void savePreparesInstanceWithNextVersion_onUpdate_whenVersionPropertyIsImmutable() { + + when(dataAccessStrategy.updateWithVersion(any(), any(), any())).thenReturn(true); + EntityWithImmutableVersion entity = new EntityWithImmutableVersion(1L, 1L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.save(entity); + + ArgumentCaptor aggregateRootCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeSaveCallback.class), aggregateRootCaptor.capture(), any()); + EntityWithImmutableVersion afterConvert = (EntityWithImmutableVersion) aggregateRootCaptor.getValue(); + assertThat(afterConvert.getVersion()).isEqualTo(2L); + } + + @Test // GH-1137 + void deletePreparesChangeWithPreviousVersion_onDeleteByInstance() { + + EntityWithImmutableVersion entity = new EntityWithImmutableVersion(1L, 1L); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(entity, entity); + + template.delete(entity); + + ArgumentCaptor aggregateChangeCaptor = ArgumentCaptor.forClass(Object.class); + verify(callbacks).callback(eq(BeforeDeleteCallback.class), 
any(), aggregateChangeCaptor.capture()); + + MutableAggregateChange aggregateChange = (MutableAggregateChange) aggregateChangeCaptor.getValue(); + assertThat(aggregateChange.getPreviousVersion()).isEqualTo(1L); + } + + @Test // DATAJDBC-393 + void callbackOnDelete() { + + SampleEntity first = new SampleEntity(23L, "Alfred"); + SampleEntity second = new SampleEntity(23L, "Alfred E."); + + when(callbacks.callback(any(Class.class), any(), any())).thenReturn(second); + + template.delete(first); + + verify(callbacks).callback(eq(BeforeDeleteCallback.class), eq(first), any(MutableAggregateChange.class)); + verify(callbacks).callback(AfterDeleteCallback.class, second); + } + + @Test // DATAJDBC-101 + void callbackOnLoadSorted() { + + SampleEntity alfred1 = new SampleEntity(23L, "Alfred"); + SampleEntity alfred2 = new SampleEntity(23L, "Alfred E."); + + SampleEntity neumann1 = new SampleEntity(42L, "Neumann"); + SampleEntity neumann2 = new SampleEntity(42L, "Alfred E. Neumann"); + + when(dataAccessStrategy.findAll(SampleEntity.class, Sort.by("name"))).thenReturn(asList(alfred1, neumann1)); + + when(callbacks.callback(any(Class.class), eq(alfred1), any(Object[].class))).thenReturn(alfred2); + when(callbacks.callback(any(Class.class), eq(neumann1), any(Object[].class))).thenReturn(neumann2); + + Iterable all = template.findAll(SampleEntity.class, Sort.by("name")); + + verify(callbacks).callback(AfterConvertCallback.class, alfred1); + verify(callbacks).callback(AfterConvertCallback.class, neumann1); + + assertThat(all).containsExactly(alfred2, neumann2); + } + + @Test // DATAJDBC-101 + void callbackOnLoadPaged() { + + SampleEntity alfred1 = new SampleEntity(23L, "Alfred"); + SampleEntity alfred2 = new SampleEntity(23L, "Alfred E."); + + SampleEntity neumann1 = new SampleEntity(42L, "Neumann"); + SampleEntity neumann2 = new SampleEntity(42L, "Alfred E. 
Neumann"); + + PageRequest pageRequest = PageRequest.of(0, 20); + when(dataAccessStrategy.findAll(SampleEntity.class, pageRequest)).thenReturn(asList(alfred1, neumann1)); + + when(callbacks.callback(any(Class.class), eq(alfred1), any(Object[].class))).thenReturn(alfred2); + when(callbacks.callback(any(Class.class), eq(neumann1), any(Object[].class))).thenReturn(neumann2); + + Iterable all = template.findAll(SampleEntity.class, pageRequest); + + verify(callbacks).callback(AfterConvertCallback.class, alfred1); + verify(callbacks).callback(AfterConvertCallback.class, neumann1); + + assertThat(all).containsExactly(alfred2, neumann2); + } + + @Test // GH-1979 + void callbackOnFindAllByQuery() { + + SampleEntity alfred1 = new SampleEntity(23L, "Alfred"); + SampleEntity alfred2 = new SampleEntity(23L, "Alfred E."); + + SampleEntity neumann1 = new SampleEntity(42L, "Neumann"); + SampleEntity neumann2 = new SampleEntity(42L, "Alfred E. Neumann"); + + Query query = Query.query(Criteria.where("not relevant").is("for test")); + + when(dataAccessStrategy.findAll(query, SampleEntity.class)).thenReturn(asList(alfred1, neumann1)); + + when(callbacks.callback(any(Class.class), eq(alfred1), any(Object[].class))).thenReturn(alfred2); + when(callbacks.callback(any(Class.class), eq(neumann1), any(Object[].class))).thenReturn(neumann2); + + Iterable all = template.findAll(query, SampleEntity.class); + + verify(callbacks).callback(AfterConvertCallback.class, alfred1); + verify(callbacks).callback(AfterConvertCallback.class, neumann1); + + assertThat(all).containsExactly(alfred2, neumann2); + } + + @Test // GH-1979 + void callbackOnFindOneByQuery() { + + SampleEntity alfred1 = new SampleEntity(23L, "Alfred"); + SampleEntity alfred2 = new SampleEntity(23L, "Alfred E."); + + Query query = Query.query(Criteria.where("not relevant").is("for test")); + + when(dataAccessStrategy.findOne(query, SampleEntity.class)).thenReturn(Optional.of(alfred1)); + + when(callbacks.callback(any(Class.class), 
eq(alfred1), any(Object[].class))).thenReturn(alfred2); + + Optional all = template.findOne(query, SampleEntity.class); + + verify(callbacks).callback(AfterConvertCallback.class, alfred1); + + assertThat(all).contains(alfred2); + } + + @Test // GH-1401 + void saveAllWithEmptyListDoesNothing() { + assertThat(template.saveAll(emptyList())).isEmpty(); + } + + @Test // GH-1401 + void insertAllWithEmptyListDoesNothing() { + assertThat(template.insertAll(emptyList())).isEmpty(); + } + + @Test // GH-1401 + void updateAllWithEmptyListDoesNothing() { + assertThat(template.updateAll(emptyList())).isEmpty(); + } + + @Test // GH-1401 + void deleteAllWithEmptyListDoesNothing() { + template.deleteAll(emptyList()); + } + + @Test // GH-1401 + void deleteAllByIdWithEmptyListDoesNothing() { + template.deleteAllById(emptyList(), SampleEntity.class); + } + + @Test // GH-1502 + void saveThrowsExceptionWhenIdIsNotSet() { + + SampleEntity alfred = new SampleEntity(null, "Alfred"); + when(callbacks.callback(any(), any(), any(Object[].class))).thenReturn(alfred); + + when(dataAccessStrategy.insert(eq(alfred), any(Class.class), any(Identifier.class), any(IdValueSource.class))) + .thenReturn(null); + + assertThatIllegalArgumentException().isThrownBy(() -> template.save(alfred)) + .withMessage("After saving the identifier must not be null"); + } + + @Test // GH-1502 + void saveThrowsExceptionWhenIdDoesNotExist() { + + NoIdEntity alfred = new NoIdEntity("Alfred"); + + assertThatIllegalStateException().isThrownBy(() -> template.save(alfred)) + .withMessage("Required identifier property not found for class %s".formatted(NoIdEntity.class.getName())); + } + + @Test // GH-1502 + void saveThrowsExceptionWhenIdDoesNotExistOnSaveAll() { + + NoIdEntity alfred = new NoIdEntity("Alfred"); + NoIdEntity berta = new NoIdEntity("Berta"); + + assertThatIllegalStateException().isThrownBy(() -> template.saveAll( List.of(alfred, berta))) + .withMessage("Required identifier property not found for class 
%s".formatted(NoIdEntity.class.getName())); + } + + private static class SampleEntity { + + @Column("id1") + @Id private Long id; + + private String name; + + public SampleEntity(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + void setId(Long id) { + this.id = id; + } + + void setName(String name) { + this.name = name; + } + } + + private static class EntityWithVersion { + + @Column("id1") + @Id private final Long id; + + @Version private Long version; + + public EntityWithVersion(Long id) { + this.id = id; + } + + public Long getId() { + return this.id; + } + + public Long getVersion() { + return this.version; + } + + void setVersion(Long version) { + this.version = version; + } + } + + private static class EntityWithImmutableVersion { + + @Column("id1") + @Id private final Long id; + + @Version private final Long version; + + public EntityWithImmutableVersion(Long id, Long version) { + this.id = id; + this.version = version; + } + + public Long getId() { + return this.id; + } + + public Long getVersion() { + return this.version; + } + } + + private static class EntityWithPrimitiveVersion { + + @Column("id1") + @Id private final Long id; + + @Version private long version; + + public EntityWithPrimitiveVersion(Long id) { + this.id = id; + } + + public Long getId() { + return this.id; + } + + public long getVersion() { + return this.version; + } + + void setVersion(long version) { + this.version = version; + } + } + + private static class EntityWithImmutablePrimitiveVersion { + + @Column("id1") + @Id private final Long id; + + @Version private final long version; + + public EntityWithImmutablePrimitiveVersion(Long id, long version) { + this.id = id; + this.version = version; + } + + public Long getId() { + return this.id; + } + + public long getVersion() { + return this.version; + } + } + + record NoIdEntity(String name) { + } +} diff --git 
a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/PersistentPropertyPathTestUtils.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/PersistentPropertyPathTestUtils.java new file mode 100644 index 0000000000..05da8535ef --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/PersistentPropertyPathTestUtils.java @@ -0,0 +1,47 @@ +package org.springframework.data.jdbc.core; + +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Utility class for easy creation of {@link PersistentPropertyPath} instances for tests. 
+ * + * @author Jens Schauder + */ +public final class PersistentPropertyPathTestUtils { + + private PersistentPropertyPathTestUtils() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + public static PersistentPropertyPath getPath(String path, Class source, + RelationalMappingContext context) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(source, p -> true); + + return persistentPropertyPaths + .filter(p -> p.toDotPath().equals(path)) + .stream() + .findFirst() + .orElse(null); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/AggregateReferenceConvertersUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/AggregateReferenceConvertersUnitTests.java new file mode 100644 index 0000000000..eab84c6a76 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/AggregateReferenceConvertersUnitTests.java @@ -0,0 +1,99 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.core.ResolvableType; +import org.springframework.core.convert.TypeDescriptor; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.data.jdbc.core.mapping.AggregateReference; + +/** + * Tests for converters from an to {@link org.springframework.data.jdbc.core.mapping.AggregateReference}. + * + * @author Jens Schauder + * @author Mark Paluch + */ +class AggregateReferenceConvertersUnitTests { + + ConfigurableConversionService conversionService; + + @BeforeEach + void setUp() { + conversionService = new DefaultConversionService(); + AggregateReferenceConverters.getConvertersToRegister(DefaultConversionService.getSharedInstance()) + .forEach(it -> conversionService.addConverter(it)); + } + + @Test // GH-992 + void convertsFromSimpleValue() { + + ResolvableType aggregateReferenceWithIdTypeInteger = ResolvableType.forClassWithGenerics(AggregateReference.class, + String.class, Integer.class); + Object converted = conversionService.convert(23, TypeDescriptor.forObject(23), + new TypeDescriptor(aggregateReferenceWithIdTypeInteger, null, null)); + + assertThat(converted).isEqualTo(AggregateReference.to(23)); + } + + @Test // GH-992 + void convertsFromSimpleValueThatNeedsSeparateConversion() { + + ResolvableType aggregateReferenceWithIdTypeInteger = ResolvableType.forClassWithGenerics(AggregateReference.class, + String.class, Long.class); + Object converted = conversionService.convert(23, TypeDescriptor.forObject(23), + new TypeDescriptor(aggregateReferenceWithIdTypeInteger, null, null)); + + assertThat(converted).isEqualTo(AggregateReference.to(23L)); + } + + @Test // GH-992 + void 
convertsFromSimpleValueWithMissingTypeInformation() { + + Object converted = conversionService.convert(23, TypeDescriptor.forObject(23), + TypeDescriptor.valueOf(AggregateReference.class)); + + assertThat(converted).isEqualTo(AggregateReference.to(23)); + } + + @Test // GH-992 + void convertsToSimpleValue() { + + AggregateReference source = AggregateReference.to(23); + + Object converted = conversionService.convert(source, TypeDescriptor.forObject(source), + TypeDescriptor.valueOf(Integer.class)); + + assertThat(converted).isEqualTo(23); + } + + @Test // GH-992 + void convertsToSimpleValueThatNeedsSeparateConversion() { + + AggregateReference source = AggregateReference.to(23); + + Object converted = conversionService.convert(source, TypeDescriptor.forObject(source), + TypeDescriptor.valueOf(Long.class)); + + assertThat(converted).isEqualTo(23L); + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ArrayUtilsUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ArrayUtilsUnitTests.java new file mode 100644 index 0000000000..683e2a169a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ArrayUtilsUnitTests.java @@ -0,0 +1,55 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.data.Offset.offset; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ArrayUtils}. + * + * @author Mark Paluch + */ +class ArrayUtilsUnitTests { + + @Test + void testCreatePrimitiveArray() { + + assertThat(ArrayUtils.toPrimitive(new Boolean[] { true })).isEqualTo(new boolean[] { true }); + assertThat(ArrayUtils.toPrimitive(new Byte[] { 1 })).isEqualTo(new byte[] { 1 }); + assertThat(ArrayUtils.toPrimitive(new Character[] { 'a' })).isEqualTo(new char[] { 'a' }); + assertThat(ArrayUtils.toPrimitive(new Double[] { 2.718 })).contains(new double[] { 2.718 }, offset(0.1)); + assertThat(ArrayUtils.toPrimitive(new Float[] { 3.14f })).contains(new float[] { 3.14f }, offset(0.1f)); + assertThat(ArrayUtils.toPrimitive(new Integer[] {})).isEqualTo(new int[] {}); + assertThat(ArrayUtils.toPrimitive(new Long[] { 2L, 3L })).isEqualTo(new long[] { 2, 3 }); + assertThat(ArrayUtils.toPrimitive(new Short[] { 2 })).isEqualTo(new short[] { 2 }); + } + + @Test + void testCreatePrimitiveArrayViaObjectArray() { + + assertThat(ArrayUtils.toPrimitive(new Boolean[] { true })).isEqualTo(new boolean[] { true }); + assertThat(ArrayUtils.toPrimitive(new Byte[] { 1 })).isEqualTo(new byte[] { 1 }); + assertThat(ArrayUtils.toPrimitive(new Character[] { 'a' })).isEqualTo(new char[] { 'a' }); + assertThat(ArrayUtils.toPrimitive(new Double[] { 2.718 })).contains(new double[] { 2.718 }, offset(0.1)); + assertThat(ArrayUtils.toPrimitive(new Float[] { 3.14f })).contains(new float[] { 3.14f }, offset(0.1f)); + assertThat(ArrayUtils.toPrimitive(new Integer[] {})).isEqualTo(new int[] {}); + assertThat(ArrayUtils.toPrimitive(new Long[] { 2L, 3L })).isEqualTo(new long[] { 2, 3 }); + assertThat(ArrayUtils.toPrimitive(new Short[] { 2 })).isEqualTo(new short[] { 2 }); + } +} diff --git 
a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/BasicRelationalConverterAggregateReferenceUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/BasicRelationalConverterAggregateReferenceUnitTests.java new file mode 100644 index 0000000000..e2b25f5087 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/BasicRelationalConverterAggregateReferenceUnitTests.java @@ -0,0 +1,70 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.util.TypeInformation; + +/** + * Unit tests for the handling of {@link AggregateReference}s in the {@link MappingJdbcConverter}. 
+ * + * @author Jens Schauder + */ +public class BasicRelationalConverterAggregateReferenceUnitTests { + + JdbcMappingContext context = new JdbcMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, mock(RelationResolver.class)); + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + @Test // DATAJDBC-221 + public void convertsToAggregateReference() { + + final RelationalPersistentProperty property = entity.getRequiredPersistentProperty("reference"); + + Object readValue = converter.readValue(23, property.getTypeInformation()); + + Assertions.assertThat(readValue).isInstanceOf(AggregateReference.class); + assertThat(((AggregateReference) readValue).getId()).isEqualTo(23L); + } + + @Test // DATAJDBC-221 + public void convertsFromAggregateReference() { + + final RelationalPersistentProperty property = entity.getRequiredPersistentProperty("reference"); + + AggregateReference reference = AggregateReference.to(23); + + Object writeValue = converter.writeValue(reference, TypeInformation.of(converter.getColumnType(property))); + + Assertions.assertThat(writeValue).isEqualTo(23L); + } + + private static class DummyEntity { + + @Id Long simple; + AggregateReference reference; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/BindParameterNameSanitizerUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/BindParameterNameSanitizerUnitTests.java new file mode 100644 index 0000000000..16d8000b62 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/BindParameterNameSanitizerUnitTests.java @@ -0,0 +1,37 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link BindParameterNameSanitizer}. + * + * @author Mark Paluch + */ +class BindParameterNameSanitizerUnitTests { + + @Test + void shouldSanitizeNames() { + + assertThat(BindParameterNameSanitizer.sanitize("___oldOptimisticLockingVersion")) + .isEqualTo("___oldOptimisticLockingVersion"); + assertThat(BindParameterNameSanitizer.sanitize("fooBar")).isEqualTo("fooBar"); + assertThat(BindParameterNameSanitizer.sanitize("one.two.three")).isEqualTo("onetwothree"); + } +} diff --git a/src/test/java/org/springframework/data/jdbc/core/CascadingDataAccessStrategyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/CascadingDataAccessStrategyUnitTests.java similarity index 74% rename from src/test/java/org/springframework/data/jdbc/core/CascadingDataAccessStrategyUnitTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/CascadingDataAccessStrategyUnitTests.java index 2313da7ae7..5c9dd20c5f 100644 --- a/src/test/java/org/springframework/data/jdbc/core/CascadingDataAccessStrategyUnitTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/CascadingDataAccessStrategyUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2019-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.jdbc.core; +package org.springframework.data.jdbc.core.convert; import static java.util.Arrays.*; import static org.assertj.core.api.Assertions.*; @@ -23,9 +23,10 @@ import java.util.Collections; -import org.junit.Test; -import org.springframework.data.jdbc.core.FunctionCollector.CombinedDataAccessException; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.convert.FunctionCollector.CombinedDataAccessException; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.sql.SqlIdentifier; /** * Unit tests for {@link CascadingDataAccessStrategy}. @@ -35,11 +36,11 @@ public class CascadingDataAccessStrategyUnitTests { int errorIndex = 1; - String[] errorMessages = {"Sorry I don't support this method. 
Please try again later", "Still no luck"}; + String[] errorMessages = { "Sorry I don't support this method; Please try again later", "Still no luck" }; DataAccessStrategy alwaysFails = mock(DataAccessStrategy.class, i -> { - errorIndex ++; - errorIndex %=2; + errorIndex++; + errorIndex %= 2; throw new UnsupportedOperationException(errorMessages[errorIndex]); }); DataAccessStrategy succeeds = mock(DataAccessStrategy.class); @@ -47,7 +48,6 @@ public class CascadingDataAccessStrategyUnitTests { throw new AssertionFailedError("this shouldn't have get called"); }); - @Test // DATAJDBC-123 public void findByReturnsFirstSuccess() { @@ -75,10 +75,12 @@ public void findByFailsIfAllStrategiesFail() { @Test // DATAJDBC-123 public void findByPropertyReturnsFirstSuccess() { - doReturn(Collections.singletonList("success")).when(succeeds).findAllByProperty(eq(23L), any(JdbcPersistentProperty.class)); + Identifier identifier = Identifier.of(SqlIdentifier.quoted("id-name"), 23L, Long.class); + doReturn(Collections.singletonList("success")).when(succeeds).findAllByPath(eq(identifier), + any(PersistentPropertyPath.class)); CascadingDataAccessStrategy access = new CascadingDataAccessStrategy(asList(alwaysFails, succeeds, mayNotCall)); - Iterable findAll = access.findAllByProperty(23L, mock(JdbcPersistentProperty.class)); + Iterable findAll = access.findAllByPath(identifier, mock(PersistentPropertyPath.class)); assertThat(findAll).containsExactly("success"); } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/DefaultDataAccessStrategyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/DefaultDataAccessStrategyUnitTests.java new file mode 100644 index 0000000000..0f06834d05 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/DefaultDataAccessStrategyUnitTests.java @@ -0,0 +1,131 @@ +/* + * Copyright 2017-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static java.util.Collections.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.dialect.JdbcHsqlDbDialect; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; + +/** + * Unit tests for {@link DefaultDataAccessStrategy}. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Myeonghyeon Lee + * @author Myat Min + * @author Radim Tlusty + * @author Chirag Tailor + */ +class DefaultDataAccessStrategyUnitTests { + + static final long ORIGINAL_ID = 4711L; + + private final NamedParameterJdbcOperations namedJdbcOperations = mock(NamedParameterJdbcOperations.class); + private final JdbcOperations jdbcOperations = mock(JdbcOperations.class); + private final RelationalMappingContext context = new JdbcMappingContext(); + private final SqlParametersFactory sqlParametersFactory = mock(SqlParametersFactory.class); + private final InsertStrategyFactory insertStrategyFactory = mock(InsertStrategyFactory.class); + + private JdbcConverter converter; + private DataAccessStrategy accessStrategy; + + @BeforeEach + void before() { + + DelegatingDataAccessStrategy relationResolver = new DelegatingDataAccessStrategy(); + Dialect dialect = JdbcHsqlDbDialect.INSTANCE; + converter = new MappingJdbcConverter(context, relationResolver, new JdbcCustomConversions(), + new DefaultJdbcTypeFactory(jdbcOperations)); + accessStrategy = new DataAccessStrategyFactory( // + new SqlGeneratorSource(context, converter, dialect), // + converter, // + namedJdbcOperations, // + sqlParametersFactory, // + insertStrategyFactory).create(); + + relationResolver.setDelegate(accessStrategy); + + when(sqlParametersFactory.forInsert(any(), any(), any(), any())).thenReturn(new SqlIdentifierParameterSource()); + when(insertStrategyFactory.insertStrategy(any(), any())).thenReturn(mock(InsertStrategy.class)); + when(insertStrategyFactory.batchInsertStrategy(any(), any())).thenReturn(mock(BatchInsertStrategy.class)); + } + + @Test // GH-1159 + void insert() { + + accessStrategy.insert(new DummyEntity(ORIGINAL_ID), DummyEntity.class, Identifier.empty(), IdValueSource.PROVIDED); + + verify(insertStrategyFactory).insertStrategy(IdValueSource.PROVIDED, SqlIdentifier.quoted("ID")); + } + + @Test // GH-1159 + void batchInsert() { + 
+ accessStrategy.insert(singletonList(InsertSubject.describedBy(new DummyEntity(ORIGINAL_ID), Identifier.empty())), + DummyEntity.class, IdValueSource.PROVIDED); + + verify(insertStrategyFactory).batchInsertStrategy(IdValueSource.PROVIDED, SqlIdentifier.quoted("ID")); + } + + @Test // GH-1159 + void insertForEntityWithNoId() { + + accessStrategy.insert(new DummyEntityWithoutIdAnnotation(ORIGINAL_ID), DummyEntityWithoutIdAnnotation.class, + Identifier.empty(), IdValueSource.GENERATED); + + verify(insertStrategyFactory).insertStrategy(IdValueSource.GENERATED, null); + } + + @Test // GH-1159 + void batchInsertForEntityWithNoId() { + + accessStrategy.insert( + singletonList(InsertSubject.describedBy(new DummyEntityWithoutIdAnnotation(ORIGINAL_ID), Identifier.empty())), + DummyEntityWithoutIdAnnotation.class, IdValueSource.GENERATED); + + verify(insertStrategyFactory).batchInsertStrategy(IdValueSource.GENERATED, null); + } + + private static class DummyEntity { + + @Id private final Long id; + + public DummyEntity(Long id) { + this.id = id; + } + } + + private static class DummyEntityWithoutIdAnnotation { + + private final Long id; + + public DummyEntityWithoutIdAnnotation(Long id) { + this.id = id; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/DefaultJdbcTypeFactoryTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/DefaultJdbcTypeFactoryTest.java new file mode 100644 index 0000000000..ddcb65bc4d --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/DefaultJdbcTypeFactoryTest.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.sql.Array; +import java.sql.SQLException; +import java.util.UUID; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.postgresql.core.BaseConnection; +import org.springframework.data.jdbc.core.dialect.JdbcPostgresDialect; +import org.springframework.jdbc.core.ConnectionCallback; +import org.springframework.jdbc.core.JdbcOperations; + +/** + * Unit tests for {@link DefaultJdbcTypeFactory}. 
+ * + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +class DefaultJdbcTypeFactoryTest { + + @Mock JdbcOperations operations; + @Mock BaseConnection connection; + + @Test // GH-1567 + void shouldProvidePostgresArrayType() throws SQLException { + + DefaultJdbcTypeFactory sut = new DefaultJdbcTypeFactory(operations, JdbcPostgresDialect.INSTANCE.getArraySupport()); + + when(operations.execute(any(ConnectionCallback.class))).thenAnswer(invocation -> { + + ConnectionCallback callback = invocation.getArgument(0, ConnectionCallback.class); + return callback.doInConnection(connection); + }); + + UUID uuids[] = new UUID[] { UUID.randomUUID(), UUID.randomUUID() }; + when(connection.createArrayOf("uuid", uuids)).thenReturn(mock(Array.class)); + Array array = sut.createArray(uuids); + + assertThat(array).isNotNull(); + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/EntityRowMapperUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/EntityRowMapperUnitTests.java new file mode 100644 index 0000000000..507fcd3dc0 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/EntityRowMapperUnitTests.java @@ -0,0 +1,1426 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import javax.naming.OperationNotSupportedException; + +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.annotation.Transient; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.repository.query.Param; +import org.springframework.util.Assert; +import org.springframework.util.LinkedCaseInsensitiveMap; + +/** + * Tests 
the extraction of entities from a {@link ResultSet} by the {@link EntityRowMapper}. + * + * @author Jens Schauder + * @author Mark Paluch + * @author Maciej Walkowiak + * @author Bastian Wilhelm + * @author Christoph Strobl + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +public class EntityRowMapperUnitTests { + + static final long ID_FOR_ENTITY_REFERENCING_MAP = 42L; + static final long ID_FOR_ENTITY_REFERENCING_LIST = 4711L; + static final long ID_FOR_ENTITY_NOT_REFERENCING_MAP = 23L; + static final NamingStrategy X_APPENDING_NAMINGSTRATEGY=new NamingStrategy(){@Override public String getColumnName(RelationalPersistentProperty property){return NamingStrategy.super.getColumnName(property).concat("x");}}; + + @Test // DATAJDBC-113 + void simpleEntitiesGetProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + rs.next(); + + Trivial extracted = createRowMapper(Trivial.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + } + + @Test // DATAJDBC-181 + void namingStrategyGetsHonored() throws SQLException { + + ResultSet rs = mockResultSet(asList("IDX", "NAMEX"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + rs.next(); + + Trivial extracted = createRowMapper(Trivial.class, X_APPENDING_NAMINGSTRATEGY).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + } + + @Test // DATAJDBC-181 + void namingStrategyGetsHonoredForConstructor() throws SQLException { + + ResultSet rs = mockResultSet(asList("IDX", "NAMEX"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + rs.next(); + + TrivialImmutable extracted = createRowMapper(TrivialImmutable.class, X_APPENDING_NAMINGSTRATEGY).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // 
+ .extracting(e -> e.id, e -> e.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + } + + @Test // DATAJDBC-427 + void simpleWithReferenceGetProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "TRIVIAL_ID"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 100L); + rs.next(); + + WithReference extracted = createRowMapper(WithReference.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.trivialId) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", AggregateReference.to(100L)); + } + + @Test // DATAJDBC-113 + void simpleOneToOneGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "CHILD_ID", "CHILD_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); + rs.next(); + + OneToOne extracted = createRowMapper(OneToOne.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.child.id, e -> e.child.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); + } + + @Test // DATAJDBC-286 + void immutableOneToOneGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "CHILD_ID", "CHILD_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); + rs.next(); + + OneToOneImmutable extracted = createRowMapper(OneToOneImmutable.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.child.id, e -> e.child.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); + } + + @Test // DATAJDBC-427 + void immutableWithReferenceGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "TRIVIAL_ID"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 100L); + rs.next(); + + WithReferenceImmutable extracted = 
createRowMapper(WithReferenceImmutable.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.trivialId) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", AggregateReference.to(100L)); + } + + // TODO add additional test for multilevel embeddables + @Test // DATAJDBC-111 + void simpleEmbeddedGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "PREFIX_ID", "PREFIX_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); + rs.next(); + + EmbeddedEntity extracted = createRowMapper(EmbeddedEntity.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children.id, e -> e.children.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); + } + + @Test // DATAJDBC-113 + void collectionReferenceGetsLoadedWithAdditionalSelect() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + rs.next(); + + OneToSet extracted = createRowMapper(OneToSet.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children.size()) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 2); + } + + @Test // DATAJDBC-131 + void mapReferenceGetsLoadedWithAdditionalSelect() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME"), // + ID_FOR_ENTITY_REFERENCING_MAP, "alpha"); + rs.next(); + + OneToMap extracted = createRowMapper(OneToMap.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children.size()) // + .containsExactly(ID_FOR_ENTITY_REFERENCING_MAP, "alpha", 2); + } + + @Test // DATAJDBC-130 + void listReferenceGetsLoadedWithAdditionalSelect() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME"), // + 
ID_FOR_ENTITY_REFERENCING_LIST, "alpha"); + rs.next(); + + OneToMap extracted = createRowMapper(OneToMap.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children.size()) // + .containsExactly(ID_FOR_ENTITY_REFERENCING_LIST, "alpha", 2); + } + + @Test // DATAJDBC-252 + void doesNotTryToSetPropertiesThatAreSetViaConstructor() throws SQLException { + + ResultSet rs = mockResultSet(singletonList("VALUE"), // + "value-from-resultSet"); + rs.next(); + + DontUseSetter extracted = createRowMapper(DontUseSetter.class).mapRow(rs, 1); + + assertThat(extracted.value) // + .isEqualTo("setThroughConstructor:value-from-resultSet"); + } + + @Test // DATAJDBC-252 + void handlesMixedProperties() throws SQLException { + + ResultSet rs = mockResultSet(asList("ONE", "TWO", "THREE"), // + "111", "222", "333"); + rs.next(); + + MixedProperties extracted = createRowMapper(MixedProperties.class).mapRow(rs, 1); + + assertThat(extracted) // + .extracting(e -> e.one, e -> e.two, e -> e.three) // + .containsSequence("111", "222", "333"); + } + + @Test // DATAJDBC-359 + void chainedEntitiesWithoutId() throws SQLException { + + // @formatter:off + Fixture fixture = this. buildFixture() // + // Id of the aggregate root and backreference to it from + // the various aggregate members. 
+ .value(4L).inColumns("FOUR", // + "CHAIN3_NO_ID_CHAIN4", // + "CHAIN3_CHAIN2_NO_ID_CHAIN4", // + "CHAIN3_CHAIN2_CHAIN1_NO_ID_CHAIN4", // + "CHAIN3_CHAIN2_CHAIN1_CHAIN0_NO_ID_CHAIN4") // + .endUpIn(e -> e.four) + // values for the different entities + .value("four_value").inColumns("FOUR_VALUE").endUpIn(e -> e.fourValue) // + + .value("three_value").inColumns("CHAIN3_THREE_VALUE").endUpIn(e -> e.chain3.threeValue) // + + .value("two_value").inColumns("CHAIN3_CHAIN2_TWO_VALUE").endUpIn(e -> e.chain3.chain2.twoValue) // + + .value("one_value").inColumns("CHAIN3_CHAIN2_CHAIN1_ONE_VALUE").endUpIn(e -> e.chain3.chain2.chain1.oneValue) // + + .value("zero_value").inColumns("CHAIN3_CHAIN2_CHAIN1_CHAIN0_ZERO_VALUE") + .endUpIn(e -> e.chain3.chain2.chain1.chain0.zeroValue) // + .build(); + // @formatter:on + + ResultSet rs = fixture.resultSet; + + rs.next(); + + NoIdChain4 extracted = createRowMapper(NoIdChain4.class).mapRow(rs, 1); + + fixture.assertOn(extracted); + } + + @Test // DATAJDBC-370 + void simpleNullableImmutableEmbeddedGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE", "NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "ru'Ha'", "Alfred"); + rs.next(); + + WithNullableEmbeddedImmutableValue extracted = createRowMapper(WithNullableEmbeddedImmutableValue.class) // + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutableValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, new ImmutableValue("ru'Ha'", "Alfred")); + } + + @Test // DATAJDBC-374 + void simpleEmptyImmutableEmbeddedGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE", "NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, null, null); + rs.next(); + + WithEmptyEmbeddedImmutableValue extracted = createRowMapper(WithEmptyEmbeddedImmutableValue.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> 
e.embeddedImmutableValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, new ImmutableValue(null, null)); + } + + @Test // DATAJDBC-370 + void simplePrimitiveImmutableEmbeddedGetsProperlyExtracted() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, 24); + rs.next(); + + WithEmbeddedPrimitiveImmutableValue extracted = createRowMapper(WithEmbeddedPrimitiveImmutableValue.class) + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutablePrimitiveValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, new ImmutablePrimitiveValue(24)); + } + + @Test // DATAJDBC-370 + void simpleImmutableEmbeddedShouldBeNullIfAllOfTheEmbeddableAreNull() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE", "NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, null, null); + rs.next(); + + WithNullableEmbeddedImmutableValue extracted = createRowMapper(WithNullableEmbeddedImmutableValue.class) // + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutableValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + } + + @Test // DATAJDBC-370 + void embeddedShouldBeNullWhenFieldsAreNull() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "PREFIX_ID", "PREFIX_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", null, null); + rs.next(); + + EmbeddedEntity extracted = createRowMapper(EmbeddedEntity.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", null); + } + + @Test // DATAJDBC-370 + void embeddedShouldNotBeNullWhenAtLeastOneFieldIsNotNull() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "PREFIX_ID", "PREFIX_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24, null); 
+ rs.next(); + + EmbeddedEntity extracted = createRowMapper(EmbeddedEntity.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", new Trivial(24L, null)); + } + + @Test // DATAJDBC-370 + void primitiveEmbeddedShouldBeNullWhenNoValuePresent() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + rs.next(); + + WithEmbeddedPrimitiveImmutableValue extracted = createRowMapper(WithEmbeddedPrimitiveImmutableValue.class) + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutablePrimitiveValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + } + + @Test // DATAJDBC-370 + void deepNestedEmbeddable() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "LEVEL0", "LEVEL1_VALUE", "LEVEL1_LEVEL2_VALUE", "LEVEL1_LEVEL2_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "0", "1", "2", "Rumpelstilzchen"); + rs.next(); + + WithDeepNestedEmbeddable extracted = createRowMapper(WithDeepNestedEmbeddable.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> extracted.level0, e -> e.level1.value, e -> e.level1.level2.value) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "0", "1", "2"); + } + + @Test // DATAJDBC-341 + void missingValueForObjectGetsMappedToZero() throws SQLException { + + ResultSet rs = mockResultSet(singletonList("id"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP); + rs.next(); + Trivial extracted = createRowMapper(Trivial.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + + } + + @Test // DATAJDBC-341 + void missingValueForConstructorArgCausesException() throws SQLException { + + ResultSet rs = 
mockResultSet(singletonList("id"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP); + rs.next(); + + TrivialImmutable trivialImmutable = createRowMapper(TrivialImmutable.class).mapRow(rs, 1); + + assertThat(trivialImmutable.id).isEqualTo(23L); + assertThat(trivialImmutable.name).isNull(); + } + + @Test // DATAJDBC-341 + void missingColumnForPrimitiveGetsMappedToZero() throws SQLException { + + ResultSet rs = mockResultSet(singletonList("id"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP); + rs.next(); + TrivialMapPropertiesToNullIfNotNeeded extracted = createRowMapper(TrivialMapPropertiesToNullIfNotNeeded.class) + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.age) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, 0); + + } + + @Test // DATAJDBC-341 + void columnNamesAreCaseInsensitive() throws SQLException { + + ResultSet rs = mockResultSet(asList("id", "name"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + rs.next(); + + Trivial extracted = createRowMapper(Trivial.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + } + + @Test // DATAJDBC-341 + void immutableEmbeddedWithAllColumnsMissingShouldBeNull() throws SQLException { + + ResultSet rs = mockResultSet(List.of("ID"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP); + rs.next(); + + WithNullableEmbeddedImmutableValue extracted = createRowMapper(WithNullableEmbeddedImmutableValue.class) // + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutableValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + } + + @Test // DATAJDBC-341 + void immutableEmbeddedWithSomeColumnsMissingShouldNotBeEmpty() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "some value"); + rs.next(); + + WithNullableEmbeddedImmutableValue result 
= createRowMapper(WithNullableEmbeddedImmutableValue.class).mapRow(rs, 1); + + assertThat(result.embeddedImmutableValue).isNotNull(); + } + + @Test // DATAJDBC-341 + void immutableEmbeddedWithSomeColumnsMissingAndSomeNullShouldBeNull() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "VALUE"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + rs.next(); + + WithNullableEmbeddedImmutableValue extracted = createRowMapper(WithNullableEmbeddedImmutableValue.class) // + .mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutableValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + } + + @Test // DATAJDBC-341 + void embeddedShouldBeNullWhenAllFieldsAreMissing() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); + rs.next(); + + EmbeddedEntity extracted = createRowMapper(EmbeddedEntity.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", null); + } + + @Test // DATAJDBC-341 + void missingColumnsInEmbeddedShouldBeUnset() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "PREFIX_ID"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24); + rs.next(); + + EmbeddedEntity extracted = createRowMapper(EmbeddedEntity.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.children) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", new Trivial(24L, null)); + } + + @Test // DATAJDBC-341 + void primitiveEmbeddedShouldBeNullWhenAllColumnsAreMissing() throws SQLException { + + ResultSet rs = mockResultSet(List.of("ID"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP); + rs.next(); + + WithEmbeddedPrimitiveImmutableValue extracted = createRowMapper(WithEmbeddedPrimitiveImmutableValue.class) + 
.mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.embeddedImmutablePrimitiveValue) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, null); + } + + @Test // DATAJDBC-341 + void oneToOneWithMissingColumnResultsInNullProperty() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "CHILD_ID"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L); + rs.next(); + + OneToOne extracted = createRowMapper(OneToOne.class).mapRow(rs, 1); + + assertThat(extracted) // + .isNotNull() // + .extracting(e -> e.id, e -> e.name, e -> e.child.id, e -> e.child.name) // + .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, null); + } + + @Test // DATAJDBC-341 + void oneToOneWithMissingIdColumnResultsInNullProperty() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "CHILD_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", "Alfred"); + rs.next(); + + OneToOne extracted = createRowMapper(OneToOne.class).mapRow(rs, 1); + + assertThat(extracted.child).isNull(); + } + + @Test // DATAJDBC-341 + void immutableOneToOneWithIdMissingColumnResultsInNullReference() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "NAME", "CHILD_NAME"), // + ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", "Alfred"); + rs.next(); + + OneToOneImmutable result = createRowMapper(OneToOneImmutable.class).mapRow(rs, 1); + + assertThat(result.id).isEqualTo(23); + assertThat(result.name).isEqualTo("alpha"); + assertThat(result.child).isNull(); + } + + @Test // DATAJDBC-508 + void materializesObjectWithAtValue() throws SQLException { + + ResultSet rs = mockResultSet(asList("ID", "FIRST_NAME"), // + 123L, "Hello World"); + rs.next(); + + WithAtValue result = createRowMapper(WithAtValue.class).mapRow(rs, 1); + + assertThat(result.getId()).isEqualTo(123L); + assertThat(result.getComputed()).isEqualTo("Hello World"); + } + + // Model classes to be used in tests + + static class TrivialImmutable 
{ + + @Id + private final Long id; + private final String name; + + public TrivialImmutable(Long id, String name) { + this.id = id; + this.name = name; + } + + public TrivialImmutable withId(Long id) { + return this.id == id ? this : new TrivialImmutable(id, this.name); + } + + public TrivialImmutable withName(String name) { + return this.name == name ? this : new TrivialImmutable(this.id, name); + } + } + + static class Trivial { + + @Id + Long id; + String name; + + public Trivial(Long id, String name) { + this.id = id; + this.name = name; + } + + public Trivial() { + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final Trivial other)) + return false; + if (!other.canEqual(this)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + protected boolean canEqual(final Object other) { + return other instanceof Trivial; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + return result; + } + } + + static class TrivialMapPropertiesToNullIfNotNeeded { + + @Id + Long id; + int age; + String phone; + Boolean isSupreme; + long referenceToCustomer; + + public TrivialMapPropertiesToNullIfNotNeeded(Long id, int age, String phone, Boolean isSupreme, long referenceToCustomer) { + this.id = id; + this.age = age; + this.phone = phone; + this.isSupreme = isSupreme; + this.referenceToCustomer = referenceToCustomer; + } + + public TrivialMapPropertiesToNullIfNotNeeded() { + } + + public Long getId() { + return this.id; + } + + public int getAge() { + return this.age; + } + + public String getPhone() { + return this.phone; + } + + public Boolean getIsSupreme() { + return this.isSupreme; + } + + public long getReferenceToCustomer() { + return this.referenceToCustomer; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final TrivialMapPropertiesToNullIfNotNeeded other)) + return false; + if (!other.canEqual(this)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + if (this.getAge() != other.getAge()) return false; + final Object this$phone = this.getPhone(); + final Object other$phone = other.getPhone(); + if (!Objects.equals(this$phone, other$phone)) + return false; + final Object this$isSupreme = this.getIsSupreme(); + final Object other$isSupreme = other.getIsSupreme(); + if (!Objects.equals(this$isSupreme, other$isSupreme)) + return false; + return this.getReferenceToCustomer() == other.getReferenceToCustomer(); + } + + protected boolean canEqual(final Object other) { + return other instanceof TrivialMapPropertiesToNullIfNotNeeded; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 
43 : $id.hashCode()); + result = result * PRIME + this.getAge(); + final Object $phone = this.getPhone(); + result = result * PRIME + ($phone == null ? 43 : $phone.hashCode()); + final Object $isSupreme = this.getIsSupreme(); + result = result * PRIME + ($isSupreme == null ? 43 : $isSupreme.hashCode()); + final long $referenceToCustomer = this.getReferenceToCustomer(); + result = result * PRIME + (int) ($referenceToCustomer >>> 32 ^ $referenceToCustomer); + return result; + } + } + + static class WithReference { + + @Id + Long id; + String name; + AggregateReference trivialId; + + public WithReference(Long id, String name, AggregateReference trivialId) { + this.id = id; + this.name = name; + this.trivialId = trivialId; + } + + public WithReference() { + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public AggregateReference getTrivialId() { + return this.trivialId; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final WithReference other)) + return false; + if (!other.canEqual(this)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (!Objects.equals(this$name, other$name)) + return false; + final Object this$trivialId = this.getTrivialId(); + final Object other$trivialId = other.getTrivialId(); + return Objects.equals(this$trivialId, other$trivialId); + } + + protected boolean canEqual(final Object other) { + return other instanceof WithReference; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + final Object $trivialId = this.getTrivialId(); + result = result * PRIME + ($trivialId == null ? 43 : $trivialId.hashCode()); + return result; + } + } + + record WithReferenceImmutable( + @Id Long id, String name, + AggregateReference trivialId){ + + public WithReferenceImmutable withId(Long id) { + return this.id == id ? this : new WithReferenceImmutable(id, this.name, this.trivialId); + } + + public WithReferenceImmutable withName(String name) { + return this.name == name ? this : new WithReferenceImmutable(this.id, name, this.trivialId); + } + + public WithReferenceImmutable withTrivialId(AggregateReference trivialId) { + return this.trivialId == trivialId ? this : new WithReferenceImmutable(this.id, this.name, trivialId); + } + } + + static class OneToOne { + + @Id Long id; + String name; + Trivial child; + } + + record OneToOneImmutable( + + @Id Long id, String name, TrivialImmutable child) { + + OneToOneImmutable() { + this(null, null, null); + } + + public OneToOneImmutable withId(Long id) { + return this.id == id ? this : new OneToOneImmutable(id, name, child); + } + + public OneToOneImmutable withName(String name) { + return this.name == name ? this : new OneToOneImmutable(id, name, child); + } + + public OneToOneImmutable withChild(TrivialImmutable child) { + return this.child == child ? 
this : new OneToOneImmutable(id, name, child); + } + } + + static class OneToSet { + + @Id Long id; + String name; + Set children; + } + + static class OneToMap { + + @Id Long id; + String name; + Map children; + } + + static class OneToList { + + @Id Long id; + String name; + List children; + } + + static class EmbeddedEntity { + + @Id Long id; + String name; + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix_") Trivial children; + } + + private static class DontUseSetter { + String value; + + DontUseSetter(@Param("value") String value) { + this.value = "setThroughConstructor:" + value; + } + } + + static class MixedProperties { + + final String one; + String two; + final String three; + + @PersistenceCreator + MixedProperties(String one) { + this.one = one; + this.three = "unset"; + } + + private MixedProperties(String one, String two, String three) { + + this.one = one; + this.two = two; + this.three = three; + } + + MixedProperties withThree(String three) { + return new MixedProperties(one, two, three); + } + } + + static class NoIdChain0 { + String zeroValue; + } + + static class NoIdChain1 { + String oneValue; + NoIdChain0 chain0; + } + + static class NoIdChain2 { + String twoValue; + NoIdChain1 chain1; + } + + static class NoIdChain3 { + String threeValue; + NoIdChain2 chain2; + } + + static class NoIdChain4 { + @Id Long four; + String fourValue; + NoIdChain3 chain3; + } + + static class WithNullableEmbeddedImmutableValue { + + @Id Long id; + @Embedded(onEmpty = OnEmpty.USE_NULL) ImmutableValue embeddedImmutableValue; + } + + static class WithEmptyEmbeddedImmutableValue { + + @Id Long id; + @Embedded.Empty ImmutableValue embeddedImmutableValue; + } + + static class WithEmbeddedPrimitiveImmutableValue { + + @Id Long id; + @Embedded.Nullable ImmutablePrimitiveValue embeddedImmutablePrimitiveValue; + } + + record ImmutableValue(Object value, String name) { + } + + record ImmutablePrimitiveValue(int value) { + } + + static class WithDeepNestedEmbeddable { 
+ + @Id Long id; + String level0; + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "level1_") EmbeddedWithEmbedded level1; + } + + static class EmbeddedWithEmbedded { + + Object value; + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "level2_") ImmutableValue level2; + } + + // Infrastructure for assertions and constructing mocks + + private FixtureBuilder buildFixture() { + return new FixtureBuilder<>(); + } + + private EntityRowMapper createRowMapper(Class type) { + return createRowMapper(type, DefaultNamingStrategy.INSTANCE); + } + + @SuppressWarnings("unchecked") + private EntityRowMapper createRowMapper(Class type, NamingStrategy namingStrategy) { + + RelationalMappingContext context = new JdbcMappingContext(namingStrategy); + + DataAccessStrategy accessStrategy = mock(DataAccessStrategy.class); + + // the ID of the entity is used to determine what kind of ResultSet is needed for subsequent selects. + Set trivials = Stream.of(new Trivial(1L, "one"), // + new Trivial(2L, "two")) // + .collect(Collectors.toSet()); + + Set> simpleEntriesWithInts = trivials.stream() + .collect(Collectors.toMap(it -> it.getId().intValue(), Function.identity())).entrySet(); + Set> simpleEntriesWithStringKeys = trivials.stream() + .collect(Collectors.toMap(Trivial::getName, Function.identity())).entrySet(); + + doReturn(trivials).when(accessStrategy).findAllByPath(identifierOfValue(ID_FOR_ENTITY_NOT_REFERENCING_MAP), + any(PersistentPropertyPath.class)); + + doReturn(simpleEntriesWithStringKeys).when(accessStrategy) + .findAllByPath(identifierOfValue(ID_FOR_ENTITY_REFERENCING_MAP), any(PersistentPropertyPath.class)); + + doReturn(simpleEntriesWithInts).when(accessStrategy) + .findAllByPath(identifierOfValue(ID_FOR_ENTITY_REFERENCING_LIST), any(PersistentPropertyPath.class)); + + doReturn(trivials).when(accessStrategy).findAllByPath(identifierOfValue(ID_FOR_ENTITY_NOT_REFERENCING_MAP), + any(PersistentPropertyPath.class)); + + doReturn(simpleEntriesWithStringKeys).when(accessStrategy) 
+ .findAllByPath(identifierOfValue(ID_FOR_ENTITY_REFERENCING_MAP), any(PersistentPropertyPath.class)); + + doReturn(simpleEntriesWithInts).when(accessStrategy) + .findAllByPath(identifierOfValue(ID_FOR_ENTITY_REFERENCING_LIST), any(PersistentPropertyPath.class)); + + MappingJdbcConverter converter = new MappingJdbcConverter(context, accessStrategy, new JdbcCustomConversions(), + JdbcTypeFactory.unsupported()); + + return new EntityRowMapper<>( // + (RelationalPersistentEntity) context.getRequiredPersistentEntity(type), // + converter // + ); + } + + private Identifier identifierOfValue(long value) { + return ArgumentMatchers.argThat(argument -> argument.toMap().containsValue(value)); + } + + private static ResultSet mockResultSet(List columns, Object... values) { + + Assert.isTrue( // + values.length % columns.size() == 0, // + String // + .format( // + "Number of values [%d] must be a multiple of the number of columns [%d]", // + values.length, // + columns.size() // + ) // + ); + + List> result = convertValues(columns, values); + + return mock(ResultSet.class, new ResultSetAnswer(columns, result)); + } + + private static List> convertValues(List columns, Object[] values) { + + List> result = new ArrayList<>(); + + int index = 0; + while (index < values.length) { + + Map row = new LinkedCaseInsensitiveMap<>(); + result.add(row); + for (String column : columns) { + + row.put(column, values[index]); + index++; + } + } + return result; + } + + private static class ResultSetAnswer implements Answer { + + private final List names; + private final List> values; + private int index = -1; + + ResultSetAnswer(List names, List> values) { + + this.names = names; + this.values = values; + } + + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + + switch (invocation.getMethod().getName()) { + case "next": + return next(); + case "getObject": + + Object argument = invocation.getArgument(0); + String name = argument instanceof Integer ? 
names.get(((Integer) argument) - 1) : (String) argument; + return getObject(name); + case "isAfterLast": + return isAfterLast(); + case "isBeforeFirst": + return isBeforeFirst(); + case "getRow": + return isAfterLast() || isBeforeFirst() ? 0 : index + 1; + case "toString": + return this.toString(); + case "findColumn": + return isThereAColumnNamed(invocation.getArgument(0)); + case "getMetaData": + ResultSetMetaData metaData = new MockedMetaData(); + return metaData; + default: + throw new OperationNotSupportedException(invocation.getMethod().getName()); + } + } + + private int isThereAColumnNamed(String name) { + + Optional> first = values.stream().filter(s -> s.equals(name)).findFirst(); + return (first.isPresent()) ? 1 : 0; + } + + private boolean isAfterLast() { + return index >= values.size() && !values.isEmpty(); + } + + private boolean isBeforeFirst() { + return index < 0 && !values.isEmpty(); + } + + private Object getObject(String column) throws SQLException { + + Map rowMap = values.get(index); + + if (!rowMap.containsKey(column)) { + throw new SQLException(String.format("Trying to access a column (%s) that does not exist", column)); + } + + return rowMap.get(column); + } + + private boolean next() { + + index++; + return index < values.size(); + } + + private class MockedMetaData implements ResultSetMetaData { + @Override + public int getColumnCount() throws SQLException { + return values.get(index).size(); + } + + @Override + public boolean isAutoIncrement(int i) throws SQLException { + return false; + } + + @Override + public boolean isCaseSensitive(int i) throws SQLException { + return false; + } + + @Override + public boolean isSearchable(int i) throws SQLException { + return false; + } + + @Override + public boolean isCurrency(int i) throws SQLException { + return false; + } + + @Override + public int isNullable(int i) throws SQLException { + return 0; + } + + @Override + public boolean isSigned(int i) throws SQLException { + return false; + } + + 
@Override + public int getColumnDisplaySize(int i) throws SQLException { + return 0; + } + + @Override + public String getColumnLabel(int i) throws SQLException { + return names.get(i - 1); + } + + @Override + public String getColumnName(int i) throws SQLException { + return null; + } + + @Override + public String getSchemaName(int i) throws SQLException { + return null; + } + + @Override + public int getPrecision(int i) throws SQLException { + return 0; + } + + @Override + public int getScale(int i) throws SQLException { + return 0; + } + + @Override + public String getTableName(int i) throws SQLException { + return null; + } + + @Override + public String getCatalogName(int i) throws SQLException { + return null; + } + + @Override + public int getColumnType(int i) throws SQLException { + return 0; + } + + @Override + public String getColumnTypeName(int i) throws SQLException { + return null; + } + + @Override + public boolean isReadOnly(int i) throws SQLException { + return false; + } + + @Override + public boolean isWritable(int i) throws SQLException { + return false; + } + + @Override + public boolean isDefinitelyWritable(int i) throws SQLException { + return false; + } + + @Override + public String getColumnClassName(int i) throws SQLException { + return null; + } + + @Override + public T unwrap(Class aClass) throws SQLException { + return null; + } + + @Override + public boolean isWrapperFor(Class aClass) throws SQLException { + return false; + } + } + } + + private interface SetValue { + SetColumns value(Object value); + + Fixture build(); + } + + private interface SetColumns { + + SetExpectation inColumns(String... 
columns); + } + + private interface SetExpectation { + SetValue endUpIn(Function extractor); + } + + private static class FixtureBuilder implements SetValue, SetColumns, SetExpectation { + + private final List values = new ArrayList<>(); + private final List columns = new ArrayList<>(); + private String explainingColumn; + private final List> expectations = new ArrayList<>(); + + @Override + public SetColumns value(Object value) { + + values.add(value); + + return this; + } + + @Override + public SetExpectation inColumns(String... columns) { + + boolean isFirst = true; + for (String column : columns) { + + // if more than one column is mentioned, we need to copy the value for all but the first column; + if (!isFirst) { + + values.add(values.get(values.size() - 1)); + } else { + + explainingColumn = column; + isFirst = false; + } + + this.columns.add(column); + } + + return this; + } + + @Override + public Fixture build() { + + return new Fixture<>(mockResultSet(columns, values.toArray()), expectations); + } + + @Override + public SetValue endUpIn(Function extractor) { + + expectations.add(new Expectation(extractor, values.get(values.size() - 1), explainingColumn)); + return this; + } + } + + private static class Fixture { + + final ResultSet resultSet; + final List> expectations; + + public Fixture(ResultSet resultSet, List> expectations) { + this.resultSet = resultSet; + this.expectations = expectations; + } + + public void assertOn(T result) { + + assertSoftly(softly -> { + expectations.forEach(expectation -> { + + softly.assertThat(expectation.extractor.apply(result)).describedAs("From column: " + expectation.sourceColumn) + .isEqualTo(expectation.expectedValue); + }); + + }); + } + } + + private static class Expectation { + + final Function extractor; + final Object expectedValue; + final String sourceColumn; + + public Expectation(Function extractor, Object expectedValue, String sourceColumn) { + this.extractor = extractor; + this.expectedValue = 
expectedValue; + this.sourceColumn = sourceColumn; + } + } + + private static class WithAtValue { + + @Id private final Long id; + private final @Transient String computed; + + public WithAtValue(Long id, + @org.springframework.beans.factory.annotation.Value("#root.first_name") String computed) { + this.id = id; + this.computed = computed; + } + + public Long getId() { + return this.id; + } + + public String getComputed() { + return this.computed; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingBatchInsertStrategyTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingBatchInsertStrategyTest.java new file mode 100644 index 0000000000..05d1b98392 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingBatchInsertStrategyTest.java @@ -0,0 +1,219 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.HashMap; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.dialect.AbstractDialect; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.IdGeneration; +import org.springframework.data.relational.core.dialect.LimitClause; +import org.springframework.data.relational.core.dialect.LockClause; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.jdbc.support.KeyHolder; + +/** + * Unit tests for {@link IdGeneratingBatchInsertStrategy}. 
+ * + * @author Chirag Tailor + */ +class IdGeneratingBatchInsertStrategyTest { + + SqlIdentifier idColumn = SqlIdentifier.quoted("id"); + IdentifierProcessing identifierProcessing = IdentifierProcessing.ANSI; + NamedParameterJdbcOperations jdbcOperations = mock(NamedParameterJdbcOperations.class); + InsertStrategy insertStrategy = mock(InsertStrategy.class); + String sql = "some sql"; + SqlParameterSource[] sqlParameterSources = new SqlParameterSource[] { new SqlIdentifierParameterSource() }; + + @Test + void insertsSequentially_whenIdGenerationForBatchOperationsNotSupported() { + + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, true, false), jdbcOperations, idColumn); + + SqlIdentifierParameterSource sqlParameterSource1 = new SqlIdentifierParameterSource(); + sqlParameterSource1.addValue(SqlIdentifier.quoted("property1"), "value1"); + SqlIdentifierParameterSource sqlParameterSource2 = new SqlIdentifierParameterSource(); + sqlParameterSource2.addValue(SqlIdentifier.quoted("property2"), "value2"); + + long id1 = 1L; + when(insertStrategy.execute(sql, sqlParameterSource1)).thenReturn(id1); + long id2 = 2L; + when(insertStrategy.execute(sql, sqlParameterSource2)).thenReturn(id2); + + Object[] ids = batchInsertStrategy.execute(sql, + new SqlParameterSource[] { sqlParameterSource1, sqlParameterSource2 }); + + assertThat(ids).containsExactly(id1, id2); + } + + @Test + void insertsWithKeyHolderAndKeyColumnNames_whenDriverRequiresKeyColumnNames() { + + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, true, true), jdbcOperations, idColumn); + + batchInsertStrategy.execute(sql, sqlParameterSources); + + verify(jdbcOperations).batchUpdate(eq(sql), eq(sqlParameterSources), any(KeyHolder.class), + eq(new String[] { idColumn.getReference() })); + } + + @Test + void 
insertsWithKeyHolder_whenDriverRequiresKeyColumnNames_butIdColumnIsNull() { + + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, true, true), jdbcOperations, null); + + batchInsertStrategy.execute(sql, sqlParameterSources); + + verify(jdbcOperations).batchUpdate(eq(sql), eq(sqlParameterSources), any(KeyHolder.class)); + } + + @Test + void insertsWithKeyHolder_whenDriverDoesNotRequireKeyColumnNames() { + + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, false, true), jdbcOperations, idColumn); + + batchInsertStrategy.execute(sql, sqlParameterSources); + + verify(jdbcOperations).batchUpdate(eq(sql), eq(sqlParameterSources), any(KeyHolder.class)); + } + + @Test + void insertsWithKeyHolder_returningKey_whenThereIsOnlyOne() { + + Long idValue = 123L; + when(jdbcOperations.batchUpdate(any(), any(), any())).thenAnswer(invocationOnMock -> { + + KeyHolder keyHolder = invocationOnMock.getArgument(2); + HashMap keys = new HashMap<>(); + keys.put("anything", idValue); + keyHolder.getKeyList().add(keys); + return null; + }); + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, false, true), jdbcOperations, idColumn); + + Object[] ids = batchInsertStrategy.execute(sql, sqlParameterSources); + + assertThat(ids).containsExactly(idValue); + } + + @Test + void insertsWithKeyHolder_returningKeyMatchingIdColumn_whenKeyHolderContainsMultipleKeysPerRecord() { + + Long idValue = 123L; + when(jdbcOperations.batchUpdate(any(), any(), any())).thenAnswer(invocationOnMock -> { + + KeyHolder keyHolder = invocationOnMock.getArgument(2); + HashMap keys = new HashMap<>(); + keys.put(idColumn.getReference(), idValue); + keys.put("other", "someOtherValue"); + keyHolder.getKeyList().add(keys); + return null; + }); + BatchInsertStrategy batchInsertStrategy 
= new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, false, true), jdbcOperations, idColumn); + + Object[] ids = batchInsertStrategy.execute(sql, sqlParameterSources); + + assertThat(ids).containsExactly(idValue); + } + + @Test + void insertsWithKeyHolder_returningNull__whenKeyHolderContainsMultipleKeysPerRecord_butIdColumnIsNull() { + + Long idValue = 123L; + when(jdbcOperations.batchUpdate(any(), any(), any())).thenAnswer(invocationOnMock -> { + + KeyHolder keyHolder = invocationOnMock.getArgument(2); + HashMap keys = new HashMap<>(); + keys.put(idColumn.getReference(), idValue); + keys.put("other", "someOtherValue"); + keyHolder.getKeyList().add(keys); + return null; + }); + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, false, true), jdbcOperations, null); + + Object[] ids = batchInsertStrategy.execute(sql, sqlParameterSources); + + assertThat(ids).hasSize(sqlParameterSources.length); + assertThat(ids).containsOnlyNulls(); + } + + @Test + void insertsWithKeyHolder_returningNull_whenKeyHolderHasNoKeys() { + + BatchInsertStrategy batchInsertStrategy = new IdGeneratingBatchInsertStrategy(insertStrategy, + createDialect(identifierProcessing, false, true), jdbcOperations, idColumn); + + Object[] ids = batchInsertStrategy.execute(sql, sqlParameterSources); + + assertThat(ids).hasSize(sqlParameterSources.length); + assertThat(ids).containsOnlyNulls(); + } + + private static Dialect createDialect(final IdentifierProcessing identifierProcessing, + final boolean requiresKeyColumnNames, final boolean supportsIdGenerationForBatchOperations) { + + return new AbstractDialect() { + + @Override + public LimitClause limit() { + return null; + } + + @Override + public LockClause lock() { + return null; + } + + @Override + public IdentifierProcessing getIdentifierProcessing() { + return identifierProcessing; + } + + @Override + public IdGeneration 
getIdGeneration() { + + return new IdGeneration() { + + @Override + public boolean driverRequiresKeyColumnNames() { + return requiresKeyColumnNames; + } + + @Override + public boolean supportedForBatchOperations() { + return supportsIdGenerationForBatchOperations; + } + }; + } + }; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingEntityCallbackTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingEntityCallbackTest.java new file mode 100644 index 0000000000..0de18dd348 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingEntityCallbackTest.java @@ -0,0 +1,171 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.any; + +import java.util.UUID; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.dialect.JdbcMySqlDialect; +import org.springframework.data.jdbc.core.dialect.JdbcPostgresDialect; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.Sequence; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; + +/** + * Unit tests for {@link IdGeneratingEntityCallback} + * + * @author Mikhail Polivakha + * @author Mark Paluch + */ +@MockitoSettings(strictness = Strictness.LENIENT) +class IdGeneratingEntityCallbackTest { + + @Mock NamedParameterJdbcOperations operations; + RelationalMappingContext relationalMappingContext; + + @BeforeEach + void setUp() { + + relationalMappingContext = new RelationalMappingContext(); + relationalMappingContext + .setSimpleTypeHolder(new SimpleTypeHolder(JdbcPostgresDialect.INSTANCE.simpleTypes(), true)); + } + + @Test // GH-1923 + void sequenceGenerationIsNotSupported() { + + NamedParameterJdbcOperations operations = mock(NamedParameterJdbcOperations.class); + + IdGeneratingEntityCallback subject = new IdGeneratingEntityCallback(relationalMappingContext, 
+ JdbcMySqlDialect.INSTANCE, operations); + + EntityWithSequence processed = (EntityWithSequence) subject.onBeforeSave(new EntityWithSequence(), + MutableAggregateChange.forSave(new EntityWithSequence())); + + assertThat(processed.id).isNull(); + } + + @Test // GH-1923 + void entityIsNotMarkedWithTargetSequence() { + + IdGeneratingEntityCallback subject = new IdGeneratingEntityCallback(relationalMappingContext, + JdbcMySqlDialect.INSTANCE, operations); + + NoSequenceEntity processed = (NoSequenceEntity) subject.onBeforeSave(new NoSequenceEntity(), + MutableAggregateChange.forSave(new NoSequenceEntity())); + + assertThat(processed.id).isNull(); + } + + @Test // GH-1923 + void entityIdIsPopulatedFromSequence() { + + long generatedId = 112L; + when(operations.queryForObject(anyString(), any(SqlParameterSource.class), any(RowMapper.class))) + .thenReturn(generatedId); + + IdGeneratingEntityCallback subject = new IdGeneratingEntityCallback(relationalMappingContext, + JdbcPostgresDialect.INSTANCE, operations); + + EntityWithSequence processed = (EntityWithSequence) subject.onBeforeSave(new EntityWithSequence(), + MutableAggregateChange.forSave(new EntityWithSequence())); + + assertThat(processed.getId()).isEqualTo(generatedId); + } + + @Test // GH-2003 + void appliesIntegerConversion() { + + long generatedId = 112L; + when(operations.queryForObject(anyString(), any(SqlParameterSource.class), any(RowMapper.class))) + .thenReturn(generatedId); + + IdGeneratingEntityCallback subject = new IdGeneratingEntityCallback(relationalMappingContext, + JdbcPostgresDialect.INSTANCE, operations); + + EntityWithIntSequence processed = (EntityWithIntSequence) subject.onBeforeSave(new EntityWithIntSequence(), + MutableAggregateChange.forSave(new EntityWithIntSequence())); + + assertThat(processed.id).isEqualTo(112); + } + + @Test // GH-2003 + void assignsUuidValues() { + + UUID generatedId = UUID.randomUUID(); + when(operations.queryForObject(anyString(), any(SqlParameterSource.class), 
any(RowMapper.class))) + .thenReturn(generatedId); + + IdGeneratingEntityCallback subject = new IdGeneratingEntityCallback(relationalMappingContext, + JdbcPostgresDialect.INSTANCE, operations); + + EntityWithUuidSequence processed = (EntityWithUuidSequence) subject.onBeforeSave(new EntityWithUuidSequence(), + MutableAggregateChange.forSave(new EntityWithUuidSequence())); + + assertThat(processed.id).isEqualTo(generatedId); + } + + @Table + static class NoSequenceEntity { + + @Id private Long id; + private Long name; + } + + @Table + static class EntityWithSequence { + + @Id + @Sequence(value = "id_seq", schema = "public") private Long id; + + private Long name; + + public Long getId() { + return id; + } + } + + @Table + static class EntityWithIntSequence { + + @Id + @Sequence(value = "id_seq") private int id; + + } + + @Table + static class EntityWithUuidSequence { + + @Id + @Sequence(value = "id_seq") private UUID id; + + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingInsertStrategyTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingInsertStrategyTest.java new file mode 100644 index 0000000000..890a9bf94d --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdGeneratingInsertStrategyTest.java @@ -0,0 +1,187 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.HashMap; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.dialect.AbstractDialect; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.IdGeneration; +import org.springframework.data.relational.core.dialect.LimitClause; +import org.springframework.data.relational.core.dialect.LockClause; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.jdbc.support.KeyHolder; + +/** + * Unit tests for {@link IdGeneratingInsertStrategy}. 
+ * + * @author Chirag Tailor + */ +class IdGeneratingInsertStrategyTest { + + SqlIdentifier idColumn = SqlIdentifier.quoted("id"); + IdentifierProcessing identifierProcessing = IdentifierProcessing.ANSI; + NamedParameterJdbcOperations namedParameterJdbcOperations = mock(NamedParameterJdbcOperations.class); + String sql = "some sql"; + SqlParameterSource sqlParameterSource = new SqlIdentifierParameterSource(); + + @Test + void insertsWithKeyHolderAndKeyColumnNames_whenDriverRequiresKeyColumnNames() { + + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, true), + namedParameterJdbcOperations, idColumn); + + insertStrategy.execute(sql, sqlParameterSource); + + verify(namedParameterJdbcOperations).update(eq(sql), eq(sqlParameterSource), any(KeyHolder.class), + eq(new String[] { idColumn.getReference() })); + } + + @Test + void insertsWithKeyHolder_whenDriverRequiresKeyColumnNames_butIdColumnIsNull() { + + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, true), + namedParameterJdbcOperations, null); + + insertStrategy.execute(sql, sqlParameterSource); + + verify(namedParameterJdbcOperations).update(eq(sql), eq(sqlParameterSource), any(KeyHolder.class)); + } + + @Test + void insertsWithKeyHolder_whenDriverDoesNotRequireKeyColumnNames() { + + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, false), + namedParameterJdbcOperations, idColumn); + + insertStrategy.execute(sql, sqlParameterSource); + + verify(namedParameterJdbcOperations).update(eq(sql), eq(sqlParameterSource), any(KeyHolder.class)); + } + + @Test + void insertsWithKeyHolder_returningKey_whenThereIsOnlyOne() { + + Long idValue = 123L; + when(namedParameterJdbcOperations.update(any(), any(), any())).thenAnswer(invocationOnMock -> { + + KeyHolder keyHolder = invocationOnMock.getArgument(2); + HashMap keys = new HashMap<>(); + keys.put("anything", idValue); + 
keyHolder.getKeyList().add(keys); + return null; + }); + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, false), + namedParameterJdbcOperations, idColumn); + + Object id = insertStrategy.execute(sql, sqlParameterSource); + + assertThat(id).isEqualTo(idValue); + } + + @Test + void insertsWithKeyHolder_returningKeyMatchingIdColumn_whenKeyHolderContainsMultipleKeysPerRecord() { + + Long idValue = 123L; + when(namedParameterJdbcOperations.update(any(), any(), any())).thenAnswer(invocationOnMock -> { + + KeyHolder keyHolder = invocationOnMock.getArgument(2); + HashMap keys = new HashMap<>(); + keys.put(idColumn.getReference(), idValue); + keys.put("other", "someOtherValue"); + keyHolder.getKeyList().add(keys); + return null; + }); + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, false), + namedParameterJdbcOperations, idColumn); + + Object id = insertStrategy.execute(sql, sqlParameterSource); + + assertThat(id).isEqualTo(idValue); + } + + @Test + void insertsWithKeyHolder_returningNull__whenKeyHolderContainsMultipleKeysPerRecord_butIdColumnIsNull() { + + Long idValue = 123L; + when(namedParameterJdbcOperations.update(any(), any(), any())).thenAnswer(invocationOnMock -> { + + KeyHolder keyHolder = invocationOnMock.getArgument(2); + HashMap keys = new HashMap<>(); + keys.put(idColumn.getReference(), idValue); + keys.put("other", "someOtherValue"); + keyHolder.getKeyList().add(keys); + return null; + }); + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, false), + namedParameterJdbcOperations, null); + + Object id = insertStrategy.execute(sql, sqlParameterSource); + + assertThat(id).isNull(); + } + + @Test + void insertsWithKeyHolder_returningNull_whenKeyHolderHasNoKeys() { + + InsertStrategy insertStrategy = new IdGeneratingInsertStrategy(createDialect(identifierProcessing, false), + namedParameterJdbcOperations, 
idColumn); + + Object id = insertStrategy.execute(sql, sqlParameterSource); + + assertThat(id).isNull(); + } + + private static Dialect createDialect(final IdentifierProcessing identifierProcessing, + final boolean requiresKeyColumnNames) { + + return new AbstractDialect() { + + @Override + public LimitClause limit() { + return null; + } + + @Override + public LockClause lock() { + return null; + } + + @Override + public IdentifierProcessing getIdentifierProcessing() { + return identifierProcessing; + } + + @Override + public IdGeneration getIdGeneration() { + return new IdGeneration() { + @Override + public boolean driverRequiresKeyColumnNames() { + return requiresKeyColumnNames; + } + }; + } + }; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdentifierUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdentifierUnitTests.java new file mode 100644 index 0000000000..7e67736ff7 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IdentifierUnitTests.java @@ -0,0 +1,146 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Unit tests for {@link Identifier}. + * + * @author Jens Schauder + * @author Mark Paluch + */ +public class IdentifierUnitTests { + + @Test // DATAJDBC-326 + public void getParametersByName() { + + Identifier identifier = Identifier.of(unquoted("aName"), "aValue", String.class); + + assertThat(identifier.toMap()).hasSize(1).containsEntry(unquoted("aName"), "aValue"); + } + + @Test // DATAJDBC-326 + public void typeIsCalculatedCorrectly() { + + HashMap parameters = new HashMap<>(); + Object objectValue = new Object(); + Object stringValue = "text"; + Object intValue = 23; + Object integerValue = 42; + + parameters.put(unquoted("one"), objectValue); + parameters.put(unquoted("two"), stringValue); + parameters.put(unquoted("three"), intValue); + parameters.put(unquoted("four"), integerValue); + + Identifier identifier = Identifier.from(parameters); + + assertThat(identifier.getParts()) // + .extracting("name", "value", "targetType") // + .containsExactlyInAnyOrder( // + Assertions.tuple(unquoted("one"), objectValue, Object.class), // + Assertions.tuple(unquoted("two"), stringValue, String.class), // + Assertions.tuple(unquoted("three"), intValue, Integer.class), // + Assertions.tuple(unquoted("four"), integerValue, Integer.class) // + ); + } + + @Test // DATAJDBC-326 + public void createsIdentifierFromMap() { + + Identifier identifier = 
Identifier.from(Collections.singletonMap(unquoted("aName"), "aValue")); + + assertThat(identifier.toMap()).hasSize(1).containsEntry(unquoted("aName"), "aValue"); + } + + @Test // DATAJDBC-326 + public void withAddsNewEntries() { + + Identifier identifier = Identifier.from(Collections.singletonMap(unquoted("aName"), "aValue")) + .withPart(unquoted("foo"), "bar", String.class); + + assertThat(identifier.toMap()) // + .hasSize(2) // + .containsEntry(unquoted("aName"), "aValue") // + .containsEntry(unquoted("foo"), "bar"); + } + + @Test // DATAJDBC-326 + public void withOverridesExistingEntries() { + + Identifier identifier = Identifier.from(Collections.singletonMap(unquoted("aName"), "aValue")) + .withPart(unquoted("aName"), "bar", String.class); + + assertThat(identifier.toMap()) // + .hasSize(1) // + .containsEntry(unquoted("aName"), "bar"); + } + + @Test // DATAJDBC-326 + public void forEachIteratesOverKeys() { + + List keys = new ArrayList<>(); + + Identifier.from(Collections.singletonMap(unquoted("aName"), "aValue")) + .forEach((name, value, targetType) -> keys.add(name.toSql(IdentifierProcessing.ANSI))); + + assertThat(keys).containsOnly("aName"); + } + + @Test // DATAJDBC-326 + public void equalsConsidersEquality() { + + Identifier one = Identifier.from(Collections.singletonMap(unquoted("aName"), "aValue")); + Identifier two = Identifier.from(Collections.singletonMap(unquoted("aName"), "aValue")); + Identifier three = Identifier.from(Collections.singletonMap(unquoted("aName"), "different")); + + assertThat(one).isEqualTo(two); + assertThat(one).isNotEqualTo(three); + } + + @Test // DATAJDBC-542 + public void identifierPartsCanBeAccessedByString() { + + Map idParts = new HashMap<>(); + idParts.put(unquoted("aName"), "one"); + idParts.put(quoted("Other"), "two"); + + Identifier id = Identifier.from(idParts); + + Map map = id.toMap(); + + assertSoftly(softly -> { + softly.assertThat(map.get("aName")).describedAs("aName").isEqualTo("one"); + 
softly.assertThat(map.get("Other")).describedAs("Other").isEqualTo("two"); + softly.assertThat(map.get("other")).describedAs("other").isNull(); + softly.assertThat(map.get("OTHER")).describedAs("OTHER").isNull(); + }); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/InsertStrategyFactoryTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/InsertStrategyFactoryTest.java new file mode 100644 index 0000000000..261f8d8b37 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/InsertStrategyFactoryTest.java @@ -0,0 +1,62 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.dialect.AnsiDialect; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; + +/** + * Unit tests for {@link InsertStrategyFactory}. 
+ * + * @author Chirag Tailor + */ +class InsertStrategyFactoryTest { + + NamedParameterJdbcOperations namedParameterJdbcOperations = mock(NamedParameterJdbcOperations.class); + InsertStrategyFactory insertStrategyFactory = new InsertStrategyFactory(namedParameterJdbcOperations, + AnsiDialect.INSTANCE); + + String sql = "some sql"; + SqlParameterSource sqlParameterSource = new SqlIdentifierParameterSource(); + SqlParameterSource[] sqlParameterSources = new SqlParameterSource[] { sqlParameterSource }; + + @Test + void insertWithoutGeneratedIds() { + + Object id = insertStrategyFactory.insertStrategy(IdValueSource.GENERATED, null).execute(sql, sqlParameterSource); + + verify(namedParameterJdbcOperations).update(sql, sqlParameterSource); + assertThat(id).isNull(); + } + + @Test + void batchInsertWithoutGeneratedIds() { + + Object[] ids = insertStrategyFactory.batchInsertStrategy(IdValueSource.GENERATED, null).execute(sql, + sqlParameterSources); + + verify(namedParameterJdbcOperations).batchUpdate(sql, sqlParameterSources); + assertThat(ids).hasSize(sqlParameterSources.length); + assertThat(ids).containsOnlyNulls(); + } + +} diff --git a/src/test/java/org/springframework/data/jdbc/core/IterableOfEntryToMapConverterUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IterableOfEntryToMapConverterUnitTests.java similarity index 91% rename from src/test/java/org/springframework/data/jdbc/core/IterableOfEntryToMapConverterUnitTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IterableOfEntryToMapConverterUnitTests.java index 071772f2b5..3df9a491b4 100644 --- a/src/test/java/org/springframework/data/jdbc/core/IterableOfEntryToMapConverterUnitTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/IterableOfEntryToMapConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. 
+ * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.jdbc.core; +package org.springframework.data.jdbc.core.convert; import static java.util.Arrays.*; import static java.util.Collections.*; @@ -24,7 +24,7 @@ import java.util.Map; import org.assertj.core.api.SoftAssertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.core.convert.TypeDescriptor; /** diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/JdbcCustomConversionsUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/JdbcCustomConversionsUnitTests.java new file mode 100644 index 0000000000..78c79ed7a1 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/JdbcCustomConversionsUnitTests.java @@ -0,0 +1,38 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import org.jmolecules.ddd.types.Association; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link JdbcCustomConversions}. + * + * @author Oliver Drotbohm + */ +class JdbcCustomConversionsUnitTests { + + @Test // GH-937 + void registersNonDateDefaultConverter() { + + JdbcCustomConversions conversions = new JdbcCustomConversions(); + + assertThat(conversions.hasCustomWriteTarget(Association.class)).isTrue(); + assertThat(conversions.getSimpleTypeHolder().isSimpleType(Association.class)); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/JdbcIdentifierBuilderUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/JdbcIdentifierBuilderUnitTests.java new file mode 100644 index 0000000000..5873ce23a1 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/JdbcIdentifierBuilderUnitTests.java @@ -0,0 +1,151 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.PersistentPropertyPathTestUtils; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.mapping.AggregatePath; + +/** + * Unit tests for the {@link JdbcIdentifierBuilder}. + * + * @author Jens Schauder + */ +public class JdbcIdentifierBuilderUnitTests { + + JdbcMappingContext context = new JdbcMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + + @Test // DATAJDBC-326 + public void parametersWithPropertyKeysUseTheParentPropertyJdbcType() { + + Identifier identifier = JdbcIdentifierBuilder.forBackReferences(converter, getPath("child"), "eins").build(); + + assertThat(identifier.getParts()) // + .extracting("name", "value", "targetType") // + .containsExactly( // + tuple(quoted("DUMMY_ENTITY"), "eins", UUID.class) // + ); + } + + @Test // DATAJDBC-326 + public void qualifiersForMaps() { + + AggregatePath path = getPath("children"); + + Identifier identifier = JdbcIdentifierBuilder // + .forBackReferences(converter, path, "parent-eins") // + .withQualifier(path, "map-key-eins") // + .build(); + + assertThat(identifier.getParts()) // + .extracting("name", "value", "targetType") // + .containsExactlyInAnyOrder( // + tuple(quoted("DUMMY_ENTITY"), "parent-eins", UUID.class), // + tuple(quoted("DUMMY_ENTITY_KEY"), "map-key-eins", String.class) // + ); + } + + @Test // DATAJDBC-326 + public void qualifiersForLists() { + + AggregatePath path = getPath("moreChildren"); + + Identifier identifier = JdbcIdentifierBuilder // + 
.forBackReferences(converter, path, "parent-eins") // + .withQualifier(path, "list-index-eins") // + .build(); + + assertThat(identifier.getParts()) // + .extracting("name", "value", "targetType") // + .containsExactlyInAnyOrder( // + tuple(quoted("DUMMY_ENTITY"), "parent-eins", UUID.class), // + tuple(quoted("DUMMY_ENTITY_KEY"), "list-index-eins", Integer.class) // + ); + } + + @Test // DATAJDBC-326 + public void backreferenceAcrossEmbeddable() { + + Identifier identifier = JdbcIdentifierBuilder // + .forBackReferences(converter, getPath("embeddable.child"), "parent-eins") // + .build(); + + assertThat(identifier.getParts()) // + .extracting("name", "value", "targetType") // + .containsExactly( // + tuple(quoted("DUMMY_ENTITY"), "parent-eins", UUID.class) // + ); + } + + @Test // DATAJDBC-326 + public void backreferenceAcrossNoId() { + + Identifier identifier = JdbcIdentifierBuilder // + .forBackReferences(converter, getPath("noId.child"), "parent-eins") // + .build(); + + assertThat(identifier.getParts()) // + .extracting("name", "value", "targetType") // + .containsExactly( // + tuple(quoted("DUMMY_ENTITY"), "parent-eins", UUID.class) // + ); + } + + private AggregatePath getPath(String dotPath) { + return context.getAggregatePath(PersistentPropertyPathTestUtils.getPath(dotPath, DummyEntity.class, context)); + } + + @SuppressWarnings("unused") + static class DummyEntity { + + @Id UUID id; + String one; + Long two; + Child child; + + Map children; + + List moreChildren; + + Embeddable embeddable; + + NoId noId; + } + + @SuppressWarnings("unused") + static class Embeddable { + Child child; + } + + @SuppressWarnings("unused") + static class NoId { + Child child; + } + + static class Child {} +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/MappingJdbcConverterUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/MappingJdbcConverterUnitTests.java new file mode 100644 index 
0000000000..b623b6ef94 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/MappingJdbcConverterUnitTests.java @@ -0,0 +1,369 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.mockito.Mockito.*; + +import java.nio.ByteBuffer; +import java.sql.Array; +import java.sql.Timestamp; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.jdbc.support.JdbcUtil; +import org.springframework.data.relational.core.mapping.MappedCollection; +import 
org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; + +/** + * Unit tests for {@link MappingJdbcConverter}. + * + * @author Mark Paluch + * @author Jens Schauder + */ +class MappingJdbcConverterUnitTests { + + private static final UUID UUID = java.util.UUID.fromString("87a48aa8-a071-705e-54a9-e52fe3a012f1"); + private static final byte[] BYTES_REPRESENTING_UUID = { -121, -92, -118, -88, -96, 113, 112, 94, 84, -87, -27, 47, + -29, + -96, 18, -15 }; + + private JdbcMappingContext context = new JdbcMappingContext(); + private StubbedJdbcTypeFactory typeFactory = new StubbedJdbcTypeFactory(); + private MappingJdbcConverter converter = new MappingJdbcConverter( // + context, // + (identifier, path) -> { + throw new UnsupportedOperationException(); + }, // + new JdbcCustomConversions(), // + typeFactory // + ); + + @Test // DATAJDBC-104, DATAJDBC-1384 + void testTargetTypesForPropertyType() { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + SoftAssertions softly = new SoftAssertions(); + + checkTargetType(softly, entity, "someEnum", String.class); + checkTargetType(softly, entity, "localDateTime", LocalDateTime.class); + checkTargetType(softly, entity, "localDate", Timestamp.class); + checkTargetType(softly, entity, "localTime", Timestamp.class); + checkTargetType(softly, entity, "zonedDateTime", String.class); + checkTargetType(softly, entity, "offsetDateTime", OffsetDateTime.class); + checkTargetType(softly, entity, "instant", Timestamp.class); + checkTargetType(softly, entity, "date", Date.class); + checkTargetType(softly, entity, "timestamp", Timestamp.class); + checkTargetType(softly, entity, "uuid", UUID.class); + + softly.assertAll(); + } + + @Test // DATAJDBC-259 + void 
classificationOfCollectionLikeProperties() { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + RelationalPersistentProperty listOfString = entity.getRequiredPersistentProperty("listOfString"); + RelationalPersistentProperty arrayOfString = entity.getRequiredPersistentProperty("arrayOfString"); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(converter.getColumnType(arrayOfString)).isEqualTo(String[].class); + softly.assertThat(converter.getColumnType(listOfString)).isEqualTo(String[].class); + + softly.assertAll(); + } + + @Test // DATAJDBC-221 + void referencesAreNotEntitiesAndGetStoredAsTheirId() { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + SoftAssertions softly = new SoftAssertions(); + + RelationalPersistentProperty reference = entity.getRequiredPersistentProperty("reference"); + + softly.assertThat(reference.isEntity()).isFalse(); + softly.assertThat(converter.getColumnType(reference)).isEqualTo(Long.class); + + softly.assertAll(); + } + + @Test // DATAJDBC-637 + void conversionOfDateLikeValueAndBackYieldsOriginalValue() { + + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); + + assertSoftly(softly -> { + LocalDateTime testLocalDateTime = LocalDateTime.of(2001, 2, 3, 4, 5, 6, 123456789); + checkConversionToTimestampAndBack(softly, persistentEntity, "localDateTime", testLocalDateTime); + checkConversionToTimestampAndBack(softly, persistentEntity, "localDate", LocalDate.of(2001, 2, 3)); + checkConversionToTimestampAndBack(softly, persistentEntity, "localTime", LocalTime.of(1, 2, 3, 123456789)); + checkConversionToTimestampAndBack(softly, persistentEntity, "instant", + testLocalDateTime.toInstant(ZoneOffset.UTC)); + }); + + } + + @Test // GH-945 + void conversionOfPrimitiveArrays() { + + int[] ints = { 1, 2, 3, 4, 5 }; + JdbcValue converted = converter.writeJdbcValue(ints, 
ints.getClass(), JdbcUtil.targetSqlTypeFor(ints.getClass())); + + assertThat(converted.getValue()).isInstanceOf(Array.class); + assertThat(typeFactory.arraySource).containsExactly(1, 2, 3, 4, 5); + } + + @Test // GH-1684 + void accessesCorrectValuesForOneToOneRelationshipWithIdenticallyNamedIdProperties() { + + RowDocument rowdocument = new RowDocument(Map.of("ID", "one", "REFERENCED_ID", 23)); + + WithOneToOne result = converter.readAndResolve(WithOneToOne.class, rowdocument); + + assertThat(result).isEqualTo(new WithOneToOne("one", new Referenced(23L))); + } + + @Test // GH-1750 + void readByteArrayToNestedUuidWithCustomConverter() { + + JdbcMappingContext context = new JdbcMappingContext(); + StubbedJdbcTypeFactory typeFactory = new StubbedJdbcTypeFactory(); + Converter customConverter = new ByteArrayToUuid(); + MappingJdbcConverter converter = new MappingJdbcConverter( // + context, // + (identifier, path) -> { + throw new UnsupportedOperationException(); + }, // + new JdbcCustomConversions(Collections.singletonList(customConverter)), // + typeFactory // + ); + + assertSoftly(softly -> { + checkReadConversion(softly, converter, "uuidRef", AggregateReference.to(UUID)); + checkReadConversion(softly, converter, "uuid", UUID); + checkReadConversion(softly, converter, "optionalUuid", Optional.of(UUID)); + }); + + } + + private static void checkReadConversion(SoftAssertions softly, MappingJdbcConverter converter, String propertyName, + Object expected) { + + RelationalPersistentProperty property = converter.getMappingContext().getRequiredPersistentEntity(DummyEntity.class) + .getRequiredPersistentProperty(propertyName); + Object value = converter.readValue(BYTES_REPRESENTING_UUID, property.getTypeInformation() // + ); + + softly.assertThat(value).isEqualTo(expected); + } + + private void checkConversionToTimestampAndBack(SoftAssertions softly, RelationalPersistentEntity persistentEntity, + String propertyName, Object value) { + + RelationalPersistentProperty property 
= persistentEntity.getRequiredPersistentProperty(propertyName); + + Object converted = converter.writeValue(value, TypeInformation.of(converter.getColumnType(property))); + Object convertedBack = converter.readValue(converted, property.getTypeInformation()); + + softly.assertThat(convertedBack).describedAs(propertyName).isEqualTo(value); + } + + private void checkTargetType(SoftAssertions softly, RelationalPersistentEntity persistentEntity, + String propertyName, Class expected) { + + RelationalPersistentProperty property = persistentEntity.getRequiredPersistentProperty(propertyName); + + softly.assertThat(converter.getColumnType(property)).describedAs(propertyName).isEqualTo(expected); + } + + @SuppressWarnings("unused") + private static class DummyEntity { + + @Id private final Long id; + private final SomeEnum someEnum; + private final LocalDateTime localDateTime; + private final LocalDate localDate; + private final LocalTime localTime; + private final ZonedDateTime zonedDateTime; + private final OffsetDateTime offsetDateTime; + private final Instant instant; + private final Date date; + private final Timestamp timestamp; + private final AggregateReference reference; + private final UUID uuid; + private final AggregateReference uuidRef; + private final Optional optionalUuid; + + // DATAJDBC-259 + private final List listOfString; + private final String[] arrayOfString; + private final List listOfEntity; + private final OtherEntity[] arrayOfEntity; + + private DummyEntity(Long id, SomeEnum someEnum, LocalDateTime localDateTime, LocalDate localDate, + LocalTime localTime, ZonedDateTime zonedDateTime, OffsetDateTime offsetDateTime, Instant instant, Date date, + Timestamp timestamp, AggregateReference reference, UUID uuid, + AggregateReference uuidRef, Optional optionalUUID, List listOfString, String[] arrayOfString, + List listOfEntity, OtherEntity[] arrayOfEntity) { + this.id = id; + this.someEnum = someEnum; + this.localDateTime = localDateTime; + this.localDate = 
localDate; + this.localTime = localTime; + this.zonedDateTime = zonedDateTime; + this.offsetDateTime = offsetDateTime; + this.instant = instant; + this.date = date; + this.timestamp = timestamp; + this.reference = reference; + this.uuid = uuid; + this.uuidRef = uuidRef; + this.optionalUuid = optionalUUID; + this.listOfString = listOfString; + this.arrayOfString = arrayOfString; + this.listOfEntity = listOfEntity; + this.arrayOfEntity = arrayOfEntity; + } + + public Long getId() { + return this.id; + } + + public SomeEnum getSomeEnum() { + return this.someEnum; + } + + public LocalDateTime getLocalDateTime() { + return this.localDateTime; + } + + public LocalDate getLocalDate() { + return this.localDate; + } + + public LocalTime getLocalTime() { + return this.localTime; + } + + public ZonedDateTime getZonedDateTime() { + return this.zonedDateTime; + } + + public OffsetDateTime getOffsetDateTime() { + return this.offsetDateTime; + } + + public Instant getInstant() { + return this.instant; + } + + public Date getDate() { + return this.date; + } + + public Timestamp getTimestamp() { + return this.timestamp; + } + + public AggregateReference getReference() { + return this.reference; + } + + public UUID getUuid() { + return this.uuid; + } + + public List getListOfString() { + return this.listOfString; + } + + public String[] getArrayOfString() { + return this.arrayOfString; + } + + public List getListOfEntity() { + return this.listOfEntity; + } + + public OtherEntity[] getArrayOfEntity() { + return this.arrayOfEntity; + } + } + + @SuppressWarnings("unused") + private enum SomeEnum { + ALPHA + } + + @SuppressWarnings("unused") + private static class OtherEntity {} + + private static class StubbedJdbcTypeFactory implements JdbcTypeFactory { + Object[] arraySource; + + @Override + public Array createArray(Object[] value) { + arraySource = value; + return mock(Array.class); + } + } + + private record WithOneToOne(@Id String id, @MappedCollection(idColumn = "renamed") 
Referenced referenced) { + } + + private record Referenced(@Id Long id) { + } + + private record ReferencedByUuid(@Id UUID id) { + } + + static class ByteArrayToUuid implements Converter { + @Override + public UUID convert(byte[] source) { + + ByteBuffer byteBuffer = ByteBuffer.wrap(source); + long high = byteBuffer.getLong(); + long low = byteBuffer.getLong(); + return new UUID(high, low); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/NonQuotingDialect.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/NonQuotingDialect.java new file mode 100644 index 0000000000..fda4f5d932 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/NonQuotingDialect.java @@ -0,0 +1,54 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import org.springframework.data.jdbc.core.dialect.JdbcHsqlDbDialect; +import org.springframework.data.relational.core.dialect.AbstractDialect; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.HsqlDbDialect; +import org.springframework.data.relational.core.dialect.LimitClause; +import org.springframework.data.relational.core.dialect.LockClause; +import org.springframework.data.relational.core.sql.IdentifierProcessing; + +/** + * Simple {@link Dialect} that provides unquoted {@link IdentifierProcessing}. + * + * @author Mark Paluch + * @author Milan Milanov + * @author Jens Schauder + * @author Myeonghyeon Lee + */ +public class NonQuotingDialect extends AbstractDialect implements Dialect { + + public static final NonQuotingDialect INSTANCE = new NonQuotingDialect(); + + private NonQuotingDialect() {} + + @Override + public LimitClause limit() { + return JdbcHsqlDbDialect.INSTANCE.limit(); + } + + @Override + public LockClause lock() { + return JdbcHsqlDbDialect.INSTANCE.lock(); + } + + @Override + public IdentifierProcessing getIdentifierProcessing() { + return IdentifierProcessing.create(new IdentifierProcessing.Quoting(""), IdentifierProcessing.LetterCasing.AS_IS); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/QueryMapperUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/QueryMapperUnitTests.java new file mode 100644 index 0000000000..a67da7397f --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/QueryMapperUnitTests.java @@ -0,0 +1,481 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.domain.Sort.Order.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Functions; +import org.springframework.data.relational.core.sql.OrderByField; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.domain.SqlSort; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; + +/** + * Unit tests for {@link QueryMapper}. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @author Mikhail Fedorov + */ +public class QueryMapperUnitTests { + + JdbcMappingContext context = new JdbcMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, mock(RelationResolver.class)); + + QueryMapper mapper = new QueryMapper(converter); + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + QueryMapper createMapper(Converter... converters) { + + JdbcCustomConversions conversions = new JdbcCustomConversions(Arrays.asList(converters)); + + JdbcConverter converter = new MappingJdbcConverter(context, mock(RelationResolver.class), conversions, + mock(JdbcTypeFactory.class)); + + return new QueryMapper(converter); + } + + @Test // DATAJDBC-318 + public void shouldNotMapEmptyCriteria() { + + Criteria criteria = Criteria.empty(); + + assertThatIllegalArgumentException().isThrownBy(() -> map(criteria)); + } + + @Test // DATAJDBC-318 + public void shouldNotMapEmptyAndCriteria() { + + Criteria criteria = Criteria.empty().and(Collections.emptyList()); + + assertThatIllegalArgumentException().isThrownBy(() -> map(criteria)); + } + + @Test // DATAJDBC-318 + public void shouldNotMapEmptyNestedCriteria() { + + Criteria criteria = Criteria.empty().and(Collections.emptyList()).and(Criteria.empty().and(Criteria.empty())); + + assertThat(criteria.isEmpty()).isTrue(); + assertThatIllegalArgumentException().isThrownBy(() -> map(criteria)); + } + + @Test // DATAJDBC-318 + public void shouldMapSomeNestedCriteria() { + + Criteria criteria = Criteria.empty().and(Collections.emptyList()) + .and(Criteria.empty().and(Criteria.where("name").is("Hank"))); + + assertThat(criteria.isEmpty()).isFalse(); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("((person.\"NAME\" = ?[:name]))"); + } + + @Test // DATAJDBC-318 + public void shouldMapNestedGroup() { + + Criteria initial = Criteria.empty(); + + Criteria criteria = 
initial.and(Criteria.where("name").is("Foo")) // + .and(Criteria.where("name").is("Bar") // + .or("age").lessThan(49) // + .or(Criteria.where("name").not("Bar") // + .and("age").greaterThan(49) // + ) // + ); + + assertThat(criteria.isEmpty()).isFalse(); + + Condition condition = map(criteria); + + assertThat(condition).hasToString( + "(person.\"NAME\" = ?[:name]) AND (person.\"NAME\" = ?[:name1] OR person.age < ?[:age] OR (person.\"NAME\" != ?[:name3] AND person.age > ?[:age4]))"); + } + + @Test // DATAJDBC-318 + public void shouldMapFrom() { + + Criteria criteria = Criteria.from(Criteria.where("name").is("Foo")) // + .and(Criteria.where("name").is("Bar") // + .or("age").lessThan(49) // + ); + + assertThat(criteria.isEmpty()).isFalse(); + + Condition condition = map(criteria); + + assertThat(condition) + .hasToString("person.\"NAME\" = ?[:name] AND (person.\"NAME\" = ?[:name1] OR person.age < ?[:age])"); + } + + @Test // DATAJDBC-560 + public void shouldMapFromConcat() { + + Criteria criteria = Criteria.from(Criteria.where("name").is("Foo"), Criteria.where("name").is("Bar") // + .or("age").lessThan(49)); + + assertThat(criteria.isEmpty()).isFalse(); + + Condition condition = map(criteria); + + assertThat(condition) + .hasToString("(person.\"NAME\" = ?[:name] AND (person.\"NAME\" = ?[:name1] OR person.age < ?[:age]))"); + } + + @Test // DATAJDBC-318 + public void shouldMapSimpleCriteria() { + + Criteria criteria = Criteria.where("name").is("foo"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" = ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapSimpleCriteriaWithoutEntity() { + + Criteria criteria = Criteria.where("name").is("foo"); + + Condition condition = mapper.getMappedObject(new MapSqlParameterSource(), criteria, Table.create("person"), null); + + assertThat(condition).hasToString("person.name = ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapExpression() { + + Table table = 
Table.create("my_table").as("my_aliased_table"); + + Expression mappedObject = mapper.getMappedObject(table.column("alternative").as("my_aliased_col"), + context.getRequiredPersistentEntity(Person.class)); + + assertThat(mappedObject).hasToString("my_aliased_table.\"another_name\" AS my_aliased_col"); + } + + @Test // DATAJDBC-318 + public void shouldMapCountFunction() { + + Table table = Table.create("my_table").as("my_aliased_table"); + + Expression mappedObject = mapper.getMappedObject(Functions.count(table.column("alternative")), + context.getRequiredPersistentEntity(Person.class)); + + assertThat(mappedObject).hasToString("COUNT(my_aliased_table.\"another_name\")"); + } + + @Test // DATAJDBC-318 + public void shouldMapExpressionToUnknownColumn() { + + Table table = Table.create("my_table").as("my_aliased_table"); + + Expression mappedObject = mapper.getMappedObject(table.column("unknown").as("my_aliased_col"), + context.getRequiredPersistentEntity(Person.class)); + + assertThat(mappedObject).hasToString("my_aliased_table.unknown AS my_aliased_col"); + } + + @Test // DATAJDBC-318 + public void shouldMapExpressionWithoutEntity() { + + Table table = Table.create("my_table").as("my_aliased_table"); + + Expression mappedObject = mapper.getMappedObject(table.column("my_col").as("my_aliased_col"), null); + + assertThat(mappedObject).hasToString("my_aliased_table.my_col AS my_aliased_col"); + } + + @Test // DATAJDBC-318 + public void shouldMapSimpleNullableCriteria() { + + Criteria criteria = Criteria.where("name").isNull(); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" IS NULL"); + } + + @Test // DATAJDBC-318 + public void shouldConsiderColumnName() { + + Criteria criteria = Criteria.where("alternative").is("foo"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"another_name\" = ?[:another_name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapAndCriteria() { + + 
Criteria criteria = Criteria.where("name").is("foo").and("bar").is("baz"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" = ?[:name] AND person.bar = ?[:bar]"); + } + + @Test // DATAJDBC-318 + public void shouldMapOrCriteria() { + + Criteria criteria = Criteria.where("name").is("foo").or("bar").is("baz"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" = ?[:name] OR person.bar = ?[:bar]"); + } + + @Test // DATAJDBC-318 + public void shouldMapAndOrCriteria() { + + Criteria criteria = Criteria.where("name").is("foo") // + .and("name").isNotNull() // + .or("bar").is("baz") // + .and("anotherOne").is("alternative"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString( + "person.\"NAME\" = ?[:name] AND person.\"NAME\" IS NOT NULL OR person.bar = ?[:bar] AND person.anotherOne = ?[:anotherOne]"); + } + + @Test // DATAJDBC-318 + public void shouldMapNeq() { + + Criteria criteria = Criteria.where("name").not("foo"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" != ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsNull() { + + Criteria criteria = Criteria.where("name").isNull(); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" IS NULL"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsNotNull() { + + Criteria criteria = Criteria.where("name").isNotNull(); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" IS NOT NULL"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsIn() { + + Criteria criteria = Criteria.where("name").in("a", "b", "c"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" IN (?[:name], ?[:name1], ?[:name2])"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsNotIn() { + + Criteria criteria = 
Criteria.where("name").notIn("a", "b", "c"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" NOT IN (?[:name], ?[:name1], ?[:name2])"); + } + + @Test + void shouldMapIsNotInWithCollectionToStringConverter() { + + mapper = createMapper(CollectionToStringConverter.INSTANCE); + + Criteria criteria = Criteria.where("name").notIn("a", "b", "c"); + + Condition bindings = map(criteria); + + assertThat(bindings).hasToString("person.\"NAME\" NOT IN (?[:name], ?[:name1], ?[:name2])"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsGt() { + + Criteria criteria = Criteria.where("name").greaterThan("a"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" > ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsGte() { + + Criteria criteria = Criteria.where("name").greaterThanOrEquals("a"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" >= ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsLt() { + + Criteria criteria = Criteria.where("name").lessThan("a"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" < ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsLte() { + + Criteria criteria = Criteria.where("name").lessThanOrEquals("a"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" <= ?[:name]"); + } + + @Test // DATAJDBC-318 + public void shouldMapBetween() { + + Criteria criteria = Criteria.where("name").between("a", "b"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" BETWEEN ?[:name] AND ?[:name1]"); + } + + @Test // DATAJDBC-318 + public void shouldMapIsLike() { + + Criteria criteria = Criteria.where("name").like("a"); + + Condition condition = map(criteria); + + assertThat(condition).hasToString("person.\"NAME\" LIKE ?[:name]"); + } + + @Test 
// DATAJDBC-318 + public void shouldMapSort() { + + Sort sort = Sort.by(desc("alternative")); + + List fields = mapper.getMappedSort(Table.create("tbl"), sort, + context.getRequiredPersistentEntity(Person.class)); + + assertThat(fields) // + .extracting(Objects::toString) // + .containsExactly("tbl.\"another_name\" DESC"); + } + + @Test // GH-1507 + public void shouldMapSortWithUnknownField() { + + Sort sort = Sort.by(desc("unknownField")); + + List fields = mapper.getMappedSort(Table.create("tbl"), sort, + context.getRequiredPersistentEntity(Person.class)); + + assertThat(fields) // + .extracting(Objects::toString) // + .containsExactly("tbl.unknownField DESC"); + } + + @Test // GH-1507 + public void shouldMapSortWithAllowedSpecialCharacters() { + + Sort sort = Sort.by(desc("x(._)x")); + + List fields = mapper.getMappedSort(Table.create("tbl"), sort, + context.getRequiredPersistentEntity(Person.class)); + + assertThat(fields) // + .extracting(Objects::toString) // + .containsExactly("tbl.x(._)x DESC"); + } + + @ParameterizedTest // GH-1507 + @ValueSource(strings = { " ", ";", "--" }) + public void shouldNotMapSortWithIllegalExpression(String input) { + + Sort sort = Sort.by(desc("unknown" + input + "Field")); + + assertThatThrownBy( + () -> mapper.getMappedSort(Table.create("tbl"), sort, context.getRequiredPersistentEntity(Person.class))) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test // GH-1507 + public void shouldMapSortWithUnsafeExpression() { + + String unsafeExpression = "arbitrary expression that may include evil stuff like ; & --"; + Sort sort = SqlSort.unsafe(unsafeExpression); + + List fields = mapper.getMappedSort(Table.create("tbl"), sort, + context.getRequiredPersistentEntity(Person.class)); + + assertThat(fields) // + .extracting(Objects::toString) // + .containsExactly(unsafeExpression + " ASC"); + } + + private Condition map(Criteria criteria) { + + return mapper.getMappedObject(parameterSource, criteria, Table.create("person"), + 
context.getRequiredPersistentEntity(Person.class)); + } + + static class Person { + + String name; + @Column("another_name") String alternative; + } + + enum CollectionToStringConverter implements Converter, String> { + INSTANCE; + + @Override + public String convert(Collection source) { + return source.toString(); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ResultSetTestUtil.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ResultSetTestUtil.java new file mode 100644 index 0000000000..bf8652091b --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ResultSetTestUtil.java @@ -0,0 +1,287 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.mockito.Mockito.*; + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import javax.naming.OperationNotSupportedException; + +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; +import org.springframework.util.Assert; +import org.springframework.util.LinkedCaseInsensitiveMap; + +/** + * Utility for mocking ResultSets for tests. 
+ * + * @author Jens Schauder + */ +class ResultSetTestUtil { + + static ResultSet mockResultSet(List columns, Object... values) { + + Assert.isTrue( // + values.length % columns.size() == 0, // + String // + .format( // + "Number of values [%d] must be a multiple of the number of columns [%d]", // + values.length, // + columns.size() // + ) // + ); + + List> result = convertValues(columns, values); + + return mock(ResultSet.class, new ResultSetAnswer(columns, result)); + } + + private static List> convertValues(List columns, Object[] values) { + + List> result = new ArrayList<>(); + + int index = 0; + while (index < values.length) { + + Map row = new LinkedCaseInsensitiveMap<>(); + result.add(row); + for (String column : columns) { + + row.put(column, values[index]); + index++; + } + } + return result; + } + + private static class ResultSetAnswer implements Answer { + + private final List names; + private final List> values; + private int index = -1; + + ResultSetAnswer(List names, List> values) { + + this.names = names; + this.values = values; + } + + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + + switch (invocation.getMethod().getName()) { + case "close" -> { + close(); + return null; + } + case "next" -> { + return next(); + } + case "getObject" -> { + Object argument = invocation.getArgument(0); + String name = argument instanceof Integer ? names.get(((Integer) argument) - 1) : (String) argument; + return getObject(name); + } + case "isAfterLast" -> { + return isAfterLast(); + } + case "isBeforeFirst" -> { + return isBeforeFirst(); + } + case "getRow" -> { + return isAfterLast() || isBeforeFirst() ? 
0 : index + 1; + } + case "toString" -> { + return this.toString(); + } + case "findColumn" -> { + return findColumn(invocation.getArgument(0)); + } + case "getMetaData" -> { + return new MockedMetaData(); + } + default -> throw new OperationNotSupportedException(invocation.getMethod().getName()); + } + } + + private int findColumn(String name) { + if (names.contains(name)) { + return names.indexOf(name) + 1; + } + + return -1; + } + + private boolean isAfterLast() { + return index >= values.size() && !values.isEmpty(); + } + + private boolean isBeforeFirst() { + return index < 0 && !values.isEmpty(); + } + + private Object getObject(String column) throws SQLException { + + if (index == -1) { + throw new SQLException("ResultSet.isBeforeFirst. Make sure to call next() before calling this method"); + } + + Map rowMap = values.get(index); + + if (!rowMap.containsKey(column)) { + throw new SQLException(String.format("Trying to access a column (%s) that does not exist", column)); + } + + return rowMap.get(column); + } + + private boolean close() { + + index = -1; + return index < values.size(); + } + + private boolean next() { + + index++; + return index < values.size(); + } + + private class MockedMetaData implements ResultSetMetaData { + @Override + public int getColumnCount() { + return names.size(); + } + + @Override + public boolean isAutoIncrement(int i) { + return false; + } + + @Override + public boolean isCaseSensitive(int i) { + return false; + } + + @Override + public boolean isSearchable(int i) { + return false; + } + + @Override + public boolean isCurrency(int i) { + return false; + } + + @Override + public int isNullable(int i) { + return 0; + } + + @Override + public boolean isSigned(int i) { + return false; + } + + @Override + public int getColumnDisplaySize(int i) { + return 0; + } + + @Override + public String getColumnLabel(int i) { + return names.get(i - 1); + } + + @Override + public String getColumnName(int i) { + return null; + } + + @Override + 
public String getSchemaName(int i) { + return null; + } + + @Override + public int getPrecision(int i) { + return 0; + } + + @Override + public int getScale(int i) { + return 0; + } + + @Override + public String getTableName(int i) { + return null; + } + + @Override + public String getCatalogName(int i) { + return null; + } + + @Override + public int getColumnType(int i) { + return 0; + } + + @Override + public String getColumnTypeName(int i) { + return null; + } + + @Override + public boolean isReadOnly(int i) { + return false; + } + + @Override + public boolean isWritable(int i) { + return false; + } + + @Override + public boolean isDefinitelyWritable(int i) { + return false; + } + + @Override + public String getColumnClassName(int i) { + return null; + } + + @Override + public T unwrap(Class aClass) { + return null; + } + + @Override + public boolean isWrapperFor(Class aClass) { + return false; + } + } + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractorUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractorUnitTests.java new file mode 100644 index 0000000000..8616882934 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractorUnitTests.java @@ -0,0 +1,575 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Consumer; + +import org.assertj.core.api.Assertions; +import org.assertj.core.api.ThrowingConsumer; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.domain.RowDocument; + +/** + * Unit tests for the {@link RowDocumentResultSetExtractor}. 
+ * + * @author Jens Schauder + * @author Mark Paluch + */ +public class RowDocumentResultSetExtractorUnitTests { + + RelationalMappingContext context = new JdbcMappingContext(new DefaultNamingStrategy()); + + private final PathToColumnMapping column = new PathToColumnMapping() { + @Override + public String column(AggregatePath path) { + return RowDocumentResultSetExtractorUnitTests.this.column(path); + } + + @Override + public String keyColumn(AggregatePath path) { + return column(path) + "_key"; + } + }; + + RowDocumentResultSetExtractor documentExtractor = new RowDocumentResultSetExtractor(context, column); + + @Test // GH-1446 + void emptyResultSetYieldsEmptyResult() { + + Assertions.setMaxElementsForPrinting(20); + + new ResultSetTester(WithEmbedded.class, context).resultSet(rsc -> { + rsc.withPaths("id1", "name"); + }).run(resultSet -> { + assertThatIllegalStateException() + .isThrownBy(() -> documentExtractor.extractNextDocument(WithEmbedded.class, resultSet)); + }); + } + + @Test // GH-1446 + void singleSimpleEntityGetsExtractedFromSingleRow() throws SQLException { + + testerFor(WithEmbedded.class).resultSet(rsc -> { + rsc.withPaths("id1", "name") // + .withRow(1, "Alfred"); + }).run(document -> { + + assertThat(document).containsEntry("id1", 1).containsEntry("name", "Alfred"); + }); + } + + @Test // GH-1446 + void multipleSimpleEntitiesGetExtractedFromMultipleRows() throws SQLException { + + new ResultSetTester(WithEmbedded.class, context).resultSet(rsc -> { + rsc.withPaths("id1", "name") // + .withRow(1, "Alfred") // + .withRow(2, "Bertram"); + }).run(resultSet -> { + + RowDocument document = documentExtractor.extractNextDocument(WithEmbedded.class, resultSet); + assertThat(document).containsEntry("id1", 1).containsEntry("name", "Alfred"); + + RowDocument nextDocument = documentExtractor.extractNextDocument(WithEmbedded.class, resultSet); + assertThat(nextDocument).containsEntry("id1", 2).containsEntry("name", "Bertram"); + }); + } + + @Nested + class 
EmbeddedReference { + @Test // GH-1446 + void embeddedGetsExtractedFromSingleRow() { + + testerFor(WithEmbedded.class).resultSet(rsc -> { + rsc.withPaths("id1", "embeddedNullable.dummyName") // + .withRow(1, "Imani"); + }).run(document -> { + + assertThat(document).containsEntry("id1", 1).containsEntry("dummy_name", "Imani"); + }); + } + + @Test // GH-1446 + void emptyEmbeddedGetsExtractedFromSingleRow() throws SQLException { + + testerFor(WithEmbedded.class).resultSet(rsc -> { + rsc.withPaths("id1", "embeddedNullable.dummyName") // + .withRow(1, null); + }).run(document -> { + + assertThat(document).hasSize(1).containsEntry("id1", 1); + }); + } + } + + @Nested + class ToOneRelationships { + @Test // GH-1446 + void entityReferenceGetsExtractedFromSingleRow() { + + testerFor(WithOneToOne.class).resultSet(rsc -> { + rsc.withPaths("id1", "related", "related.dummyName") // + .withRow(1, 1, "Dummy Alfred"); + }).run(document -> { + + assertThat(document).containsKey("related").containsEntry("related", + new RowDocument().append("dummy_name", "Dummy Alfred")); + }); + } + + @Test // GH-1446 + void nullEntityReferenceGetsExtractedFromSingleRow() { + + testerFor(WithOneToOne.class).resultSet(rsc -> { + rsc.withPaths("id1", "related", "related.dummyName") // + .withRow(1, null, "Dummy Alfred"); + }).run(document -> { + + assertThat(document).containsKey("related").containsEntry("related", + new RowDocument().append("dummy_name", "Dummy Alfred")); + }); + } + } + + @Nested + class Sets { + + @Test // GH-1446 + void extractEmptySetReference() { + + testerFor(WithSets.class).resultSet(rsc -> { + rsc.withPaths("id1", "first", "first.dummyName") // + .withRow(1, null, null)// + .withRow(1, null, null) // + .withRow(1, null, null); + }).run(document -> { + + assertThat(document).hasSize(1).containsEntry("id1", 1); + }); + } + + @Test // GH-1446 + void extractSingleSetReference() { + + testerFor(WithSets.class).resultSet(rsc -> { + 
rsc.withPath("id1").withKey("first").withPath("first.dummyName") // + .withRow(1, 1, "Dummy Alfred")// + .withRow(1, 2, "Dummy Berta") // + .withRow(1, 3, "Dummy Carl"); + }).run(document -> { + + assertThat(document).containsEntry("id1", 1).containsEntry("first", + Arrays.asList(RowDocument.of("dummy_name", "Dummy Alfred"), RowDocument.of("dummy_name", "Dummy Berta"), + RowDocument.of("dummy_name", "Dummy Carl"))); + }); + } + + @Test // GH-1446 + void extractSetReferenceAndSimpleProperty() { + + testerFor(WithSets.class).resultSet(rsc -> { + rsc.withPaths("id1", "name").withKey("first").withPath("first.dummyName") // + .withRow(1, "Simplicissimus", 1, "Dummy Alfred")// + .withRow(1, null, 2, "Dummy Berta") // + .withRow(1, null, 3, "Dummy Carl"); + }).run(document -> { + + assertThat(document).containsEntry("id1", 1).containsEntry("name", "Simplicissimus").containsEntry("first", + Arrays.asList(RowDocument.of("dummy_name", "Dummy Alfred"), RowDocument.of("dummy_name", "Dummy Berta"), + RowDocument.of("dummy_name", "Dummy Carl"))); + }); + } + + @Test // GH-1446 + void extractMultipleSetReference() { + + testerFor(WithSets.class).resultSet(rsc -> { + rsc.withPaths("id1").withKey("first").withPath("first.dummyName").withKey("second").withPath("second.dummyName") // + .withRow(1, 1, "Dummy Alfred", 1, "Other Ephraim")// + .withRow(1, 2, "Dummy Berta", 2, "Other Zeno") // + .withRow(1, 3, "Dummy Carl", null, null); + }).run(document -> { + + assertThat(document).hasSize(3) + .containsEntry("first", + Arrays.asList(RowDocument.of("dummy_name", "Dummy Alfred"), RowDocument.of("dummy_name", "Dummy Berta"), + RowDocument.of("dummy_name", "Dummy Carl"))) + .containsEntry("second", Arrays.asList(RowDocument.of("dummy_name", "Other Ephraim"), + RowDocument.of("dummy_name", "Other Zeno"))); + }); + } + + @Nested + class Lists { + + @Test // GH-1446 + void extractSingleListReference() { + + testerFor(WithList.class).resultSet(rsc -> { + 
rsc.withPaths("id").withKey("withoutIds").withPath("withoutIds.name") // + .withRow(1, 1, "Dummy Alfred")// + .withRow(1, 2, "Dummy Berta") // + .withRow(1, 3, "Dummy Carl"); + }).run(document -> { + + assertThat(document).hasSize(2).containsEntry("without_ids", + Arrays.asList(RowDocument.of("name", "Dummy Alfred"), RowDocument.of("name", "Dummy Berta"), + RowDocument.of("name", "Dummy Carl"))); + }); + } + + @Test // GH-1446 + void extractSingleUnorderedListReference() { + + testerFor(WithList.class).resultSet(rsc -> { + rsc.withPaths("id").withKey("withoutIds").withPath("withoutIds.name") // + .withRow(1, 0, "Dummy Alfred")// + .withRow(1, 2, "Dummy Carl") // + .withRow(1, 1, "Dummy Berta"); + }).run(document -> { + + assertThat(document).containsKey("without_ids"); + List dummy_list = document.getList("without_ids"); + assertThat(dummy_list).hasSize(3).contains(new RowDocument().append("name", "Dummy Alfred")) + .contains(new RowDocument().append("name", "Dummy Berta")) + .contains(new RowDocument().append("name", "Dummy Carl")); + }); + } + } + } + + @Nested + class Maps { + + @Test // GH-1446 + void extractSingleMapReference() { + + testerFor(WithMaps.class).resultSet(rsc -> { + rsc.withPaths("id1").withKey("first").withPath("first.dummyName") // + .withRow(1, "alpha", "Dummy Alfred")// + .withRow(1, "beta", "Dummy Berta") // + .withRow(1, "gamma", "Dummy Carl"); + }).run(document -> { + + assertThat(document).containsEntry("first", Map.of("alpha", RowDocument.of("dummy_name", "Dummy Alfred"), + "beta", RowDocument.of("dummy_name", "Dummy Berta"), "gamma", RowDocument.of("dummy_name", "Dummy Carl"))); + }); + } + + @Test // GH-1446 + void extractMultipleCollectionReference() { + + testerFor(WithMapsAndList.class).resultSet(rsc -> { + rsc.withPaths("id1").withKey("map").withPath("map.dummyName").withKey("list").withPath("list.name") // + .withRow(1, "alpha", "Dummy Alfred", 1, "Other Ephraim")// + .withRow(1, "beta", "Dummy Berta", 2, "Other Zeno") // + 
.withRow(1, "gamma", "Dummy Carl", null, null); + }).run(document -> { + + assertThat(document).containsEntry("map", Map.of("alpha", RowDocument.of("dummy_name", "Dummy Alfred"), // + "beta", RowDocument.of("dummy_name", "Dummy Berta"), // + "gamma", RowDocument.of("dummy_name", "Dummy Carl"))) // + .containsEntry("list", + Arrays.asList(RowDocument.of("name", "Other Ephraim"), RowDocument.of("name", "Other Zeno"))); + }); + } + + @Test // GH-1446 + void extractNestedMapsWithId() { + + testerFor(WithMaps.class).resultSet(rsc -> { + rsc.withPaths("id1", "name").withKey("intermediate") + .withPaths("intermediate.iId", "intermediate.intermediateName").withKey("intermediate.dummyMap") + .withPaths("intermediate.dummyMap.dummyName") + // + .withRow(1, "Alfred", "alpha", 23, "Inami", "omega", "Dustin") // + .withRow(1, null, "alpha", 23, null, "zeta", "Dora") // + .withRow(1, null, "beta", 24, "Ina", "eta", "Dotty") // + .withRow(1, null, "gamma", 25, "Ion", null, null); + }).run(document -> { + + assertThat(document).containsEntry("id1", 1).containsEntry("name", "Alfred"); + + Map intermediate = document.getMap("intermediate"); + assertThat(intermediate).containsKeys("alpha", "beta", "gamma"); + + RowDocument alpha = (RowDocument) intermediate.get("alpha"); + assertThat(alpha).containsEntry("i_id", 23).containsEntry("intermediate_name", "Inami"); + Map dummyMap = alpha.getMap("dummy_map"); + assertThat(dummyMap).containsEntry("omega", RowDocument.of("dummy_name", "Dustin")).containsEntry("zeta", + RowDocument.of("dummy_name", "Dora")); + + RowDocument gamma = (RowDocument) intermediate.get("gamma"); + assertThat(gamma).hasSize(2).containsEntry("i_id", 25).containsEntry("intermediate_name", "Ion"); + }); + } + } + + private String column(AggregatePath path) { + return path.toDotPath(); + } + + private static class WithEmbedded { + + @Id long id1; + String name; + @Embedded.Nullable DummyEntity embeddedNullable; + @Embedded.Empty DummyEntity embeddedNonNull; + } + + 
private static class WithOneToOne { + + @Id long id1; + String name; + DummyEntity related; + } + + private static class Person { + + String name; + } + + private static class PersonWithId { + + @Id Long id; + String name; + } + + private static class WithList { + + @Id long id; + + List withoutIds; + List withIds; + } + + private static class WithSets { + + @Id long id1; + String name; + Set first; + Set second; + } + + private static class WithMaps { + + @Id long id1; + + String name; + + Map first; + Map intermediate; + Map noId; + } + + private static class WithMapsAndList { + + @Id long id1; + + Map map; + List list; + } + + private static class Intermediate { + + @Id long iId; + String intermediateName; + + Set dummies; + List dummyList; + Map dummyMap; + } + + private static class IntermediateNoId { + + String intermediateName; + + Set dummies; + List dummyList; + Map dummyMap; + } + + private static class DummyEntity { + String dummyName; + Long longValue; + } + + /** + * Configurer for a {@link ResultSet}. + */ + interface ResultSetConfigurer { + + ResultSetConfigurer withColumns(String... columns); + + /** + * Add mapped paths. + * + * @param path + * @return + */ + ResultSetConfigurer withPath(String path); + + /** + * Add mapped paths. + * + * @param paths + * @return + */ + default ResultSetConfigurer withPaths(String... paths) { + for (String path : paths) { + withPath(path); + } + + return this; + } + + /** + * Add mapped key paths. + * + * @param path + * @return + */ + ResultSetConfigurer withKey(String path); + + ResultSetConfigurer withRow(Object... 
values); + } + + DocumentTester testerFor(Class entityType) { + return new DocumentTester(entityType, context, documentExtractor); + } + + private static class AbstractTester { + + private final Class entityType; + private final RelationalMappingContext context; + ResultSet resultSet; + + AbstractTester(Class entityType, RelationalMappingContext context) { + this.entityType = entityType; + this.context = context; + } + + AbstractTester resultSet(Consumer configuration) { + + List values = new ArrayList<>(); + List columns = new ArrayList<>(); + ResultSetConfigurer configurer = new ResultSetConfigurer() { + @Override + public ResultSetConfigurer withColumns(String... columnNames) { + columns.addAll(Arrays.asList(columnNames)); + return this; + } + + public ResultSetConfigurer withPath(String path) { + + PersistentPropertyPath propertyPath = context.getPersistentPropertyPath(path, + entityType); + + columns.add(context.getAggregatePath(propertyPath).toDotPath()); + return this; + } + + public ResultSetConfigurer withKey(String path) { + + PersistentPropertyPath propertyPath = context.getPersistentPropertyPath(path, + entityType); + + columns.add(context.getAggregatePath(propertyPath).toDotPath() + "_key"); + return this; + } + + @Override + public ResultSetConfigurer withRow(Object... 
rowValues) { + values.addAll(Arrays.asList(rowValues)); + return this; + } + }; + + configuration.accept(configurer); + this.resultSet = ResultSetTestUtil.mockResultSet(columns, values.toArray()); + + return this; + } + } + + private static class DocumentTester extends AbstractTester { + + private final Class entityType; + private final RowDocumentResultSetExtractor extractor; + + DocumentTester(Class entityType, RelationalMappingContext context, RowDocumentResultSetExtractor extractor) { + + super(entityType, context); + + this.entityType = entityType; + this.extractor = extractor; + } + + @Override + DocumentTester resultSet(Consumer configuration) { + + super.resultSet(configuration); + return this; + } + + public void run(ThrowingConsumer action) { + + try { + action.accept(extractor.extractNextDocument(entityType, resultSet)); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + } + + private static class ResultSetTester extends AbstractTester { + + ResultSetTester(Class entityType, RelationalMappingContext context) { + super(entityType, context); + } + + @Override + ResultSetTester resultSet(Consumer configuration) { + + super.resultSet(configuration); + + return this; + } + + public void run(ThrowingConsumer action) { + action.accept(resultSet); + } + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorContextBasedNamingStrategyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorContextBasedNamingStrategyUnitTests.java new file mode 100644 index 0000000000..745698211b --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorContextBasedNamingStrategyUnitTests.java @@ -0,0 +1,246 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.PersistentPropertyPathTestUtils; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Unit tests to verify a contextual {@link NamingStrategy} implementation that customizes using a user-centric + * {@link ThreadLocal}. NOTE: Due to the need to verify SQL generation and {@link SqlGenerator}'s package-private status + * suggests this unit test exist in this package, not {@literal org.springframework.data.jdbc.mappings.model}. 
+ * + * @author Greg Turnquist + */ +public class SqlGeneratorContextBasedNamingStrategyUnitTests { + + RelationalMappingContext context = new JdbcMappingContext(); + ThreadLocal userHandler = new ThreadLocal<>(); + + /** + * Use a {@link NamingStrategy}, but override the schema with a {@link ThreadLocal}-based setting. + */ + private final NamingStrategy contextualNamingStrategy = new NamingStrategy() { + + @Override + public String getSchema() { + return userHandler.get(); + } + }; + + @Test // DATAJDBC-107 + public void findOne() { + + testAgainstMultipleUsers(user -> { + + SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); + + String sql = sqlGenerator.getFindOne(); + + SoftAssertions softAssertions = new SoftAssertions(); + softAssertions.assertThat(sql) // + .startsWith("SELECT") // + .contains(user + ".dummy_entity.id AS id,") // + .contains(user + ".dummy_entity.name AS name,") // + .contains("ref.l1id AS ref_l1id") // + .contains("ref.content AS ref_content") // + .contains("FROM " + user + ".dummy_entity"); + softAssertions.assertAll(); + }); + } + + @Test // DATAJDBC-107 + public void cascadingDeleteFirstLevel() { + + testAgainstMultipleUsers(user -> { + + SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); + + String sql = sqlGenerator.createDeleteByPath(getPath("ref")); + + assertThat(sql).isEqualTo( // + "DELETE FROM " // + + user + ".referenced_entity WHERE " // + + user + ".referenced_entity.dummy_entity = :rootId" // + ); + }); + } + + @Test // DATAJDBC-107 + public void cascadingDeleteAllSecondLevel() { + + testAgainstMultipleUsers(user -> { + + SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); + + String sql = sqlGenerator.createDeleteByPath(getPath("ref.further")); + + assertThat(sql).isEqualTo( // + "DELETE FROM " + user + ".second_level_referenced_entity " // + + "WHERE " + user + ".second_level_referenced_entity.referenced_entity IN " // + + "(SELECT " + user + 
".referenced_entity.l1id FROM " + user + ".referenced_entity " // + + "WHERE " + user + ".referenced_entity.dummy_entity = :rootId)"); + }); + } + + @Test // DATAJDBC-107 + public void deleteAll() { + + testAgainstMultipleUsers(user -> { + + SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); + + String sql = sqlGenerator.createDeleteAllSql(null); + + assertThat(sql).isEqualTo("DELETE FROM " + user + ".dummy_entity"); + }); + } + + @Test // DATAJDBC-107 + public void cascadingDeleteAllFirstLevel() { + + testAgainstMultipleUsers(user -> { + + SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); + + String sql = sqlGenerator.createDeleteAllSql(getPath("ref")); + + assertThat(sql).isEqualTo( // + "DELETE FROM " + user + ".referenced_entity WHERE " + user + ".referenced_entity.dummy_entity IS NOT NULL"); + }); + } + + @Test // DATAJDBC-107 + public void cascadingDeleteSecondLevel() { + + testAgainstMultipleUsers(user -> { + + SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); + + String sql = sqlGenerator.createDeleteAllSql(getPath("ref.further")); + + assertThat(sql).isEqualTo( // + "DELETE FROM " + user + ".second_level_referenced_entity " // + + "WHERE " + user + ".second_level_referenced_entity.referenced_entity IN " // + + "(SELECT " + user + ".referenced_entity.l1id FROM " + user + ".referenced_entity " // + + "WHERE " + user + ".referenced_entity.dummy_entity IS NOT NULL)"); + }); + } + + private PersistentPropertyPath getPath(String path) { + return PersistentPropertyPathTestUtils.getPath(path, DummyEntity.class, this.context); + } + + /** + * Take a set of user-based assertions and run them against multiple users, in different threads. 
+ */ + private void testAgainstMultipleUsers(Consumer testAssertions) { + + AtomicReference exception = new AtomicReference<>(); + CountDownLatch latch = new CountDownLatch(2); + + threadedTest("User1", latch, testAssertions, exception); + threadedTest("User2", latch, testAssertions, exception); + + try { + if (!latch.await(10L, TimeUnit.SECONDS)) { + fail("Test failed due to a time out."); + } + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + + Error ex = exception.get(); + if (ex != null) { + throw ex; + } + } + + /** + * Inside a {@link Runnable}, fetch the {@link ThreadLocal}-based username and execute the provided set of assertions. + * Then signal through the provided {@link CountDownLatch}. + */ + private void threadedTest(String user, CountDownLatch latch, Consumer testAssertions, + AtomicReference exception) { + + new Thread(() -> { + + try { + + userHandler.set(user); + testAssertions.accept(user); + + } catch (Error ex) { + exception.compareAndSet(null, ex); + } finally { + latch.countDown(); + } + + }).start(); + } + + /** + * Plug in a custom {@link NamingStrategy} for this test case. 
+ */ + private SqlGenerator configureSqlGenerator(NamingStrategy namingStrategy) { + + RelationalMappingContext context = new JdbcMappingContext(namingStrategy); + JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); + + return new SqlGenerator(context, converter, persistentEntity, NonQuotingDialect.INSTANCE); + } + + @SuppressWarnings("unused") + static class DummyEntity { + + @Id Long id; + String name; + ReferencedEntity ref; + } + + @SuppressWarnings("unused") + static class ReferencedEntity { + + @Id Long l1id; + String content; + SecondLevelReferencedEntity further; + } + + @SuppressWarnings("unused") + static class SecondLevelReferencedEntity { + + @Id Long l2id; + String something; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorEmbeddedUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorEmbeddedUnitTests.java new file mode 100644 index 0000000000..7c510617b2 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorEmbeddedUnitTests.java @@ -0,0 +1,377 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.PersistentPropertyPathTestUtils; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.relational.core.sql.Aliased; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.lang.Nullable; + +/** + * Unit tests for the {@link SqlGenerator} in a context of the {@link Embedded} annotation. 
+ * + * @author Bastian Wilhelm + * @author Mark Paluch + */ +class SqlGeneratorEmbeddedUnitTests { + + private final RelationalMappingContext context = new JdbcMappingContext(); + private JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + private SqlGenerator sqlGenerator; + + @BeforeEach + void setUp() { + this.context.setForceQuote(false); + this.sqlGenerator = createSqlGenerator(DummyEntity.class); + } + + SqlGenerator createSqlGenerator(Class type) { + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(type); + return new SqlGenerator(context, converter, persistentEntity, NonQuotingDialect.INSTANCE); + } + + @Test // DATAJDBC-111 + void findOne() { + final String sql = sqlGenerator.getFindOne(); + + assertSoftly(softly -> { + + softly.assertThat(sql).startsWith("SELECT") // + .contains("dummy_entity.id1 AS id1") // + .contains("dummy_entity.test AS test") // + .contains("dummy_entity.attr1 AS attr1") // + .contains("dummy_entity.attr2 AS attr2") // + .contains("dummy_entity.prefix2_attr1 AS prefix2_attr1") // + .contains("dummy_entity.prefix2_attr2 AS prefix2_attr2") // + .contains("dummy_entity.prefix_test AS prefix_test") // + .contains("dummy_entity.prefix_attr1 AS prefix_attr1") // + .contains("dummy_entity.prefix_attr2 AS prefix_attr2") // + .contains("dummy_entity.prefix_prefix2_attr1 AS prefix_prefix2_attr1") // + .contains("dummy_entity.prefix_prefix2_attr2 AS prefix_prefix2_attr2") // + .contains("WHERE dummy_entity.id1 = :id") // + .doesNotContain("JOIN").doesNotContain("embeddable"); // + }); + } + + @Test // DATAJDBC-111 + void findAll() { + final String sql = sqlGenerator.getFindAll(); + + assertSoftly(softly -> { + + softly.assertThat(sql).startsWith("SELECT") // + .contains("dummy_entity.id1 AS id1") // + .contains("dummy_entity.test AS test") // + .contains("dummy_entity.attr1 AS attr1") // + .contains("dummy_entity.attr2 AS 
attr2") // + .contains("dummy_entity.prefix2_attr1 AS prefix2_attr1") // + .contains("dummy_entity.prefix2_attr2 AS prefix2_attr2") // + .contains("dummy_entity.prefix_test AS prefix_test") // + .contains("dummy_entity.prefix_attr1 AS prefix_attr1") // + .contains("dummy_entity.prefix_attr2 AS prefix_attr2") // + .contains("dummy_entity.prefix_prefix2_attr1 AS prefix_prefix2_attr1") // + .contains("dummy_entity.prefix_prefix2_attr2 AS prefix_prefix2_attr2") // + .doesNotContain("JOIN") // + .doesNotContain("embeddable"); + }); + } + + @Test // DATAJDBC-111 + void findAllInList() { + final String sql = sqlGenerator.getFindAllInList(); + + assertSoftly(softly -> { + + softly.assertThat(sql).startsWith("SELECT") // + .contains("dummy_entity.id1 AS id1") // + .contains("dummy_entity.test AS test") // + .contains("dummy_entity.attr1 AS attr1") // + .contains("dummy_entity.attr2 AS attr2").contains("dummy_entity.prefix2_attr1 AS prefix2_attr1") // + .contains("dummy_entity.prefix2_attr2 AS prefix2_attr2") // + .contains("dummy_entity.prefix_test AS prefix_test") // + .contains("dummy_entity.prefix_attr1 AS prefix_attr1") // + .contains("dummy_entity.prefix_attr2 AS prefix_attr2") // + .contains("dummy_entity.prefix_prefix2_attr1 AS prefix_prefix2_attr1") // + .contains("dummy_entity.prefix_prefix2_attr2 AS prefix_prefix2_attr2") // + .contains("WHERE dummy_entity.id1 IN (:ids)") // + .doesNotContain("JOIN") // + .doesNotContain("embeddable"); + }); + } + + @Test // DATAJDBC-111 + void insert() { + final String sql = sqlGenerator.getInsert(emptySet()); + + assertSoftly(softly -> { + + softly.assertThat(sql) // + .startsWith("INSERT INTO") // + .contains("dummy_entity") // + .contains(":test") // + .contains(":attr1") // + .contains(":attr2") // + .contains(":prefix2_attr1") // + .contains(":prefix2_attr2") // + .contains(":prefix_test") // + .contains(":prefix_attr1") // + .contains(":prefix_attr2") // + .contains(":prefix_prefix2_attr1") // + 
.contains(":prefix_prefix2_attr2"); + }); + } + + @Test // DATAJDBC-111 + void update() { + final String sql = sqlGenerator.getUpdate(); + + assertSoftly(softly -> { + + softly.assertThat(sql) // + .startsWith("UPDATE") // + .contains("dummy_entity") // + .contains("test = :test") // + .contains("attr1 = :attr1") // + .contains("attr2 = :attr2") // + .contains("prefix2_attr1 = :prefix2_attr1") // + .contains("prefix2_attr2 = :prefix2_attr2") // + .contains("prefix_test = :prefix_test") // + .contains("prefix_attr1 = :prefix_attr1") // + .contains("prefix_attr2 = :prefix_attr2") // + .contains("prefix_prefix2_attr1 = :prefix_prefix2_attr1") // + .contains("prefix_prefix2_attr2 = :prefix_prefix2_attr2"); + }); + } + + @Test // DATAJDBC-340 + @Disabled // this is just broken right now + void deleteByPath() { + + final String sql = sqlGenerator + .createDeleteByPath(PersistentPropertyPathTestUtils.getPath("embedded.other", DummyEntity2.class, context)); + + assertThat(sql).containsSequence("DELETE FROM other_entity", // + "WHERE", // + "embedded_with_reference IN (", // + "SELECT ", // + "id ", // + "FROM", // + "dummy_entity2", // + "WHERE", // + "embedded_with_reference = :rootId"); + } + + @Test // DATAJDBC-340 + void noJoinForEmbedded() { + + SqlGenerator.Join join = generateJoin("embeddable", DummyEntity.class); + + assertThat(join).isNull(); + } + + @Test // DATAJDBC-340 + void columnForEmbeddedProperty() { + + assertThat(generatedColumn("embeddable.test", DummyEntity.class)) // + .extracting( // + c -> c.getName(), // + c -> c.getTable().getName(), // + c -> getAlias(c.getTable()), // + this::getAlias) // + .containsExactly( // + SqlIdentifier.unquoted("test"), // + SqlIdentifier.unquoted("dummy_entity"), // + null, // + SqlIdentifier.unquoted("test")); + } + + @Test // GH-1695 + void columnForEmbeddedPropertyWithPrefix() { + assertThat(generatedColumn("nested.childId", WithEmbeddedAndAggregateReference.class)) + .hasToString("a.nested_child_id AS 
nested_child_id"); + } + + @Test // DATAJDBC-340 + void noColumnForEmbedded() { + + assertThat(generatedColumn("embeddable", DummyEntity.class)) // + .isNull(); + } + + @Test // DATAJDBC-340 + void noJoinForPrefixedEmbedded() { + + SqlGenerator.Join join = generateJoin("prefixedEmbeddable", DummyEntity.class); + + assertThat(join).isNull(); + } + + @Test // DATAJDBC-340 + void columnForPrefixedEmbeddedProperty() { + + assertThat(generatedColumn("prefixedEmbeddable.test", DummyEntity.class)) // + .extracting( // + c -> c.getName(), // + c -> c.getTable().getName(), // + c -> getAlias(c.getTable()), // + this::getAlias) // + .containsExactly( // + SqlIdentifier.unquoted("prefix_test"), // + SqlIdentifier.unquoted("dummy_entity"), // + null, // + SqlIdentifier.unquoted("prefix_test")); + } + + @Test // DATAJDBC-340 + void noJoinForCascadedEmbedded() { + + SqlGenerator.Join join = generateJoin("embeddable.embeddable", DummyEntity.class); + + assertThat(join).isNull(); + } + + @Test // DATAJDBC-340 + void columnForCascadedEmbeddedProperty() { + + assertThat(generatedColumn("embeddable.embeddable.attr1", DummyEntity.class)) // + .extracting(c -> c.getName(), c -> c.getTable().getName(), c -> getAlias(c.getTable()), this::getAlias) + .containsExactly(SqlIdentifier.unquoted("attr1"), SqlIdentifier.unquoted("dummy_entity"), null, + SqlIdentifier.unquoted("attr1")); + } + + @Test // DATAJDBC-340 + void joinForEmbeddedWithReference() { + + SqlGenerator.Join join = generateJoin("embedded.other", DummyEntity2.class); + + assertSoftly(softly -> { + + softly.assertThat(join.getJoinTable().getName()).isEqualTo(SqlIdentifier.unquoted("other_entity")); + softly.assertThat(join.getJoinColumn().getTable()).isEqualTo(join.getJoinTable()); + softly.assertThat(join.getJoinColumn().getName()).isEqualTo(SqlIdentifier.unquoted("dummy_entity2")); + softly.assertThat(join.getParentId().getName()).isEqualTo(SqlIdentifier.unquoted("id")); + 
softly.assertThat(join.getParentId().getTable().getName()).isEqualTo(SqlIdentifier.unquoted("dummy_entity2")); + }); + } + + @Test // DATAJDBC-340 + void columnForEmbeddedWithReferenceProperty() { + + assertThat(generatedColumn("embedded.other.value", DummyEntity2.class)) // + .extracting( // + c -> c.getName(), // + c -> c.getTable().getName(), // + c -> getAlias(c.getTable()), // + this::getAlias) // + .containsExactly( // + SqlIdentifier.unquoted("value"), // + SqlIdentifier.unquoted("other_entity"), // + SqlIdentifier.quoted("prefix_other"), // + SqlIdentifier.unquoted("prefix_other_value")); + } + + private SqlGenerator.Join generateJoin(String path, Class type) { + return createSqlGenerator(type) + .getJoin(context.getAggregatePath(PersistentPropertyPathTestUtils.getPath(path, type, context))); + } + + @Nullable + private SqlIdentifier getAlias(Object maybeAliased) { + + if (maybeAliased instanceof Aliased) { + return ((Aliased) maybeAliased).getAlias(); + } + return null; + } + + private org.springframework.data.relational.core.sql.Column generatedColumn(String path, Class type) { + + return createSqlGenerator(type) + .getColumn(context.getAggregatePath(PersistentPropertyPathTestUtils.getPath(path, type, context))); + } + + @SuppressWarnings("unused") + static class DummyEntity { + + @Column("id1") + @Id Long id; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix_") CascadedEmbedded prefixedEmbeddable; + + @Embedded(onEmpty = OnEmpty.USE_NULL) CascadedEmbedded embeddable; + } + + @SuppressWarnings("unused") + static class CascadedEmbedded { + String test; + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix2_") Embeddable prefixedEmbeddable; + @Embedded(onEmpty = OnEmpty.USE_NULL) Embeddable embeddable; + } + + @SuppressWarnings("unused") + static class Embeddable { + Long attr1; + String attr2; + } + + @SuppressWarnings("unused") + static class DummyEntity2 { + + @Id Long id; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix_") 
EmbeddedWithReference embedded; + } + + static class EmbeddedWithReference { + OtherEntity other; + } + + static class OtherEntity { + String value; + } + + @Table("a") + private + record WithEmbeddedAndAggregateReference(@Id long id, + @Embedded.Nullable(prefix = "nested_") WithAggregateReference nested) { + } + + private record WithAggregateReference(AggregateReference childId) { + } + + private record Child(@Id long id) { + + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorFixedNamingStrategyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorFixedNamingStrategyUnitTests.java new file mode 100644 index 0000000000..502b310b52 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorFixedNamingStrategyUnitTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.PersistentPropertyPathTestUtils; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.dialect.AnsiDialect; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Unit tests the {@link SqlGenerator} with a fixed {@link NamingStrategy} implementation containing a hard wired + * schema, table, and property prefix. 
+ * + * @author Greg Turnquist + * @author Mark Paluch + */ +class SqlGeneratorFixedNamingStrategyUnitTests { + + private final NamingStrategy fixedCustomTablePrefixStrategy = new NamingStrategy() { + + @Override + public String getSchema() { + return "FixedCustomSchema"; + } + + @Override + public String getTableName(Class type) { + return "FixedCustomTablePrefix_" + type.getSimpleName(); + } + + @Override + public String getColumnName(RelationalPersistentProperty property) { + return "FixedCustomPropertyPrefix_" + property.getName(); + } + }; + + private final NamingStrategy upperCaseLowerCaseStrategy = new NamingStrategy() { + + @Override + public String getTableName(Class type) { + return type.getSimpleName().toUpperCase(); + } + + @Override + public String getColumnName(RelationalPersistentProperty property) { + return property.getName().toLowerCase(); + } + }; + + private RelationalMappingContext context; + + @Test // DATAJDBC-107 + void findOneWithOverriddenFixedTableName() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.getFindOne(); + + SoftAssertions softAssertions = new SoftAssertions(); + softAssertions.assertThat(sql) // + .isEqualTo( + "SELECT \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_ID\" AS \"FIXEDCUSTOMPROPERTYPREFIX_ID\", " + + "\"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_NAME\" AS \"FIXEDCUSTOMPROPERTYPREFIX_NAME\", " + + "\"ref\".\"FIXEDCUSTOMPROPERTYPREFIX_L1ID\" AS \"REF_FIXEDCUSTOMPROPERTYPREFIX_L1ID\", " + + "\"ref\".\"FIXEDCUSTOMPROPERTYPREFIX_CONTENT\" AS \"REF_FIXEDCUSTOMPROPERTYPREFIX_CONTENT\", " + + "\"ref_further\".\"FIXEDCUSTOMPROPERTYPREFIX_L2ID\" AS \"REF_FURTHER_FIXEDCUSTOMPROPERTYPREFIX_L2ID\", " + + "\"ref_further\".\"FIXEDCUSTOMPROPERTYPREFIX_SOMETHING\" AS \"REF_FURTHER_FIXEDCUSTOMPROPERTYPREFIX_SOMETHING\" " + + "FROM 
\"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" " + + "LEFT OUTER JOIN \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" \"ref\" ON \"ref\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" = \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_ID\" L" + + "EFT OUTER JOIN \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_SECONDLEVELREFERENCEDENTITY\" \"ref_further\" ON \"ref_further\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" = \"ref\".\"FIXEDCUSTOMPROPERTYPREFIX_L1ID\" " + + "WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_ID\" = :id"); + softAssertions.assertAll(); + } + + @Test // DATAJDBC-107 + void findOneWithUppercasedTablesAndLowercasedColumns() { + + SqlGenerator sqlGenerator = configureSqlGenerator(upperCaseLowerCaseStrategy); + + String sql = sqlGenerator.getFindOne(); + + SoftAssertions softAssertions = new SoftAssertions(); + softAssertions.assertThat(sql) // + .isEqualTo( + "SELECT \"DUMMYENTITY\".\"ID\" AS \"ID\", \"DUMMYENTITY\".\"NAME\" AS \"NAME\", \"ref\".\"L1ID\" AS \"REF_L1ID\", \"ref\".\"CONTENT\" AS \"REF_CONTENT\", " + + "\"ref_further\".\"L2ID\" AS \"REF_FURTHER_L2ID\", \"ref_further\".\"SOMETHING\" AS \"REF_FURTHER_SOMETHING\" " + + "FROM \"DUMMYENTITY\" " + + "LEFT OUTER JOIN \"REFERENCEDENTITY\" \"ref\" ON \"ref\".\"DUMMYENTITY\" = \"DUMMYENTITY\".\"ID\" " + + "LEFT OUTER JOIN \"SECONDLEVELREFERENCEDENTITY\" \"ref_further\" ON \"ref_further\".\"REFERENCEDENTITY\" = \"ref\".\"L1ID\" " + + "WHERE \"DUMMYENTITY\".\"ID\" = :id"); + softAssertions.assertAll(); + } + + @Test // DATAJDBC-107 + void cascadingDeleteFirstLevel() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.createDeleteByPath(getPath("ref")); + + assertThat(sql).isEqualTo("DELETE FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" " + + "WHERE 
\"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" = :rootId"); + } + + @Test // DATAJDBC-107 + void cascadingDeleteAllSecondLevel() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.createDeleteByPath(getPath("ref.further")); + + assertThat(sql) + .isEqualTo("DELETE FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_SECONDLEVELREFERENCEDENTITY\" " + + "WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_SECONDLEVELREFERENCEDENTITY\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" IN " + + "(SELECT \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_L1ID\" " + + "FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" " + + "WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" = :rootId)"); + } + + @Test // DATAJDBC-107 + void deleteAll() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.createDeleteAllSql(null); + + assertThat(sql).isEqualTo("DELETE FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\""); + } + + @Test // DATAJDBC-107 + void cascadingDeleteAllFirstLevel() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.createDeleteAllSql(getPath("ref")); + + assertThat(sql).isEqualTo("DELETE FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" " + + "WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" IS NOT NULL"); + } + + @Test // DATAJDBC-107 + void cascadingDeleteSecondLevel() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.createDeleteAllSql(getPath("ref.further")); + + assertThat(sql) + .isEqualTo("DELETE FROM 
\"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_SECONDLEVELREFERENCEDENTITY\" " + + "WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_SECONDLEVELREFERENCEDENTITY\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" IN " + + "(SELECT \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_L1ID\" " + + "FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\" " + + "WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_REFERENCEDENTITY\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" IS NOT NULL)"); + } + + @Test // DATAJDBC-113 + void deleteByList() { + + SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); + + String sql = sqlGenerator.getDeleteByList(); + + assertThat(sql).isEqualTo( + "DELETE FROM \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\" WHERE \"FIXEDCUSTOMSCHEMA\".\"FIXEDCUSTOMTABLEPREFIX_DUMMYENTITY\".\"FIXEDCUSTOMPROPERTYPREFIX_ID\" IN (:ids)"); + } + + private PersistentPropertyPath getPath(String path) { + return PersistentPropertyPathTestUtils.getPath(path, DummyEntity.class, context); + } + + /** + * Plug in a custom {@link NamingStrategy} for this test case. 
+ */ + private SqlGenerator configureSqlGenerator(NamingStrategy namingStrategy) { + + context = new JdbcMappingContext(namingStrategy); + JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); + return new SqlGenerator(context, converter, persistentEntity, AnsiDialect.INSTANCE); + } + + @SuppressWarnings("unused") + static class DummyEntity { + + @Id Long id; + String name; + ReferencedEntity ref; + } + + @SuppressWarnings("unused") + static class ReferencedEntity { + + @Id Long l1id; + String content; + SecondLevelReferencedEntity further; + } + + @SuppressWarnings("unused") + static class SecondLevelReferencedEntity { + + @Id Long l2id; + String something; + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorUnitTests.java new file mode 100644 index 0000000000..cc264cbe62 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlGeneratorUnitTests.java @@ -0,0 +1,1180 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.convert; + +import static java.util.Collections.emptySet; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.entry; +import static org.assertj.core.api.SoftAssertions.assertSoftly; +import static org.springframework.data.relational.core.mapping.ForeignKeyNaming.APPLY_RENAMING; +import static org.springframework.data.relational.core.mapping.ForeignKeyNaming.IGNORE_RENAMING; +import static org.springframework.data.relational.core.sql.SqlIdentifier.EMPTY; +import static org.springframework.data.relational.core.sql.SqlIdentifier.quoted; +import static org.springframework.data.relational.core.sql.SqlIdentifier.unquoted; + +import java.util.Map; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.PersistentPropertyPathTestUtils; +import org.springframework.data.jdbc.core.dialect.JdbcPostgresDialect; +import org.springframework.data.jdbc.core.dialect.JdbcSqlServerDialect; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.dialect.AnsiDialect; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.AggregatePath; +import 
org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.relational.core.sql.Aliased; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.lang.Nullable; + +/** + * Unit tests for the {@link SqlGenerator}. 
+ * + * @author Jens Schauder + * @author Greg Turnquist + * @author Oleksandr Kucher + * @author Bastian Wilhelm + * @author Mark Paluch + * @author Tom Hombergs + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Mikhail Polivakha + * @author Chirag Tailor + * @author Diego Krupitza + * @author Hari Ohm Prasath + * @author Viktor Ardelean + */ +@SuppressWarnings("Convert2MethodRef") +class SqlGeneratorUnitTests { + + private static final Identifier BACKREF = Identifier.of(unquoted("backref"), "some-value", String.class); + + private final PrefixingNamingStrategy namingStrategy = new PrefixingNamingStrategy(); + private RelationalMappingContext context = new JdbcMappingContext(namingStrategy); + private final JdbcConverter converter = new MappingJdbcConverter(context, (identifier, path) -> { + throw new UnsupportedOperationException(); + }); + private SqlGenerator sqlGenerator; + + @BeforeEach + void setUp() { + this.sqlGenerator = createSqlGenerator(DummyEntity.class); + } + + SqlGenerator createSqlGenerator(Class type) { + + return createSqlGenerator(type, NonQuotingDialect.INSTANCE); + } + + SqlGenerator createSqlGenerator(Class type, Dialect dialect) { + + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(type); + + return new SqlGenerator(context, converter, persistentEntity, dialect); + } + + @Test // DATAJDBC-112 + void findOne() { + + String sql = sqlGenerator.getFindOne(); + + assertSoftly(softly -> softly // + .assertThat(sql) // + .startsWith("SELECT") // + .contains("dummy_entity.id1 AS id1,") // + .contains("dummy_entity.x_name AS x_name,") // + .contains("dummy_entity.x_other AS x_other,") // + .contains("ref.x_l1id AS ref_x_l1id") // + .contains("ref.x_content AS ref_x_content").contains(" FROM dummy_entity") // + .contains("ON ref.dummy_entity = dummy_entity.id1") // + .contains("WHERE dummy_entity.id1 = :id") // + // 1-N relationships do not get loaded via join + .doesNotContain("Element AS 
elements")); + } + + @Test // DATAJDBC-493 + void getAcquireLockById() { + + String sql = sqlGenerator.getAcquireLockById(LockMode.PESSIMISTIC_WRITE); + + assertSoftly(softly -> softly // + .assertThat(sql) // + .startsWith("SELECT") // + .contains("dummy_entity.id1") // + .contains("WHERE dummy_entity.id1 = :id") // + .contains("FOR UPDATE") // + .doesNotContain("Element AS elements")); + } + + @Test // DATAJDBC-493 + void getAcquireLockAll() { + + String sql = sqlGenerator.getAcquireLockAll(LockMode.PESSIMISTIC_WRITE); + + assertSoftly(softly -> softly // + .assertThat(sql) // + .startsWith("SELECT") // + .contains("dummy_entity.id1") // + .contains("FOR UPDATE") // + .doesNotContain("Element AS elements")); + } + + @Test // DATAJDBC-112 + void cascadingDeleteFirstLevel() { + + String sql = sqlGenerator.createDeleteByPath(getPath("ref", DummyEntity.class)); + + assertThat(sql).isEqualTo("DELETE FROM referenced_entity WHERE referenced_entity.dummy_entity = :rootId"); + } + + @Test // GH-537 + void cascadingDeleteInByPathFirstLevel() { + + String sql = sqlGenerator.createDeleteInByPath(getPath("ref", DummyEntity.class)); + + assertThat(sql).isEqualTo("DELETE FROM referenced_entity WHERE referenced_entity.dummy_entity IN (:ids)"); + } + + @Test // DATAJDBC-112 + void cascadingDeleteByPathSecondLevel() { + + String sql = sqlGenerator.createDeleteByPath(getPath("ref.further", DummyEntity.class)); + + assertThat(sql).isEqualTo( + "DELETE FROM second_level_referenced_entity WHERE second_level_referenced_entity.referenced_entity IN (SELECT referenced_entity.x_l1id FROM referenced_entity WHERE referenced_entity.dummy_entity = :rootId)"); + } + + @Test // GH-537 + void cascadingDeleteInByPathSecondLevel() { + + String sql = sqlGenerator.createDeleteInByPath(getPath("ref.further", DummyEntity.class)); + + assertThat(sql).isEqualTo( + "DELETE FROM second_level_referenced_entity WHERE second_level_referenced_entity.referenced_entity IN (SELECT referenced_entity.x_l1id FROM 
referenced_entity WHERE referenced_entity.dummy_entity IN (:ids))"); + } + + @Test // DATAJDBC-112 + void deleteAll() { + + String sql = sqlGenerator.createDeleteAllSql(null); + + assertThat(sql).isEqualTo("DELETE FROM dummy_entity"); + } + + @Test // DATAJDBC-112 + void cascadingDeleteAllFirstLevel() { + + String sql = sqlGenerator.createDeleteAllSql(getPath("ref", DummyEntity.class)); + + assertThat(sql).isEqualTo("DELETE FROM referenced_entity WHERE referenced_entity.dummy_entity IS NOT NULL"); + } + + @Test // DATAJDBC-112 + void cascadingDeleteAllSecondLevel() { + + String sql = sqlGenerator.createDeleteAllSql(getPath("ref.further", DummyEntity.class)); + + assertThat(sql).isEqualTo( + "DELETE FROM second_level_referenced_entity WHERE second_level_referenced_entity.referenced_entity IN (SELECT referenced_entity.x_l1id FROM referenced_entity WHERE referenced_entity.dummy_entity IS NOT NULL)"); + } + + @Test // DATAJDBC-227 + void deleteAllMap() { + + String sql = sqlGenerator.createDeleteAllSql(getPath("mappedElements", DummyEntity.class)); + + assertThat(sql).isEqualTo("DELETE FROM element WHERE element.dummy_entity IS NOT NULL"); + } + + @Test // DATAJDBC-227 + void deleteMapByPath() { + + String sql = sqlGenerator.createDeleteByPath(getPath("mappedElements", DummyEntity.class)); + + assertThat(sql).isEqualTo("DELETE FROM element WHERE element.dummy_entity = :rootId"); + } + + @Test // DATAJDBC-101 + void findAllSortedByUnsorted() { + + String sql = sqlGenerator.getFindAll(Sort.unsorted()); + + assertThat(sql).doesNotContain("ORDER BY"); + } + + @Test // DATAJDBC-101 + void findAllSortedBySingleField() { + + String sql = sqlGenerator.getFindAll(Sort.by("name")); + + assertThat(sql).contains("SELECT", // + "dummy_entity.id1 AS id1", // + "dummy_entity.x_name AS x_name", // + "dummy_entity.x_other AS x_other", // + "ref.x_l1id AS ref_x_l1id", // + "ref.x_content AS ref_x_content", // + "ref_further.x_l2id AS ref_further_x_l2id", // + "ref_further.x_something AS 
ref_further_x_something", // + "FROM dummy_entity ", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "ORDER BY dummy_entity.x_name ASC"); + } + + @Test // DATAJDBC-101 + void findAllSortedByMultipleFields() { + + String sql = sqlGenerator + .getFindAll(Sort.by(new Sort.Order(Sort.Direction.DESC, "name"), new Sort.Order(Sort.Direction.ASC, "other"))); + + assertThat(sql).contains("SELECT", // + "dummy_entity.id1 AS id1", // + "dummy_entity.x_name AS x_name", // + "dummy_entity.x_other AS x_other", // + "ref.x_l1id AS ref_x_l1id", // + "ref.x_content AS ref_x_content", // + "ref_further.x_l2id AS ref_further_x_l2id", // + "ref_further.x_something AS ref_further_x_something", // + "FROM dummy_entity ", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "ORDER BY dummy_entity.x_name DESC", // + "x_other ASC"); + } + + @Test // GH-821 + void findAllSortedWithNullHandling_resolvesNullHandlingWhenDialectSupportsIt() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class, JdbcPostgresDialect.INSTANCE); + + String sql = sqlGenerator + .getFindAll(Sort.by(new Sort.Order(Sort.Direction.ASC, "name", Sort.NullHandling.NULLS_LAST))); + + assertThat(sql).contains("ORDER BY \"dummy_entity\".\"x_name\" ASC NULLS LAST"); + } + + @Test // GH-821 + void findAllSortedWithNullHandling_ignoresNullHandlingWhenDialectDoesNotSupportIt() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class, JdbcSqlServerDialect.INSTANCE); + + String sql = sqlGenerator + .getFindAll(Sort.by(new Sort.Order(Sort.Direction.ASC, "name", Sort.NullHandling.NULLS_LAST))); + + assertThat(sql).endsWith("ORDER BY \"dummy_entity\".\"x_name\" ASC"); + } + + @Test // DATAJDBC-101 + void 
findAllPagedByUnpaged() { + + String sql = sqlGenerator.getFindAll(Pageable.unpaged()); + + assertThat(sql).doesNotContain("ORDER BY").doesNotContain("FETCH FIRST").doesNotContain("OFFSET"); + } + + @Test // DATAJDBC-101 + void findAllPaged() { + + String sql = sqlGenerator.getFindAll(PageRequest.of(2, 20)); + + assertThat(sql).contains("SELECT", // + "dummy_entity.id1 AS id1", // + "dummy_entity.x_name AS x_name", // + "dummy_entity.x_other AS x_other", // + "ref.x_l1id AS ref_x_l1id", // + "ref.x_content AS ref_x_content", // + "ref_further.x_l2id AS ref_further_x_l2id", // + "ref_further.x_something AS ref_further_x_something", // + "FROM dummy_entity ", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "OFFSET 40", // + "LIMIT 20"); + } + + @Test // DATAJDBC-101 + void findAllPagedAndSorted() { + + String sql = sqlGenerator.getFindAll(PageRequest.of(3, 10, Sort.by("name"))); + + assertThat(sql).contains("SELECT", // + "dummy_entity.id1 AS id1", // + "dummy_entity.x_name AS x_name", // + "dummy_entity.x_other AS x_other", // + "ref.x_l1id AS ref_x_l1id", // + "ref.x_content AS ref_x_content", // + "ref_further.x_l2id AS ref_further_x_l2id", // + "ref_further.x_something AS ref_further_x_something", // + "FROM dummy_entity ", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "ORDER BY dummy_entity.x_name ASC", // + "OFFSET 30", // + "LIMIT 10"); + } + + @Test // GH-1919 + void selectByQuery() { + + Query query = Query.query(Criteria.where("id").is(23L)); + + String sql = sqlGenerator.selectByQuery(query, new MapSqlParameterSource()); + + assertThat(sql).contains( // + "SELECT", // + "FROM dummy_entity", // + "LEFT OUTER JOIN referenced_entity ref ON 
ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "WHERE dummy_entity.id1 = :id1" // + ); + } + + @Test // GH-1919 + void selectBySortedQuery() { + + Query query = Query.query(Criteria.where("id").is(23L)) // + .sort(Sort.by(Sort.Order.asc("id"))); + + String sql = sqlGenerator.selectByQuery(query, new MapSqlParameterSource()); + + assertThat(sql).contains( // + "SELECT", // + "FROM dummy_entity", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "WHERE dummy_entity.id1 = :id1", // + "ORDER BY dummy_entity.id1 ASC" // + ); + assertThat(sql).containsOnlyOnce("LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1"); + assertThat(sql).containsOnlyOnce("LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id"); + } + + @Test // DATAJDBC-131, DATAJDBC-111 + void findAllByProperty() { + + // this would get called when ListParent is the element type of a Set + String sql = sqlGenerator.getFindAllByProperty(BACKREF, null, false); + + assertThat(sql).contains("SELECT", // + "dummy_entity.id1 AS id1", // + "dummy_entity.x_name AS x_name", // + "dummy_entity.x_other AS x_other", // + "ref.x_l1id AS ref_x_l1id", // + "ref.x_content AS ref_x_content", // + "ref_further.x_l2id AS ref_further_x_l2id", // + "ref_further.x_something AS ref_further_x_something", // + "FROM dummy_entity ", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "WHERE dummy_entity.backref = :backref"); + } + + @Test // DATAJDBC-223 + void findAllByPropertyWithMultipartIdentifier() { + + // this would get called when ListParent is 
the element type of a Set + Identifier parentIdentifier = Identifier.of(unquoted("backref"), "some-value", String.class) // + .withPart(unquoted("backref_key"), "key-value", Object.class); + String sql = sqlGenerator.getFindAllByProperty(parentIdentifier, null, false); + + assertThat(sql).contains("SELECT", // + "dummy_entity.id1 AS id1", // + "dummy_entity.x_name AS x_name", // + "dummy_entity.x_other AS x_other", // + "ref.x_l1id AS ref_x_l1id", // + "ref.x_content AS ref_x_content", // + "ref_further.x_l2id AS ref_further_x_l2id", // + "ref_further.x_something AS ref_further_x_something", // + "FROM dummy_entity ", // + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1", // + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id", // + "dummy_entity.backref = :backref", // + "dummy_entity.backref_key = :backref_key"); + } + + @Test // DATAJDBC-131, DATAJDBC-111 + void findAllByPropertyWithKey() { + + // this would get called when ListParent is th element type of a Map + String sql = sqlGenerator.getFindAllByProperty(BACKREF, + new AggregatePath.ColumnInfo(unquoted("key-column"), unquoted("key-column")), false); + + assertThat(sql).isEqualTo("SELECT dummy_entity.id1 AS id1, dummy_entity.x_name AS x_name, " // + + "dummy_entity.x_other AS x_other, " // + + "ref.x_l1id AS ref_x_l1id, ref.x_content AS ref_x_content, " + + "ref_further.x_l2id AS ref_further_x_l2id, ref_further.x_something AS ref_further_x_something, " // + + "dummy_entity.key-column AS key-column " // + + "FROM dummy_entity " // + + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1 " // + + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id " // + + "WHERE dummy_entity.backref = :backref"); + } + + @Test // DATAJDBC-130 + void findAllByPropertyOrderedWithoutKey() { + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() 
-> sqlGenerator.getFindAllByProperty(BACKREF, null, true)); + } + + @Test // DATAJDBC-131, DATAJDBC-111 + void findAllByPropertyWithKeyOrdered() { + + // this would get called when ListParent is th element type of a Map + String sql = sqlGenerator.getFindAllByProperty(BACKREF, + new AggregatePath.ColumnInfo(unquoted("key-column"), unquoted("key-column")), true); + + assertThat(sql).isEqualTo("SELECT dummy_entity.id1 AS id1, dummy_entity.x_name AS x_name, " // + + "dummy_entity.x_other AS x_other, " // + + "ref.x_l1id AS ref_x_l1id, ref.x_content AS ref_x_content, " + + "ref_further.x_l2id AS ref_further_x_l2id, ref_further.x_something AS ref_further_x_something, " // + + "dummy_entity.key-column AS key-column " // + + "FROM dummy_entity " // + + "LEFT OUTER JOIN referenced_entity ref ON ref.dummy_entity = dummy_entity.id1 " // + + "LEFT OUTER JOIN second_level_referenced_entity ref_further ON ref_further.referenced_entity = ref.x_l1id " // + + "WHERE dummy_entity.backref = :backref " // + + "ORDER BY key-column"); + } + + @Test // GH-1073 + public void findAllByPropertyAvoidsDuplicateColumns() { + + final SqlGenerator sqlGenerator = createSqlGenerator(ReferencedEntity.class); + final String sql = sqlGenerator.getFindAllByProperty( + Identifier.of(quoted("id"), "parent-id-value", DummyEntity.class), // + new AggregatePath.ColumnInfo(quoted("X_L1ID"), quoted("X_L1ID")), // this key column collides with the name + // derived by the naming strategy for the id + // of + // ReferencedEntity. 
+ false); + + final String id = "referenced_entity.x_l1id AS x_l1id"; + assertThat(sql.indexOf(id)) // + .describedAs(sql) // + .isEqualTo(sql.lastIndexOf(id)); + + } + + @Test // GH-833 + void findAllByPropertyWithEmptyBackrefColumn() { + + Identifier emptyIdentifier = Identifier.of(EMPTY, 0, Object.class); + assertThatThrownBy(() -> sqlGenerator.getFindAllByProperty(emptyIdentifier, + new AggregatePath.ColumnInfo(unquoted("key-column"), unquoted("key-column")), false)) // + .isInstanceOf(IllegalArgumentException.class) // + .hasMessageContaining( + "An empty SqlIdentifier can't be used in condition. Make sure that all composite primary keys are defined in the query"); + } + + @Test // DATAJDBC-219 + void updateWithVersion() { + + SqlGenerator sqlGenerator = createSqlGenerator(VersionedEntity.class, AnsiDialect.INSTANCE); + + assertThat(sqlGenerator.getUpdateWithVersion()).containsSubsequence( // + "UPDATE", // + "\"VERSIONED_ENTITY\"", // + "SET", // + "WHERE", // + "\"id1\" = :id1", // + "AND", // + "\"X_VERSION\" = :___oldOptimisticLockingVersion"); + } + + @Test // DATAJDBC-264 + void getInsertForEmptyColumnListPostgres() { + + SqlGenerator sqlGenerator = createSqlGenerator(IdOnlyEntity.class, JdbcPostgresDialect.INSTANCE); + + String insert = sqlGenerator.getInsert(emptySet()); + + assertThat(insert).endsWith(" VALUES (DEFAULT)"); + } + + @Test // GH-777 + void gerInsertForEmptyColumnListMsSqlServer() { + + SqlGenerator sqlGenerator = createSqlGenerator(IdOnlyEntity.class, JdbcSqlServerDialect.INSTANCE); + + String insert = sqlGenerator.getInsert(emptySet()); + + assertThat(insert).endsWith(" DEFAULT VALUES"); + } + + @Test // DATAJDBC-334 + void getInsertForQuotedColumnName() { + + SqlGenerator sqlGenerator = createSqlGenerator(EntityWithQuotedColumnName.class, AnsiDialect.INSTANCE); + + String insert = sqlGenerator.getInsert(emptySet()); + + assertThat(insert).isEqualTo("INSERT INTO \"ENTITY_WITH_QUOTED_COLUMN_NAME\" " // + + "(\"test\"\"_@123\") " + 
"VALUES (:test_123)"); + } + + @Test // DATAJDBC-266 + void joinForOneToOneWithoutIdIncludesTheBackReferenceOfTheOuterJoin() { + + SqlGenerator sqlGenerator = createSqlGenerator(ParentOfNoIdChild.class, AnsiDialect.INSTANCE); + + String findAll = sqlGenerator.getFindAll(); + + assertThat(findAll).containsSubsequence("SELECT", + "\"child\".\"PARENT_OF_NO_ID_CHILD\" AS \"CHILD_PARENT_OF_NO_ID_CHILD\"", "FROM"); + } + + @Test // DATAJDBC-262 + void update() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class, AnsiDialect.INSTANCE); + + assertThat(sqlGenerator.getUpdate()).containsSubsequence( // + "UPDATE", // + "\"DUMMY_ENTITY\"", // + "SET", // + "WHERE", // + "\"id1\" = :id1"); + } + + @Test // DATAJDBC-324 + void readOnlyPropertyExcludedFromQuery_when_generateUpdateSql() { + + final SqlGenerator sqlGenerator = createSqlGenerator(EntityWithReadOnlyProperty.class, AnsiDialect.INSTANCE); + + assertThat(sqlGenerator.getUpdate()).isEqualToIgnoringCase( // + "UPDATE \"ENTITY_WITH_READ_ONLY_PROPERTY\" " // + + "SET \"X_NAME\" = :X_NAME " // + + "WHERE \"ENTITY_WITH_READ_ONLY_PROPERTY\".\"X_ID\" = :X_ID" // + ); + } + + @Test // DATAJDBC-334 + void getUpdateForQuotedColumnName() { + + SqlGenerator sqlGenerator = createSqlGenerator(EntityWithQuotedColumnName.class, AnsiDialect.INSTANCE); + + String update = sqlGenerator.getUpdate(); + + assertThat(update).isEqualTo("UPDATE \"ENTITY_WITH_QUOTED_COLUMN_NAME\" " // + + "SET \"test\"\"_@123\" = :test_123 " // + + "WHERE \"ENTITY_WITH_QUOTED_COLUMN_NAME\".\"test\"\"_@id\" = :test_id"); + } + + @Test // DATAJDBC-324 + void readOnlyPropertyExcludedFromQuery_when_generateInsertSql() { + + final SqlGenerator sqlGenerator = createSqlGenerator(EntityWithReadOnlyProperty.class, AnsiDialect.INSTANCE); + + assertThat(sqlGenerator.getInsert(emptySet())).isEqualToIgnoringCase( // + "INSERT INTO \"ENTITY_WITH_READ_ONLY_PROPERTY\" (\"X_NAME\") " // + + "VALUES (:x_name)" // + ); + } + + @Test // DATAJDBC-324 + void 
readOnlyPropertyIncludedIntoQuery_when_generateFindAllSql() { + + final SqlGenerator sqlGenerator = createSqlGenerator(EntityWithReadOnlyProperty.class); + + assertThat(sqlGenerator.getFindAll()).isEqualToIgnoringCase("SELECT " + + "entity_with_read_only_property.x_id AS x_id, " + "entity_with_read_only_property.x_name AS x_name, " + + "entity_with_read_only_property.x_read_only_value AS x_read_only_value " + + "FROM entity_with_read_only_property"); + } + + @Test // DATAJDBC-324 + void readOnlyPropertyIncludedIntoQuery_when_generateFindAllByPropertySql() { + + final SqlGenerator sqlGenerator = createSqlGenerator(EntityWithReadOnlyProperty.class); + + assertThat(sqlGenerator.getFindAllByProperty(BACKREF, + new AggregatePath.ColumnInfo(unquoted("key-column"), unquoted("key-column")), true)).isEqualToIgnoringCase( // + "SELECT " // + + "entity_with_read_only_property.x_id AS x_id, " // + + "entity_with_read_only_property.x_name AS x_name, " // + + "entity_with_read_only_property.x_read_only_value AS x_read_only_value, " // + + "entity_with_read_only_property.key-column AS key-column " // + + "FROM entity_with_read_only_property " // + + "WHERE entity_with_read_only_property.backref = :backref " // + + "ORDER BY key-column" // + ); + } + + @Test // DATAJDBC-324 + void readOnlyPropertyIncludedIntoQuery_when_generateFindAllInListSql() { + + final SqlGenerator sqlGenerator = createSqlGenerator(EntityWithReadOnlyProperty.class); + + assertThat(sqlGenerator.getFindAllInList()).isEqualToIgnoringCase( // + "SELECT " // + + "entity_with_read_only_property.x_id AS x_id, " // + + "entity_with_read_only_property.x_name AS x_name, " // + + "entity_with_read_only_property.x_read_only_value AS x_read_only_value " // + + "FROM entity_with_read_only_property " // + + "WHERE entity_with_read_only_property.x_id IN (:ids)" // + ); + } + + @Test // DATAJDBC-324 + void readOnlyPropertyIncludedIntoQuery_when_generateFindOneSql() { + + final SqlGenerator sqlGenerator = 
createSqlGenerator(EntityWithReadOnlyProperty.class); + + assertThat(sqlGenerator.getFindOne()).isEqualToIgnoringCase( // + "SELECT " // + + "entity_with_read_only_property.x_id AS x_id, " // + + "entity_with_read_only_property.x_name AS x_name, " // + + "entity_with_read_only_property.x_read_only_value AS x_read_only_value " // + + "FROM entity_with_read_only_property " // + + "WHERE entity_with_read_only_property.x_id = :id" // + ); + } + + @Test // DATAJDBC-340 + void deletingLongChain() { + + assertThat( + createSqlGenerator(Chain4.class).createDeleteByPath(getPath("chain3.chain2.chain1.chain0", Chain4.class))) // + .isEqualTo("DELETE FROM chain0 " + // + "WHERE chain0.chain1 IN (" + // + "SELECT chain1.x_one " + // + "FROM chain1 " + // + "WHERE chain1.chain2 IN (" + // + "SELECT chain2.x_two " + // + "FROM chain2 " + // + "WHERE chain2.chain3 IN (" + // + "SELECT chain3.x_three " + // + "FROM chain3 " + // + "WHERE chain3.chain4 = :rootId" + // + ")))"); + } + + @Test // DATAJDBC-359 + void deletingLongChainNoId() { + + assertThat(createSqlGenerator(NoIdChain4.class) + .createDeleteByPath(getPath("chain3.chain2.chain1.chain0", NoIdChain4.class))) // + .isEqualTo("DELETE FROM no_id_chain0 WHERE no_id_chain0.no_id_chain4 = :rootId"); + } + + @Test // DATAJDBC-359 + void deletingLongChainNoIdWithBackreferenceNotReferencingTheRoot() { + + assertThat(createSqlGenerator(IdIdNoIdChain.class) + .createDeleteByPath(getPath("idNoIdChain.chain4.chain3.chain2.chain1.chain0", IdIdNoIdChain.class))) // + .isEqualTo( // + "DELETE FROM no_id_chain0 " // + + "WHERE no_id_chain0.no_id_chain4 IN (" // + + "SELECT no_id_chain4.x_four " // + + "FROM no_id_chain4 " // + + "WHERE no_id_chain4.id_no_id_chain IN (" // + + "SELECT id_no_id_chain.x_id " // + + "FROM id_no_id_chain " // + + "WHERE id_no_id_chain.id_id_no_id_chain = :rootId" // + + "))"); + } + + @Test // DATAJDBC-340 + void noJoinForSimpleColumn() { + assertThat(generateJoin("id", DummyEntity.class)).isNull(); + } + + 
@Test // DATAJDBC-340 + void joinForSimpleReference() { + + SqlGenerator.Join join = generateJoin("ref", DummyEntity.class); + + assertSoftly(softly -> { + + softly.assertThat(join.getJoinTable().getName()).isEqualTo(SqlIdentifier.quoted("REFERENCED_ENTITY")); + softly.assertThat(join.getJoinColumn().getTable()).isEqualTo(join.getJoinTable()); + softly.assertThat(join.getJoinColumn().getName()).isEqualTo(SqlIdentifier.quoted("DUMMY_ENTITY")); + softly.assertThat(join.getParentId().getName()).isEqualTo(SqlIdentifier.quoted("id1")); + softly.assertThat(join.getParentId().getTable().getName()).isEqualTo(SqlIdentifier.quoted("DUMMY_ENTITY")); + }); + } + + @Test // DATAJDBC-340 + void noJoinForCollectionReference() { + + SqlGenerator.Join join = generateJoin("elements", DummyEntity.class); + + assertThat(join).isNull(); + + } + + @Test // DATAJDBC-340 + void noJoinForMappedReference() { + + SqlGenerator.Join join = generateJoin("mappedElements", DummyEntity.class); + + assertThat(join).isNull(); + } + + @Test // DATAJDBC-340 + void joinForSecondLevelReference() { + + SqlGenerator.Join join = generateJoin("ref.further", DummyEntity.class); + + assertSoftly(softly -> { + + softly.assertThat(join.getJoinTable().getName()) + .isEqualTo(SqlIdentifier.quoted("SECOND_LEVEL_REFERENCED_ENTITY")); + softly.assertThat(join.getJoinColumn().getTable()).isEqualTo(join.getJoinTable()); + softly.assertThat(join.getJoinColumn().getName()).isEqualTo(SqlIdentifier.quoted("REFERENCED_ENTITY")); + softly.assertThat(join.getParentId().getName()).isEqualTo(SqlIdentifier.quoted("X_L1ID")); + softly.assertThat(join.getParentId().getTable().getName()).isEqualTo(SqlIdentifier.quoted("REFERENCED_ENTITY")); + }); + } + + @Test // DATAJDBC-340 + void joinForOneToOneWithoutId() { + + SqlGenerator.Join join = generateJoin("child", ParentOfNoIdChild.class); + Table joinTable = join.getJoinTable(); + + assertSoftly(softly -> { + + 
softly.assertThat(joinTable.getName()).isEqualTo(SqlIdentifier.quoted("NO_ID_CHILD")); + softly.assertThat(joinTable).isInstanceOf(Aliased.class); + softly.assertThat(((Aliased) joinTable).getAlias()).isEqualTo(SqlIdentifier.quoted("child")); + softly.assertThat(join.getJoinColumn().getTable()).isEqualTo(joinTable); + softly.assertThat(join.getJoinColumn().getName()).isEqualTo(SqlIdentifier.quoted("PARENT_OF_NO_ID_CHILD")); + softly.assertThat(join.getParentId().getName()).isEqualTo(SqlIdentifier.quoted("X_ID")); + softly.assertThat(join.getParentId().getTable().getName()) + .isEqualTo(SqlIdentifier.quoted("PARENT_OF_NO_ID_CHILD")); + + }); + } + + @Nullable + private SqlGenerator.Join generateJoin(String path, Class type) { + return createSqlGenerator(type, AnsiDialect.INSTANCE) + .getJoin(context.getAggregatePath(PersistentPropertyPathTestUtils.getPath(path, type, context))); + } + + @Test // DATAJDBC-340 + void simpleColumn() { + + assertThat(generatedColumn("id", DummyEntity.class)) // + .extracting(c -> c.getName(), c -> c.getTable().getName(), c -> getAlias(c.getTable()), this::getAlias) + .containsExactly(SqlIdentifier.quoted("id1"), SqlIdentifier.quoted("DUMMY_ENTITY"), null, + SqlIdentifier.quoted("id1")); + } + + @Test // DATAJDBC-340 + void columnForIndirectProperty() { + + assertThat(generatedColumn("ref.l1id", DummyEntity.class)) // + .extracting(c -> c.getName(), c -> c.getTable().getName(), c -> getAlias(c.getTable()), this::getAlias) // + .containsExactly(SqlIdentifier.quoted("X_L1ID"), SqlIdentifier.quoted("REFERENCED_ENTITY"), + SqlIdentifier.quoted("ref"), SqlIdentifier.quoted("REF_X_L1ID")); + } + + @Test // DATAJDBC-340 + void noColumnForReferencedEntity() { + assertThat(generatedColumn("ref", DummyEntity.class)).isNull(); + } + + @Test // DATAJDBC-340 + void columnForReferencedEntityWithoutId() { + + assertThat(generatedColumn("child", ParentOfNoIdChild.class)) // + .extracting(c -> c.getName(), c -> c.getTable().getName(), c -> 
getAlias(c.getTable()), this::getAlias) // + .containsExactly(SqlIdentifier.quoted("PARENT_OF_NO_ID_CHILD"), SqlIdentifier.quoted("NO_ID_CHILD"), + SqlIdentifier.quoted("child"), SqlIdentifier.quoted("CHILD_PARENT_OF_NO_ID_CHILD")); + } + + @Test // GH-1192 + void selectByQueryValidTest() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class); + + DummyEntity probe = new DummyEntity(); + probe.name = "Diego"; + + Criteria criteria = Criteria.where("name").is(probe.name); + Query query = Query.query(criteria); + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + String generatedSQL = sqlGenerator.selectByQuery(query, parameterSource); + assertThat(generatedSQL).isNotNull().contains(":x_name"); + + assertThat(parameterSource.getValues()) // + .containsOnly(entry("x_name", probe.name)); + } + + @Test // GH-1329 + void selectWithOutAnyCriteriaTest() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class); + Query query = Query.query(Criteria.empty()); + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + String generatedSQL = sqlGenerator.selectByQuery(query, parameterSource); + + assertThat(generatedSQL).isNotNull().doesNotContain("where"); + } + + @Test // GH-1192 + void existsByQuerySimpleValidTest() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class); + + DummyEntity probe = new DummyEntity(); + probe.name = "Diego"; + + Criteria criteria = Criteria.where("name").is(probe.name); + Query query = Query.query(criteria); + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + String generatedSQL = sqlGenerator.existsByQuery(query, parameterSource); + assertThat(generatedSQL).isNotNull().contains(":x_name"); + + assertThat(parameterSource.getValues()) // + .containsOnly(entry("x_name", probe.name)); + } + + @Test // GH-1192 + void countByQuerySimpleValidTest() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class); + + DummyEntity probe 
= new DummyEntity(); + probe.name = "Diego"; + + Criteria criteria = Criteria.where("name").is(probe.name); + Query query = Query.query(criteria); + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + String generatedSQL = sqlGenerator.countByQuery(query, parameterSource); + assertThat(generatedSQL) // + .isNotNull() // + .containsIgnoringCase("COUNT(1)") // + .contains(":x_name"); + + assertThat(parameterSource.getValues()) // + .containsOnly(entry("x_name", probe.name)); + } + + @Test // GH-1192 + void selectByQueryPaginationValidTest() { + + SqlGenerator sqlGenerator = createSqlGenerator(DummyEntity.class); + + DummyEntity probe = new DummyEntity(); + probe.name = "Diego"; + + Criteria criteria = Criteria.where("name").is(probe.name); + Query query = Query.query(criteria); + + PageRequest pageRequest = PageRequest.of(2, 1, Sort.by(Sort.Order.asc("name"))); + + MapSqlParameterSource parameterSource = new MapSqlParameterSource(); + + String generatedSQL = sqlGenerator.selectByQuery(query, parameterSource, pageRequest); + assertThat(generatedSQL) // + .isNotNull() // + .contains(":x_name") // + .containsIgnoringCase("ORDER BY dummy_entity.x_name ASC") // + .containsIgnoringCase("LIMIT 1") // + .containsIgnoringCase("OFFSET 2 LIMIT 1"); + + assertThat(parameterSource.getValues()) // + .containsOnly(entry("x_name", probe.name)); + } + + @Test // GH-1161 + void backReferenceShouldConsiderRenamedParent() { + + namingStrategy.setForeignKeyNaming(APPLY_RENAMING); + context = new JdbcMappingContext(namingStrategy); + + String sql = sqlGenerator.createDeleteInByPath(getPath("ref", RenamedDummy.class)); + + assertThat(sql).isEqualTo("DELETE FROM referenced_entity WHERE referenced_entity.renamed IN (:ids)"); + } + + @Test // GH-1161 + void backReferenceShouldIgnoreRenamedParent() { + + namingStrategy.setForeignKeyNaming(IGNORE_RENAMING); + context = new JdbcMappingContext(namingStrategy); + + String sql = 
sqlGenerator.createDeleteInByPath(getPath("ref", RenamedDummy.class)); + + assertThat(sql).isEqualTo("DELETE FROM referenced_entity WHERE referenced_entity.renamed_dummy IN (:ids)"); + } + + @Test // GH-1161 + void keyColumnShouldConsiderRenamedParent() { + + namingStrategy.setForeignKeyNaming(APPLY_RENAMING); + context = new JdbcMappingContext(namingStrategy); + + SqlGenerator sqlGenerator = createSqlGenerator(ReferencedEntity.class); + String sql = sqlGenerator.getFindAllByProperty(Identifier.of(unquoted("parentId"), 23, RenamedDummy.class), + getPath("ref", RenamedDummy.class)); + + assertThat(sql).contains("referenced_entity.renamed_key AS renamed_key", "WHERE referenced_entity.parentId"); + } + + @Test // GH-1161 + void keyColumnShouldIgnoreRenamedParent() { + + namingStrategy.setForeignKeyNaming(IGNORE_RENAMING); + context = new JdbcMappingContext(namingStrategy); + + SqlGenerator sqlGenerator = createSqlGenerator(ReferencedEntity.class); + String sql = sqlGenerator.getFindAllByProperty(Identifier.of(unquoted("parentId"), 23, RenamedDummy.class), + getPath("ref", RenamedDummy.class)); + + assertThat(sql).contains("referenced_entity.renamed_dummy_key AS renamed_dummy_key", + "WHERE referenced_entity.parentId"); + } + + @Test // GH-1865 + void mappingMapKeyToChildShouldNotResultInDuplicateColumn() { + + SqlGenerator sqlGenerator = createSqlGenerator(Child.class); + String sql = sqlGenerator.getFindAllByProperty(Identifier.of(unquoted("parent"), 23, Parent.class), + context.getAggregatePath(getPath("children", Parent.class)).getTableInfo().qualifierColumnInfo(), false); + + assertThat(sql).containsOnlyOnce("child.NICK_NAME AS NICK_NAME"); + } + + @Nullable + private SqlIdentifier getAlias(Object maybeAliased) { + + if (maybeAliased instanceof Aliased aliased) { + return aliased.getAlias(); + } + return null; + } + + @Nullable + private org.springframework.data.relational.core.sql.Column generatedColumn(String path, Class type) { + + return 
createSqlGenerator(type, AnsiDialect.INSTANCE) + .getColumn(context.getAggregatePath(PersistentPropertyPathTestUtils.getPath(path, type, context))); + } + + private PersistentPropertyPath getPath(String path, Class baseType) { + return PersistentPropertyPathTestUtils.getPath(path, baseType, context); + } + + @SuppressWarnings("unused") + static class DummyEntity { + + @Column("id1") + @Id Long id; + String name; + ReferencedEntity ref; + Set elements; + Map mappedElements; + AggregateReference other; + Map mappedReference; + } + + @SuppressWarnings("unused") + @org.springframework.data.relational.core.mapping.Table("renamed") + static class RenamedDummy { + + @Id Long id; + String name; + Map ref; + } + + @SuppressWarnings("unused") + static class VersionedEntity extends DummyEntity { + @Version Integer version; + } + + @SuppressWarnings("unused") + static class ReferencedEntity { + + @Id Long l1id; + String content; + SecondLevelReferencedEntity further; + } + + @SuppressWarnings("unused") + static class SecondLevelReferencedEntity { + + @Id Long l2id; + String something; + } + + @SuppressWarnings("unused") + static class Element { + @Id Long id; + String content; + } + + @SuppressWarnings("unused") + static class ParentOfNoIdChild { + + @Id Long id; + NoIdChild child; + } + + private static class NoIdChild {} + + @SuppressWarnings("unused") + static class OtherAggregate { + @Id Long id; + String name; + } + + private static class PrefixingNamingStrategy extends DefaultNamingStrategy { + + @Override + public String getColumnName(RelationalPersistentProperty property) { + return "x_" + super.getColumnName(property); + } + + } + + @SuppressWarnings("unused") + static class IdOnlyEntity { + + @Id Long id; + } + + @SuppressWarnings("unused") + static class EntityWithReadOnlyProperty { + + @Id Long id; + String name; + @ReadOnlyProperty String readOnlyValue; + } + + @SuppressWarnings("unused") + static class EntityWithQuotedColumnName { + + // these column names behave 
like single double quote in the name since the get quoted and then doubling the double + // quote escapes it. + @Id + @Column("test\"\"_@id") Long id; + @Column("test\"\"_@123") String name; + } + + @SuppressWarnings("unused") + static class Chain0 { + @Id Long zero; + String zeroValue; + } + + @SuppressWarnings("unused") + static class Chain1 { + @Id Long one; + String oneValue; + Chain0 chain0; + } + + @SuppressWarnings("unused") + static class Chain2 { + @Id Long two; + String twoValue; + Chain1 chain1; + } + + @SuppressWarnings("unused") + static class Chain3 { + @Id Long three; + String threeValue; + Chain2 chain2; + } + + @SuppressWarnings("unused") + static class Chain4 { + @Id Long four; + String fourValue; + Chain3 chain3; + } + + @SuppressWarnings("unused") + static class NoIdChain0 { + String zeroValue; + } + + @SuppressWarnings("unused") + static class NoIdChain1 { + String oneValue; + NoIdChain0 chain0; + } + + @SuppressWarnings("unused") + static class NoIdChain2 { + String twoValue; + NoIdChain1 chain1; + } + + @SuppressWarnings("unused") + static class NoIdChain3 { + String threeValue; + NoIdChain2 chain2; + } + + @SuppressWarnings("unused") + static class NoIdChain4 { + @Id Long four; + String fourValue; + NoIdChain3 chain3; + } + + @SuppressWarnings("unused") + static class IdNoIdChain { + @Id Long id; + NoIdChain4 chain4; + } + + @SuppressWarnings("unused") + static class IdIdNoIdChain { + @Id Long id; + IdNoIdChain idNoIdChain; + } + + record Parent(@Id Long id, String name, @MappedCollection(keyColumn = "NICK_NAME") Map children) { + } + + record Child(@Column("NICK_NAME") String nickName, String name) { + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlIdentifierParameterSourceUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlIdentifierParameterSourceUnitTests.java new file mode 100644 index 0000000000..559997315f --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlIdentifierParameterSourceUnitTests.java @@ -0,0 +1,170 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static org.assertj.core.api.SoftAssertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Tests for {@link SqlIdentifierParameterSource}. 
+ * + * @author Jens Schauder + * @author Mikhail Polivakha + */ +class SqlIdentifierParameterSourceUnitTests { + + @Test // DATAJDBC-386 + void empty() { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + + assertSoftly(softly -> { + + softly.assertThat(parameters.getParameterNames()).isEmpty(); + softly.assertThat(parameters.getValue("blah")).isNull(); + softly.assertThat(parameters.hasValue("blah")).isFalse(); + softly.assertThat(parameters.getSqlType("blah")).isEqualTo(Integer.MIN_VALUE); + }); + } + + @Test // DATAJDBC-386 + void addSingleValue() { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + + parameters.addValue(SqlIdentifier.unquoted("key"), 23); + + assertSoftly(softly -> { + + softly.assertThat(parameters.getParameterNames()).isEqualTo(new String[] { "key" }); + softly.assertThat(parameters.getValue("key")).isEqualTo(23); + softly.assertThat(parameters.hasValue("key")).isTrue(); + + softly.assertThat(parameters.getValue("blah")).isNull(); + softly.assertThat(parameters.hasValue("blah")).isFalse(); + softly.assertThat(parameters.getSqlType("blah")).isEqualTo(Integer.MIN_VALUE); + }); + } + + @Test // GH-1565 + void addSingleUnsanitaryValue() { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + + parameters.addValue(SqlIdentifier.unquoted("ke.y"), 23); + + assertSoftly(softly -> { + + softly.assertThat(parameters.getParameterNames()).isEqualTo(new String[] { "key" }); + softly.assertThat(parameters.getValue("key")).isEqualTo(23); + softly.assertThat(parameters.hasValue("key")).isTrue(); + + softly.assertThat(parameters.getValue("ke.y")).isNull(); + softly.assertThat(parameters.hasValue("ke.y")).isFalse(); + softly.assertThat(parameters.getSqlType("ke.y")).isEqualTo(Integer.MIN_VALUE); + }); + } + + @Test // DATAJDBC-386 + void addSingleValueWithType() { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + + 
parameters.addValue(SqlIdentifier.unquoted("key"), 23, 42); + + assertSoftly(softly -> { + + softly.assertThat(parameters.getParameterNames()).isEqualTo(new String[] { "key" }); + softly.assertThat(parameters.getValue("key")).isEqualTo(23); + softly.assertThat(parameters.hasValue("key")).isTrue(); + softly.assertThat(parameters.getSqlType("key")).isEqualTo(42); + + softly.assertThat(parameters.getValue("blah")).isNull(); + softly.assertThat(parameters.hasValue("blah")).isFalse(); + softly.assertThat(parameters.getSqlType("blah")).isEqualTo(Integer.MIN_VALUE); + }); + } + + @Test // DATAJDBC-386 + void addOtherDatabaseObjectIdentifierParameterSource() { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + parameters.addValue(SqlIdentifier.unquoted("key1"), 111, 11); + parameters.addValue(SqlIdentifier.unquoted("key2"), 111); + + SqlIdentifierParameterSource parameters2 = new SqlIdentifierParameterSource(); + parameters2.addValue(SqlIdentifier.unquoted("key2"), 222, 22); + parameters2.addValue(SqlIdentifier.unquoted("key3"), 222); + + parameters.addAll(parameters2); + + assertSoftly(softly -> { + + softly.assertThat(parameters.getParameterNames()).containsExactlyInAnyOrder("key1", "key2", "key3"); + softly.assertThat(parameters.getValue("key1")).isEqualTo(111); + softly.assertThat(parameters.hasValue("key1")).isTrue(); + softly.assertThat(parameters.getSqlType("key1")).isEqualTo(11); + + softly.assertThat(parameters.getValue("key2")).isEqualTo(222); + softly.assertThat(parameters.hasValue("key2")).isTrue(); + softly.assertThat(parameters.getSqlType("key2")).isEqualTo(22); + + softly.assertThat(parameters.getValue("key3")).isEqualTo(222); + softly.assertThat(parameters.hasValue("key3")).isTrue(); + softly.assertThat(parameters.getSqlType("key3")).isEqualTo(Integer.MIN_VALUE); + + softly.assertThat(parameters.getValue("blah")).isNull(); + softly.assertThat(parameters.hasValue("blah")).isFalse(); + 
softly.assertThat(parameters.getSqlType("blah")).isEqualTo(Integer.MIN_VALUE); + }); + } + + @Test // DATAJDBC-386 + void addOtherDatabaseObjectIdentifierParameterSourceWithUnsanitaryValue() { + + SqlIdentifierParameterSource parameters = new SqlIdentifierParameterSource(); + parameters.addValue(SqlIdentifier.unquoted("key1"), 111, 11); + parameters.addValue(SqlIdentifier.unquoted("key2"), 111); + + SqlIdentifierParameterSource parameters2 = new SqlIdentifierParameterSource(); + parameters2.addValue(SqlIdentifier.unquoted("key.2"), 222, 22); + parameters2.addValue(SqlIdentifier.unquoted("key.3"), 222); + + parameters.addAll(parameters2); + + assertSoftly(softly -> { + + softly.assertThat(parameters.getParameterNames()).containsExactlyInAnyOrder("key1", "key2", "key3"); + softly.assertThat(parameters.getValue("key1")).isEqualTo(111); + softly.assertThat(parameters.hasValue("key1")).isTrue(); + softly.assertThat(parameters.getSqlType("key1")).isEqualTo(11); + + softly.assertThat(parameters.getValue("key2")).isEqualTo(222); + softly.assertThat(parameters.hasValue("key2")).isTrue(); + softly.assertThat(parameters.getSqlType("key2")).isEqualTo(22); + + softly.assertThat(parameters.getValue("key3")).isEqualTo(222); + softly.assertThat(parameters.hasValue("key3")).isTrue(); + softly.assertThat(parameters.getSqlType("key3")).isEqualTo(Integer.MIN_VALUE); + + softly.assertThat(parameters.getValue("blah")).isNull(); + softly.assertThat(parameters.hasValue("blah")).isFalse(); + softly.assertThat(parameters.getSqlType("blah")).isEqualTo(Integer.MIN_VALUE); + }); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlParametersFactoryTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlParametersFactoryTest.java new file mode 100644 index 0000000000..9efdb3aeab --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/SqlParametersFactoryTest.java @@ -0,0 +1,304 @@ 
+/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.convert; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.jdbc.core.convert.DefaultDataAccessStrategyUnitTests.*; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.conversion.IdValueSource; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.jdbc.core.JdbcOperations; + +/** + * Unit tests for {@link SqlParametersFactory}. 
+ * + * @author Chirag Tailor + */ +class SqlParametersFactoryTest { + + RelationalMappingContext context = new JdbcMappingContext(); + RelationResolver relationResolver = mock(RelationResolver.class); + MappingJdbcConverter converter = new MappingJdbcConverter(context, relationResolver); + SqlParametersFactory sqlParametersFactory = new SqlParametersFactory(context, converter); + + @Test // DATAJDBC-412 + public void considersConfiguredWriteConverterForIdValueObjects_onRead() { + + SqlParametersFactory sqlParametersFactory = createSqlParametersFactoryWithConverters( + singletonList(IdValueToStringConverter.INSTANCE)); + + String rawId = "batman"; + SqlIdentifierParameterSource sqlParameterSource = sqlParametersFactory.forQueryById(new IdValue(rawId), + WithValueObjectId.class, SqlGenerator.ID_SQL_PARAMETER); + + assertThat(sqlParameterSource.getValue("id")).isEqualTo(rawId); + } + + @Test // DATAJDBC-349 + public void considersConfiguredWriteConverterForIdValueObjectsWhichReferencedInOneToManyRelationship() { + + SqlParametersFactory sqlParametersFactory = createSqlParametersFactoryWithConverters( + singletonList(IdValueToStringConverter.INSTANCE)); + + String rawId = "batman"; + IdValue rootIdValue = new IdValue(rawId); + + DummyEntityRoot root = new DummyEntityRoot(rootIdValue); + DummyEntity child = new DummyEntity(ORIGINAL_ID); + root.dummyEntities.add(child); + + HashMap additionalParameters = new HashMap<>(); + additionalParameters.put(SqlIdentifier.quoted("DUMMYENTITYROOT"), rootIdValue); + + SqlIdentifierParameterSource sqlParameterSource = sqlParametersFactory + .forQueryByIdentifier(Identifier.from(additionalParameters)); + + assertThat(sqlParameterSource.getValue("DUMMYENTITYROOT")).isEqualTo(rawId); + } + + @Test + // DATAJDBC-146 + void identifiersGetAddedAsParameters() { + + long id = 4711L; + DummyEntity instance = new DummyEntity(id); + long reference = 23L; + SqlIdentifierParameterSource sqlParameterSource = 
sqlParametersFactory.forInsert(instance, DummyEntity.class, + Identifier.of(SqlIdentifier.unquoted("reference"), reference, Long.class), IdValueSource.PROVIDED); + + assertThat(sqlParameterSource.getParameterNames()).hasSize(2); + assertThat(sqlParameterSource.getValue("id")).isEqualTo(id); + assertThat(sqlParameterSource.getValue("reference")).isEqualTo(reference); + } + + @Test + // DATAJDBC-146 + void additionalIdentifierForIdDoesNotLeadToDuplicateParameters() { + + long id = 4711L; + DummyEntity instance = new DummyEntity(id); + SqlIdentifierParameterSource sqlParameterSource = sqlParametersFactory.forInsert(instance, DummyEntity.class, + Identifier.of(SqlIdentifier.unquoted("id"), 23L, Long.class), IdValueSource.PROVIDED); + + assertThat(sqlParameterSource.getParameterNames()).hasSize(1); + assertThat(sqlParameterSource.getValue("id")).isEqualTo(id); + } + + @Test + // DATAJDBC-235 + void considersConfiguredWriteConverter() { + + SqlParametersFactory sqlParametersFactory = createSqlParametersFactoryWithConverters( + asList(BooleanToStringConverter.INSTANCE, StringToBooleanConverter.INSTANCE)); + + long id = 4711L; + SqlIdentifierParameterSource sqlParameterSource = sqlParametersFactory.forInsert(new EntityWithBoolean(id, true), + EntityWithBoolean.class, Identifier.empty(), IdValueSource.PROVIDED); + + assertThat(sqlParameterSource.getValue("id")).isEqualTo(id); + assertThat(sqlParameterSource.getValue("flag")).isEqualTo("T"); + } + + @Test + // DATAJDBC-412 + void considersConfiguredWriteConverterForIdValueObjects_onWrite() { + + SqlParametersFactory sqlParametersFactory = createSqlParametersFactoryWithConverters( + singletonList(IdValueToStringConverter.INSTANCE)); + + String rawId = "batman"; + WithValueObjectId entity = new WithValueObjectId(new IdValue(rawId)); + String value = "vs. 
superman"; + entity.value = value; + + SqlIdentifierParameterSource sqlParameterSource = sqlParametersFactory.forInsert(entity, WithValueObjectId.class, + Identifier.empty(), IdValueSource.PROVIDED); + assertThat(sqlParameterSource.getValue("id")).isEqualTo(rawId); + assertThat(sqlParameterSource.getValue("value")).isEqualTo(value); + } + + @Test + // GH-1405 + void parameterNamesGetSanitized() { + + WithIllegalCharacters entity = new WithIllegalCharacters(23L, "aValue"); + + SqlIdentifierParameterSource sqlParameterSource = sqlParametersFactory.forInsert(entity, + WithIllegalCharacters.class, Identifier.empty(), IdValueSource.PROVIDED); + + assertThat(sqlParameterSource.getValue("id")).isEqualTo(23L); + assertThat(sqlParameterSource.getValue("value")).isEqualTo("aValue"); + + assertThat(sqlParameterSource.getValue("i.d")).isNull(); + assertThat(sqlParameterSource.getValue("val&ue")).isNull(); + } + + @WritingConverter + enum IdValueToStringConverter implements Converter { + + INSTANCE; + + @Override + public String convert(IdValue source) { + return source.id; + } + } + + private static class WithValueObjectId { + + @Id private final IdValue id; + String value; + + private WithValueObjectId(IdValue id) { + this.id = id; + } + + public IdValue getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setValue(String value) { + this.value = value; + } + } + + private static final class IdValue { + private final String id; + + public IdValue(String id) { + this.id = id; + } + + public String getId() { + return this.id; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final IdValue other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + return Objects.equals(this$id, other$id); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == 
null ? 43 : $id.hashCode()); + return result; + } + + public String toString() { + return "SqlParametersFactoryTest.IdValue(id=" + this.getId() + ")"; + } + } + + @WritingConverter + enum BooleanToStringConverter implements Converter { + + INSTANCE; + + @Override + public String convert(Boolean source) { + return source != null && source ? "T" : "F"; + } + } + + @ReadingConverter + enum StringToBooleanConverter implements Converter { + + INSTANCE; + + @Override + public Boolean convert(String source) { + return source != null && source.equalsIgnoreCase("T") ? Boolean.TRUE : Boolean.FALSE; + } + } + + private static class EntityWithBoolean { + + @Id Long id; + boolean flag; + + public EntityWithBoolean(Long id, boolean flag) { + this.id = id; + this.flag = flag; + } + } + + // DATAJDBC-349 + private static class DummyEntityRoot { + + @Id private final IdValue id; + List dummyEntities = new ArrayList<>(); + + public DummyEntityRoot(IdValue id) { + this.id = id; + } + } + + private static class DummyEntity { + + @Id private final Long id; + + public DummyEntity(Long id) { + this.id = id; + } + } + + private static class WithIllegalCharacters { + + @Column("i.d") + @Id Long id; + + @Column("val&ue") String value; + + public WithIllegalCharacters(Long id, String value) { + this.id = id; + this.value = value; + } + } + + private SqlParametersFactory createSqlParametersFactoryWithConverters(List converters) { + + MappingJdbcConverter converter = new MappingJdbcConverter(context, relationResolver, + new JdbcCustomConversions(converters), new DefaultJdbcTypeFactory(mock(JdbcOperations.class))); + return new SqlParametersFactory(context, converter); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcDb2DialectUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcDb2DialectUnitTests.java new file mode 100644 index 0000000000..2e3263cd3a --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcDb2DialectUnitTests.java @@ -0,0 +1,46 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import static org.assertj.core.api.SoftAssertions.*; + +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; + +/** + * Tests for {@link JdbcMySqlDialect}. 
+ * + * @author Jens Schauder + */ +class JdbcDb2DialectUnitTests { + + @Test // GH-974 + void testCustomConversions() { + + JdbcCustomConversions customConversions = new JdbcCustomConversions( + (List) JdbcDb2Dialect.INSTANCE.getConverters()); + + assertSoftly(softly -> { + softly.assertThat(customConversions.getCustomWriteTarget(LocalDateTime.class)).contains(Timestamp.class); + softly.assertThat(customConversions.getCustomWriteTarget(OffsetDateTime.class)).contains(Timestamp.class); + }); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcMySqlDialectUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcMySqlDialectUnitTests.java new file mode 100644 index 0000000000..d78db60d3b --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcMySqlDialectUnitTests.java @@ -0,0 +1,47 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import static org.assertj.core.api.SoftAssertions.*; + +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; +import org.springframework.data.jdbc.core.mapping.JdbcValue; + +/** + * Tests for {@link JdbcMySqlDialect}. 
+ * + * @author Jens Schauder + */ +class JdbcMySqlDialectUnitTests { + + @Test // GH-974 + void testCustomConversions() { + + JdbcCustomConversions customConversions = new JdbcCustomConversions( + (List) new JdbcMySqlDialect().getConverters()); + + assertSoftly(softly -> { + + softly.assertThat(customConversions.getCustomWriteTarget(LocalDateTime.class)).isEmpty(); + softly.assertThat(customConversions.getCustomWriteTarget(OffsetDateTime.class)).contains(JdbcValue.class); + }); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcPostgresDialectUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcPostgresDialectUnitTests.java new file mode 100644 index 0000000000..a0d8991264 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcPostgresDialectUnitTests.java @@ -0,0 +1,53 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.postgresql.geometric.PGbox; +import org.postgresql.geometric.PGcircle; +import org.postgresql.geometric.PGlseg; +import org.postgresql.geometric.PGpath; +import org.postgresql.geometric.PGpoint; +import org.postgresql.geometric.PGpolygon; +import org.postgresql.util.PGobject; + +/** + * Unit tests for {@link JdbcPostgresDialect}. + * + * @author Jens Schauder + */ +public class JdbcPostgresDialectUnitTests { + + @Test // GH-1065 + void pgobjectIsConsideredSimple() { + assertThat(JdbcPostgresDialect.INSTANCE.simpleTypes()).contains(PGobject.class); + } + + @Test // GH-1065 + void geometricalTypesAreConsideredSimple() { + + assertThat(JdbcPostgresDialect.INSTANCE.simpleTypes()).contains( // + PGpoint.class, // + PGbox.class, // + PGcircle.class, // + org.postgresql.geometric.PGline.class, // + PGpath.class, // + PGpolygon.class, // + PGlseg.class); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcSqlServerDialectTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcSqlServerDialectTest.java new file mode 100644 index 0000000000..fb7da832fa --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/JdbcSqlServerDialectTest.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.dialect; + +import java.time.Instant; +import java.util.List; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; + +/** + * Tests for {@link JdbcSqlServerDialect} + * + * @author Mikhail Polivakha + */ +class JdbcSqlServerDialectTest { + + @Test // GH-1873 + void testCustomConversions() { + + JdbcCustomConversions jdbcCustomConversions = new JdbcCustomConversions( + (List) JdbcSqlServerDialect.INSTANCE.getConverters()); + + Assertions + .assertThat(jdbcCustomConversions.hasCustomReadTarget(microsoft.sql.DateTimeOffset.class, Instant.class)) + .isTrue(); + } +} \ No newline at end of file diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/OffsetDateTimeToTimestampConverterUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/OffsetDateTimeToTimestampConverterUnitTests.java new file mode 100644 index 0000000000..6c6c52c70c --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/OffsetDateTimeToTimestampConverterUnitTests.java @@ -0,0 +1,43 @@ +package org.springframework.data.jdbc.core.dialect; + +import org.junit.jupiter.api.Test; + +import java.sql.Timestamp; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; + +import static org.assertj.core.api.Assertions.*; + +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Tests for {@link JdbcDb2Dialect.OffsetDateTimeToTimestampConverter}. + * + * @author Jens Schauder + */ +class OffsetDateTimeToTimestampConverterUnitTests { + + @Test + void conversionPreservesInstant() { + + OffsetDateTime offsetDateTime = OffsetDateTime.of(5, 5, 5, 5,5,5,123456789, ZoneOffset.ofHours(3)); + + Timestamp timestamp = JdbcDb2Dialect.OffsetDateTimeToTimestampConverter.INSTANCE.convert(offsetDateTime); + + assertThat(timestamp.toInstant()).isEqualTo(offsetDateTime.toInstant()); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/PostgresDialectIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/PostgresDialectIntegrationTests.java new file mode 100644 index 0000000000..0d3685801e --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/dialect/PostgresDialectIntegrationTests.java @@ -0,0 +1,213 @@ +package org.springframework.data.jdbc.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +import org.junit.jupiter.api.Test; +import org.postgresql.util.PGobject; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import 
org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; +import org.springframework.data.jdbc.core.mapping.JdbcSimpleTypes; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.repository.CrudRepository; + +/** + * Integration tests for PostgreSQL Dialect. Start this test with {@code -Dspring.profiles.active=postgres}. 
+ * + * @author Nikita Konev + * @author Mark Paluch + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.POSTGRES) +public class PostgresDialectIntegrationTests { + + @Autowired CustomerRepository customerRepository; + + @Test // GH-920 + void shouldSaveAndLoadJson() throws SQLException { + + PGobject sessionData = new PGobject(); + sessionData.setType("jsonb"); + sessionData.setValue("{\"hello\": \"json\"}"); + + Customer saved = customerRepository + .save(new Customer(null, "Adam Smith", new JsonHolder("{\"hello\": \"world\"}"), sessionData)); + + Optional loaded = customerRepository.findById(saved.getId()); + + assertThat(loaded).hasValueSatisfying(actual -> { + + assertThat(actual.getPersonData().getContent()).isEqualTo("{\"hello\": \"world\"}"); + assertThat(actual.getSessionData().getValue()).isEqualTo("{\"hello\": \"json\"}"); + }); + } + + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(value = CustomerRepository.class, type = FilterType.ASSIGNABLE_TYPE)) + static class Config { + + @Bean + CustomConversions jdbcCustomConversions(Dialect dialect) { + SimpleTypeHolder simpleTypeHolder = new SimpleTypeHolder(dialect.simpleTypes(), JdbcSimpleTypes.HOLDER); + + return new JdbcCustomConversions( + CustomConversions.StoreConversions.of(simpleTypeHolder, storeConverters(dialect)), userConverters()); + } + + private List storeConverters(Dialect dialect) { + + List converters = new ArrayList<>(); + converters.addAll(dialect.getConverters()); + converters.addAll(JdbcCustomConversions.storeConverters()); + return converters; + } + + private List userConverters() { + return Arrays.asList(JsonHolderToPGobjectConverter.INSTANCE, PGobjectToJsonHolderConverter.INSTANCE); + } + } + + enum JsonHolderToPGobjectConverter implements Converter { + + INSTANCE; + + @Override + public PGobject convert(JsonHolder source) { + PGobject result = new PGobject(); + 
result.setType("json"); + try { + result.setValue(source.getContent()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + return result; + } + } + + enum PGobjectToJsonHolderConverter implements Converter { + + INSTANCE; + + @Override + public JsonHolder convert(PGobject source) { + return new JsonHolder(source.getValue()); + } + } + + @Table("customers") + public static final class Customer { + + @Id private final Long id; + private final String name; + private final JsonHolder personData; + private final PGobject sessionData; + + public Customer(Long id, String name, JsonHolder personData, PGobject sessionData) { + this.id = id; + this.name = name; + this.personData = personData; + this.sessionData = sessionData; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public JsonHolder getPersonData() { + return this.personData; + } + + public PGobject getSessionData() { + return this.sessionData; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Customer other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (!Objects.equals(this$name, other$name)) + return false; + final Object this$personData = this.getPersonData(); + final Object other$personData = other.getPersonData(); + if (!Objects.equals(this$personData, other$personData)) + return false; + final Object this$sessionData = this.getSessionData(); + final Object other$sessionData = other.getSessionData(); + return Objects.equals(this$sessionData, other$sessionData); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 
43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + final Object $personData = this.getPersonData(); + result = result * PRIME + ($personData == null ? 43 : $personData.hashCode()); + final Object $sessionData = this.getSessionData(); + result = result * PRIME + ($sessionData == null ? 43 : $sessionData.hashCode()); + return result; + } + + public String toString() { + return "PostgresDialectIntegrationTests.Customer(id=" + this.getId() + ", name=" + this.getName() + + ", personData=" + this.getPersonData() + ", sessionData=" + this.getSessionData() + ")"; + } + } + + public static class JsonHolder { + String content; + + public JsonHolder(String content) { + this.content = content; + } + + public JsonHolder() {} + + public String getContent() { + return this.content; + } + + public void setContent(String content) { + this.content = content; + } + } + + interface CustomerRepository extends CrudRepository {} + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/BasicJdbcPersistentPropertyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/BasicJdbcPersistentPropertyUnitTests.java new file mode 100644 index 0000000000..776ab41985 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/BasicJdbcPersistentPropertyUnitTests.java @@ -0,0 +1,253 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import junit.framework.AssertionFailedError; + +import java.time.LocalDateTime; +import java.time.ZonedDateTime; +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.BasicRelationalPersistentProperty; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Unit tests for the {@link BasicRelationalPersistentProperty}. 
+ * + * @author Jens Schauder + * @author Oliver Gierke + * @author Florian Lüdiger + * @author Mark Paluch + */ +public class BasicJdbcPersistentPropertyUnitTests { + + RelationalMappingContext context = new JdbcMappingContext(); + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + @Test // DATAJDBC-106 + public void detectsAnnotatedColumnName() { + + assertThat(entity.getRequiredPersistentProperty("name").getColumnName()).isEqualTo(quoted("dummy_name")); + assertThat(entity.getRequiredPersistentProperty("localDateTime").getColumnName()) + .isEqualTo(quoted("dummy_last_updated_at")); + } + + @Test // DATAJDBC-218 + public void detectsAnnotatedColumnAndKeyName() { + + String propertyName = "someList"; + RelationalPersistentProperty listProperty = entity.getRequiredPersistentProperty(propertyName); + AggregatePath path = getPersistentPropertyPath(DummyEntity.class, propertyName); + + assertThat(listProperty.getReverseColumnName(path.getRequiredBaseProperty().getOwner())) + .isEqualTo(quoted("dummy_column_name")); + assertThat(listProperty.getKeyColumn()).isEqualTo(quoted("dummy_key_column_name")); + } + + @Test // DATAJDBC-331 + public void detectsReverseColumnNameFromColumnAnnotation() { + + String propertyName = "someList"; + RelationalPersistentProperty listProperty = context // + .getRequiredPersistentEntity(WithCollections.class) // + .getRequiredPersistentProperty(propertyName); + AggregatePath path = getPersistentPropertyPath(DummyEntity.class, propertyName); + + assertThat(listProperty.getKeyColumn()).isEqualTo(quoted("WITH_COLLECTIONS_KEY")); + assertThat(listProperty.getReverseColumnName(path.getRequiredBaseProperty().getOwner())) + .isEqualTo(quoted("some_value")); + } + + @Test // DATAJDBC-331 + public void detectsKeyColumnOverrideNameFromMappedCollectionAnnotation() { + + RelationalPersistentProperty listProperty = context // + .getRequiredPersistentEntity(WithCollections.class) // + 
.getRequiredPersistentProperty("overrideList"); + AggregatePath path = getPersistentPropertyPath(WithCollections.class, "overrideList"); + + assertThat(listProperty.getKeyColumn()).isEqualTo(quoted("override_key")); + assertThat(listProperty.getReverseColumnName(path.getRequiredBaseProperty().getOwner())) + .isEqualTo(quoted("override_id")); + } + + @Test // GH-938 + void considersAggregateReferenceAnAssociation() { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + assertSoftly(softly -> { + + softly.assertThat(entity.getRequiredPersistentProperty("reference").isAssociation()) // + .as("reference") // + .isTrue(); + + softly.assertThat(entity.getRequiredPersistentProperty("id").isAssociation()) // + .as("id") // + .isFalse(); + softly.assertThat(entity.getRequiredPersistentProperty("someEnum").isAssociation()) // + .as("someEnum") // + .isFalse(); + softly.assertThat(entity.getRequiredPersistentProperty("localDateTime").isAssociation()) // + .as("localDateTime") // + .isFalse(); + softly.assertThat(entity.getRequiredPersistentProperty("zonedDateTime").isAssociation()) // + .as("zonedDateTime") // + .isFalse(); + softly.assertThat(entity.getRequiredPersistentProperty("listField").isAssociation()) // + .as("listField") // + .isFalse(); + softly.assertThat(entity.getRequiredPersistentProperty("uuid").isAssociation()) // + .as("uuid") // + .isFalse(); + }); + } + + private AggregatePath getPersistentPropertyPath(Class type, String propertyName) { + + PersistentPropertyPath path = context + .findPersistentPropertyPaths(type, p -> p.getName().equals(propertyName)).getFirst() + .orElseThrow(() -> new AssertionFailedError(String.format("Couldn't find path for '%s'", propertyName))); + + return context.getAggregatePath(path); + } + + @SuppressWarnings("unused") + private enum SomeEnum { + ALPHA + } + + @SuppressWarnings("unused") + private static class DummyEntity { + + @Id private final Long id; + private final SomeEnum 
someEnum; + private final LocalDateTime localDateTime; + private final ZonedDateTime zonedDateTime; + private final AggregateReference reference; + private final List listField; + private final UUID uuid; + + @MappedCollection(idColumn = "dummy_column_name", + keyColumn = "dummy_key_column_name") private List someList; + + // DATACMNS-106 + private @Column("dummy_name") String name; + + private DummyEntity(Long id, SomeEnum someEnum, LocalDateTime localDateTime, ZonedDateTime zonedDateTime, + AggregateReference reference, List listField, UUID uuid) { + this.id = id; + this.someEnum = someEnum; + this.localDateTime = localDateTime; + this.zonedDateTime = zonedDateTime; + this.reference = reference; + this.listField = listField; + this.uuid = uuid; + } + + @Column("dummy_last_updated_at") + public LocalDateTime getLocalDateTime() { + return localDateTime; + } + + public void setListSetter(Integer integer) { + + } + + public List getListGetter() { + return null; + } + + public Long getId() { + return this.id; + } + + public SomeEnum getSomeEnum() { + return this.someEnum; + } + + public ZonedDateTime getZonedDateTime() { + return this.zonedDateTime; + } + + public AggregateReference getReference() { + return this.reference; + } + + public List getListField() { + return this.listField; + } + + public UUID getUuid() { + return this.uuid; + } + + public List getSomeList() { + return this.someList; + } + + public String getName() { + return this.name; + } + + public void setSomeList(List someList) { + this.someList = someList; + } + + public void setName(String name) { + this.name = name; + } + } + + private static class WithCollections { + + @Column(value = "some_value") List someList; + + @Column(value = "some_value") // + @MappedCollection(idColumn = "override_id", keyColumn = "override_key") // + List overrideList; + + public List getSomeList() { + return this.someList; + } + + public List getOverrideList() { + return this.overrideList; + } + + public void 
setSomeList(List someList) { + this.someList = someList; + } + + public void setOverrideList(List overrideList) { + this.overrideList = overrideList; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/IdOnlyAggregateReferenceTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/IdOnlyAggregateReferenceTest.java new file mode 100644 index 0000000000..5c224edfb8 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/IdOnlyAggregateReferenceTest.java @@ -0,0 +1,47 @@ +package org.springframework.data.jdbc.core.mapping; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.mapping.AggregateReference.IdOnlyAggregateReference; + +/** + * Unit tests for the {@link IdOnlyAggregateReference}. + * + * @author Myeonghyeon Lee + */ +public class IdOnlyAggregateReferenceTest { + + @Test // DATAJDBC-427 + public void equals() { + + AggregateReference reference1 = AggregateReference.to("1"); + AggregateReference reference2 = AggregateReference.to("1"); + + assertThat(reference1).isEqualTo(reference2); + assertThat(reference2).isEqualTo(reference1); + } + + @Test // DATAJDBC-427 + public void equalsFalse() { + + AggregateReference reference1 = AggregateReference.to("1"); + AggregateReference reference2 = AggregateReference.to("2"); + + assertThat(reference1).isNotEqualTo(reference2); + assertThat(reference2).isNotEqualTo(reference1); + } + + @Test // DATAJDBC-427 + public void hashCodeTest() { + + AggregateReference reference1 = AggregateReference.to("1"); + AggregateReference reference2 = AggregateReference.to("1"); + + assertThat(reference1.hashCode()).isEqualTo(reference2.hashCode()); + } + + private static class DummyEntity { + private String id; + } +} diff --git 
a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java new file mode 100644 index 0000000000..9e7818cbc5 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java @@ -0,0 +1,346 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.core.mapping.schema; + +import static org.assertj.core.api.Assertions.*; + +import liquibase.change.AddColumnConfig; +import liquibase.change.ColumnConfig; +import liquibase.change.core.AddColumnChange; +import liquibase.change.core.AddForeignKeyConstraintChange; +import liquibase.change.core.DropColumnChange; +import liquibase.change.core.DropForeignKeyConstraintChange; +import liquibase.change.core.DropTableChange; +import liquibase.changelog.ChangeSet; +import liquibase.changelog.DatabaseChangeLog; +import liquibase.database.core.H2Database; +import liquibase.database.jvm.JdbcConnection; + +import java.io.File; +import java.io.InputStream; +import java.nio.file.Files; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Set; + +import org.assertj.core.api.ThrowingConsumer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.springframework.core.io.ClassRelativeResourceLoader; +import org.springframework.core.io.FileSystemResource; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.schema.LiquibaseChangeSetWriter.ChangeSetMetadata; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.util.Predicates; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; + +/** + * Integration tests for {@link LiquibaseChangeSetWriter}. 
+ * + * @author Mark Paluch + * @author Evgenii Koba + */ +class LiquibaseChangeSetWriterIntegrationTests { + + @Test // GH-1430 + void shouldRemoveUnusedTable() { + + withEmbeddedDatabase("unused-table.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(new RelationalMappingContext()); + writer.setDropTableFilter(Predicates.isTrue()); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(1); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(DropTableChange.class); + + DropTableChange drop = (DropTableChange) changeSet.getChanges().get(0); + assertThat(drop.getTableName()).isEqualToIgnoringCase("DELETE_ME"); + }); + } + + @Test // GH-1430 + void shouldNotDropTablesByDefault() { + + withEmbeddedDatabase("unused-table.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(new RelationalMappingContext()); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).isEmpty(); + }); + } + + @Test // GH-1430 + void shouldAddColumnToTable() { + + withEmbeddedDatabase("person-with-id-and-name.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(Person.class)); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(1); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class); + + AddColumnChange addColumns = (AddColumnChange) changeSet.getChanges().get(0); + 
assertThat(addColumns.getTableName()).isEqualToIgnoringCase("PERSON"); + assertThat(addColumns.getColumns()).hasSize(1); + + AddColumnConfig addColumn = addColumns.getColumns().get(0); + assertThat(addColumn.getName()).isEqualTo("last_name"); + assertThat(addColumn.getType()).isEqualTo("VARCHAR(255 BYTE)"); + }); + } + + @Test // GH-1430 + void shouldRemoveColumnFromTable() { + + withEmbeddedDatabase("person-with-id-and-name.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class)); + writer.setDropColumnFilter((s, s2) -> true); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(2); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class); + + AddColumnChange addColumns = (AddColumnChange) changeSet.getChanges().get(0); + assertThat(addColumns.getTableName()).isEqualToIgnoringCase("PERSON"); + assertThat(addColumns.getColumns()).hasSize(2); + assertThat(addColumns.getColumns()).extracting(AddColumnConfig::getName).containsExactly("my_id", "hello"); + + DropColumnChange dropColumns = (DropColumnChange) changeSet.getChanges().get(1); + assertThat(dropColumns.getTableName()).isEqualToIgnoringCase("PERSON"); + assertThat(dropColumns.getColumns()).hasSize(2); + assertThat(dropColumns.getColumns()).extracting(ColumnConfig::getName).map(String::toUpperCase).contains("ID", + "FIRST_NAME"); + }); + } + + @Test // GH-1430 + void doesNotRemoveColumnsByDefault() { + + withEmbeddedDatabase("person-with-id-and-name.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class)); + + ChangeSet changeSet = 
writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(1); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class); + }); + } + + @Test // GH-1430 + void shouldCreateNewChangeLog(@TempDir File tempDir) { + + withEmbeddedDatabase("person-with-id-and-name.sql", c -> { + + File changelogYml = new File(tempDir, "changelog.yml"); + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class)); + writer.writeChangeSet(new FileSystemResource(changelogYml)); + + assertThat(tempDir).isDirectoryContaining(it -> it.getName().equalsIgnoreCase("changelog.yml")); + + assertThat(changelogYml).content().contains("author: Spring Data Relational").contains("name: hello"); + }); + } + + @Test // GH-1430 + void shouldAppendToChangeLog(@TempDir File tempDir) { + + withEmbeddedDatabase("person-with-id-and-name.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + File changelogYml = new File(tempDir, "changelog.yml"); + try (InputStream is = getClass().getResourceAsStream("changelog.yml")) { + Files.copy(is, changelogYml.toPath()); + } + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class)); + writer.writeChangeSet(new FileSystemResource(new File(tempDir, "changelog.yml"))); + + assertThat(changelogYml).content().contains("author: Someone").contains("author: Spring Data Relational") + .contains("name: hello"); + }); + } + + @Test // GH-1599 + void dropAndCreateTableWithRightOrderOfFkChanges() { + + withEmbeddedDatabase("drop-and-create-table-with-fk.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new 
LiquibaseChangeSetWriter(contextOf(GroupOfPersons.class)); + writer.setDropTableFilter(Predicates.isTrue()); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(4); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(DropForeignKeyConstraintChange.class); + assertThat(changeSet.getChanges().get(3)).isInstanceOf(AddForeignKeyConstraintChange.class); + + DropForeignKeyConstraintChange dropForeignKey = (DropForeignKeyConstraintChange) changeSet.getChanges().get(0); + assertThat(dropForeignKey.getConstraintName()).isEqualToIgnoringCase("fk_to_drop"); + assertThat(dropForeignKey.getBaseTableName()).isEqualToIgnoringCase("table_to_drop"); + + AddForeignKeyConstraintChange addForeignKey = (AddForeignKeyConstraintChange) changeSet.getChanges().get(3); + assertThat(addForeignKey.getBaseTableName()).isEqualToIgnoringCase("person"); + assertThat(addForeignKey.getBaseColumnNames()).isEqualToIgnoringCase("group_id"); + assertThat(addForeignKey.getReferencedTableName()).isEqualToIgnoringCase("group_of_persons"); + assertThat(addForeignKey.getReferencedColumnNames()).isEqualToIgnoringCase("id"); + }); + } + + @Test // GH-1599 + void dropAndCreateFkInRightOrder() { + + withEmbeddedDatabase("drop-and-create-fk.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(GroupOfPersons.class)); + writer.setDropColumnFilter((s, s2) -> true); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(3); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(DropForeignKeyConstraintChange.class); + assertThat(changeSet.getChanges().get(2)).isInstanceOf(AddForeignKeyConstraintChange.class); + + DropForeignKeyConstraintChange dropForeignKey = 
(DropForeignKeyConstraintChange) changeSet.getChanges().get(0); + assertThat(dropForeignKey.getConstraintName()).isEqualToIgnoringCase("fk_to_drop"); + assertThat(dropForeignKey.getBaseTableName()).isEqualToIgnoringCase("person"); + + AddForeignKeyConstraintChange addForeignKey = (AddForeignKeyConstraintChange) changeSet.getChanges().get(2); + assertThat(addForeignKey.getBaseTableName()).isEqualToIgnoringCase("person"); + assertThat(addForeignKey.getBaseColumnNames()).isEqualToIgnoringCase("group_id"); + assertThat(addForeignKey.getReferencedTableName()).isEqualToIgnoringCase("group_of_persons"); + assertThat(addForeignKey.getReferencedColumnNames()).isEqualToIgnoringCase("id"); + }); + } + + @Test // GH-1599 + void fieldForFkWillBeCreated() { + + withEmbeddedDatabase("create-fk-with-field.sql", c -> { + + H2Database h2Database = new H2Database(); + h2Database.setConnection(new JdbcConnection(c)); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(GroupOfPersons.class)); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(2); + assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class); + assertThat(changeSet.getChanges().get(1)).isInstanceOf(AddForeignKeyConstraintChange.class); + + AddColumnChange addColumn = (AddColumnChange) changeSet.getChanges().get(0); + assertThat(addColumn.getTableName()).isEqualToIgnoringCase("person"); + assertThat(addColumn.getColumns()).hasSize(1); + assertThat(addColumn.getColumns()).extracting(AddColumnConfig::getName).containsExactly("group_id"); + + AddForeignKeyConstraintChange addForeignKey = (AddForeignKeyConstraintChange) changeSet.getChanges().get(1); + assertThat(addForeignKey.getBaseTableName()).isEqualToIgnoringCase("person"); + assertThat(addForeignKey.getBaseColumnNames()).isEqualToIgnoringCase("group_id"); + 
assertThat(addForeignKey.getReferencedTableName()).isEqualToIgnoringCase("group_of_persons"); + assertThat(addForeignKey.getReferencedColumnNames()).isEqualToIgnoringCase("id"); + }); + } + + RelationalMappingContext contextOf(Class... classes) { + + RelationalMappingContext context = new RelationalMappingContext(); + context.setInitialEntitySet(Set.of(classes)); + context.afterPropertiesSet(); + return context; + } + + void withEmbeddedDatabase(String script, ThrowingConsumer c) { + + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder(new ClassRelativeResourceLoader(getClass())) // + .generateUniqueName(true) // + .setType(EmbeddedDatabaseType.H2) // + .setScriptEncoding("UTF-8") // + .ignoreFailedDrops(true) // + .addScript(script) // + .build(); + + try { + + try (Connection connection = embeddedDatabase.getConnection()) { + c.accept(connection); + } + + } catch (SQLException e) { + throw new RuntimeException(e); + } finally { + embeddedDatabase.shutdown(); + } + } + + @Table + static class Person { + @Id int id; + String firstName; + String lastName; + } + + @Table("person") + static class DifferentPerson { + @Id int my_id; + String hello; + } + + @Table + static class GroupOfPersons { + @Id int id; + @MappedCollection(idColumn = "group_id") Set persons; + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java new file mode 100644 index 0000000000..5c0d1c5229 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java @@ -0,0 +1,269 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import static org.assertj.core.api.Assertions.*; + +import liquibase.change.Change; +import liquibase.change.ColumnConfig; +import liquibase.change.core.AddForeignKeyConstraintChange; +import liquibase.change.core.CreateTableChange; +import liquibase.changelog.ChangeSet; +import liquibase.changelog.DatabaseChangeLog; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import org.assertj.core.groups.Tuple; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.schema.LiquibaseChangeSetWriter.ChangeSetMetadata; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Unit tests for {@link LiquibaseChangeSetWriter}. 
+ * + * @author Mark Paluch + * @author Evgenii Koba + */ +class LiquibaseChangeSetWriterUnitTests { + + @Test // GH-1480 + void newTableShouldCreateChangeSet() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(VariousTypes.class); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog()); + + CreateTableChange createTable = (CreateTableChange) changeSet.getChanges().get(0); + + assertThat(createTable.getColumns()).extracting(ColumnConfig::getName).containsSequence("id", + "luke_i_am_your_father", "dark_side", "floater"); + assertThat(createTable.getColumns()).extracting(ColumnConfig::getType).containsSequence("BIGINT", + "VARCHAR(255 BYTE)", "TINYINT", "FLOAT"); + + ColumnConfig id = createTable.getColumns().get(0); + assertThat(id.getConstraints().isNullable()).isFalse(); + } + + @Test // GH-1480 + void shouldApplySchemaFilter() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(VariousTypes.class); + context.getRequiredPersistentEntity(OtherTable.class); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + writer.setSchemaFilter(it -> it.getName().contains("OtherTable")); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog()); + + assertThat(changeSet.getChanges()).hasSize(1); + CreateTableChange createTable = (CreateTableChange) changeSet.getChanges().get(0); + + assertThat(createTable.getTableName()).isEqualTo("other_table"); + } + + @Test // GH-1599 + void createForeignKeyWithNewTable() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(Tables.class); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + + ChangeSet changeSet = 
writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog()); + + AddForeignKeyConstraintChange addForeignKey = (AddForeignKeyConstraintChange) changeSet.getChanges().get(2); + + assertThat(addForeignKey.getBaseTableName()).isEqualTo("other_table"); + assertThat(addForeignKey.getBaseColumnNames()).isEqualTo("tables"); + assertThat(addForeignKey.getReferencedTableName()).isEqualTo("tables"); + assertThat(addForeignKey.getReferencedColumnNames()).isEqualTo("id"); + + } + + @Test // GH-1599 + void fieldForFkShouldNotBeCreatedTwice() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(DifferentTables.class); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog()); + + Optional tableWithFk = changeSet.getChanges().stream().filter(change -> change instanceof CreateTableChange createTableChange + && createTableChange.getTableName().equals("table_with_fk_field")).findFirst(); + assertThat(tableWithFk.isPresent()).isEqualTo(true); + + List columns = ((CreateTableChange) tableWithFk.get()).getColumns(); + assertThat(columns).extracting(ColumnConfig::getName).containsExactly("id", "tables_id"); + } + + @Test // GH-1599 + void createForeignKeyForNestedEntities() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(ListOfMapOfNoIdTables.class); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog()); + + assertCreateTable(changeSet, "no_id_table", Tuple.tuple("field", "VARCHAR(255 BYTE)", null), + Tuple.tuple("list_id", "INT", true), Tuple.tuple("list_of_map_of_no_id_tables_key", "INT", true), + Tuple.tuple("map_of_no_id_tables_key", "VARCHAR(255 BYTE)", true)); + + assertCreateTable(changeSet, 
"map_of_no_id_tables", Tuple.tuple("list_id", "INT", true), + Tuple.tuple("list_of_map_of_no_id_tables_key", "INT", true)); + + assertCreateTable(changeSet, "list_of_map_of_no_id_tables", Tuple.tuple("id", "INT", true)); + + assertAddForeignKey(changeSet, "no_id_table", "list_id,list_of_map_of_no_id_tables_key", "map_of_no_id_tables", + "list_id,list_of_map_of_no_id_tables_key"); + + assertAddForeignKey(changeSet, "map_of_no_id_tables", "list_id", "list_of_map_of_no_id_tables", "id"); + } + + @Test // GH-1599 + void createForeignKeyForOneToOneWithMultipleChildren() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(OneToOneLevel1.class); + + LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + + ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog()); + + assertCreateTable(changeSet, "other_table", Tuple.tuple("id", "BIGINT", true), + Tuple.tuple("one_to_one_level1", "INT", null)); + + assertCreateTable(changeSet, "one_to_one_level2", Tuple.tuple("one_to_one_level1", "INT", true)); + + assertCreateTable(changeSet, "no_id_table", Tuple.tuple("field", "VARCHAR(255 BYTE)", null), + Tuple.tuple("one_to_one_level2", "INT", true), Tuple.tuple("additional_one_to_one_level2", "INT", null)); + + assertAddForeignKey(changeSet, "other_table", "one_to_one_level1", "one_to_one_level1", "id"); + + assertAddForeignKey(changeSet, "one_to_one_level2", "one_to_one_level1", "one_to_one_level1", "id"); + + assertAddForeignKey(changeSet, "no_id_table", "one_to_one_level2", "one_to_one_level2", "one_to_one_level1"); + + assertAddForeignKey(changeSet, "no_id_table", "additional_one_to_one_level2", "one_to_one_level2", + "one_to_one_level1"); + + } + + + void assertCreateTable(ChangeSet changeSet, String tableName, Tuple... 
columnTuples) { + Optional createTableOptional = changeSet.getChanges().stream().filter(change -> change instanceof CreateTableChange createTableChange && createTableChange.getTableName().equals(tableName)).findFirst(); + assertThat(createTableOptional.isPresent()).isTrue(); + CreateTableChange createTable = (CreateTableChange) createTableOptional.get(); + assertThat(createTable.getColumns()) + .extracting(ColumnConfig::getName, ColumnConfig::getType, column -> column.getConstraints().isPrimaryKey()) + .containsExactly(columnTuples); + } + + void assertAddForeignKey(ChangeSet changeSet, String baseTableName, String baseColumnNames, + String referencedTableName, String referencedColumnNames) { + Optional addFkOptional = changeSet.getChanges().stream().filter(change -> change instanceof AddForeignKeyConstraintChange addForeignKeyConstraintChange + && addForeignKeyConstraintChange.getBaseTableName().equals(baseTableName) + && addForeignKeyConstraintChange.getBaseColumnNames().equals(baseColumnNames)).findFirst(); + assertThat(addFkOptional.isPresent()).isTrue(); + AddForeignKeyConstraintChange addFk = (AddForeignKeyConstraintChange) addFkOptional.get(); + assertThat(addFk.getBaseTableName()).isEqualTo(baseTableName); + assertThat(addFk.getBaseColumnNames()).isEqualTo(baseColumnNames); + assertThat(addFk.getReferencedTableName()).isEqualTo(referencedTableName); + assertThat(addFk.getReferencedColumnNames()).isEqualTo(referencedColumnNames); + } + + @org.springframework.data.relational.core.mapping.Table + static class VariousTypes { + @Id long id; + String lukeIAmYourFather; + Boolean darkSide; + Float floater; + Double doubleClass; + Integer integerClass; + } + + @org.springframework.data.relational.core.mapping.Table + static class OtherTable { + @Id long id; + } + + @org.springframework.data.relational.core.mapping.Table + static class Tables { + @Id int id; + @MappedCollection Set tables; + } + + @org.springframework.data.relational.core.mapping.Table + static 
class SetOfTables { + @Id int id; + @MappedCollection(idColumn = "set_id") Set setOfTables; + } + + @org.springframework.data.relational.core.mapping.Table + static class DifferentTables { + @Id int id; + @MappedCollection(idColumn = "tables_id") Set tables; + } + + @org.springframework.data.relational.core.mapping.Table + static class TableWithFkField { + @Id int id; + int tablesId; + } + + @org.springframework.data.relational.core.mapping.Table + static class NoIdTable { + String field; + } + + @org.springframework.data.relational.core.mapping.Table + static class MapOfNoIdTables { + @MappedCollection Map tables; + } + + @org.springframework.data.relational.core.mapping.Table + static class ListOfMapOfNoIdTables { + @Id int id; + @MappedCollection(idColumn = "list_id") List listOfTables; + } + + @org.springframework.data.relational.core.mapping.Table + static class OneToOneLevel1 { + @Id int id; + OneToOneLevel2 oneToOneLevel2; + OtherTable otherTable; + } + + @org.springframework.data.relational.core.mapping.Table + static class OneToOneLevel2 { + NoIdTable table1; + @Column("additional_one_to_one_level2") NoIdTable table2; + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java new file mode 100644 index 0000000000..6e8f23996a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java @@ -0,0 +1,90 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import static org.assertj.core.api.Assertions.*; + +import java.text.Collator; +import java.util.Locale; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Unit tests for the {@link Tables}. + * + * @author Kurt Niemi + * @author Mark Paluch + */ +class SchemaDiffUnitTests { + + @Test + void testDiffSchema() { + + RelationalMappingContext context = new RelationalMappingContext(); + context.getRequiredPersistentEntity(SchemaDiffUnitTests.Table1.class); + context.getRequiredPersistentEntity(SchemaDiffUnitTests.Table2.class); + + Tables mappedEntities = Tables.from(context); + Tables existingTables = Tables.from(context); + + // Table table1 does not exist on the database yet. 
+ existingTables.tables().remove(new Table("table1")); + + // Add column to table2 + Column newColumn = new Column("newcol", "VARCHAR(255)"); + Table table2 = mappedEntities.tables().get(mappedEntities.tables().indexOf(new Table("table2"))); + table2.columns().add(newColumn); + + // This should be deleted + Table delete_me = new Table(null, "delete_me"); + delete_me.columns().add(newColumn); + existingTables.tables().add(delete_me); + + SchemaDiff diff = SchemaDiff.diff(mappedEntities, existingTables, Collator.getInstance(Locale.ROOT)::compare); + + // Verify that newtable is an added table in the diff + assertThat(diff.tableAdditions()).isNotEmpty(); + assertThat(diff.tableAdditions()).extracting(Table::name).containsOnly("table1"); + + assertThat(diff.tableDeletions()).isNotEmpty(); + assertThat(diff.tableDeletions()).extracting(Table::name).containsOnly("delete_me"); + + assertThat(diff.tableDiffs()).hasSize(1); + assertThat(diff.tableDiffs()).extracting(it -> it.table().name()).containsOnly("table2"); + assertThat(diff.tableDiffs().get(0).columnsToAdd()).contains(newColumn); + assertThat(diff.tableDiffs().get(0).columnsToDrop()).isEmpty(); + } + + // Test table classes for performing schema diff + @org.springframework.data.relational.core.mapping.Table + static class Table1 { + String force; + String be; + String with; + String you; + } + + @org.springframework.data.relational.core.mapping.Table + static class Table2 { + String lukeIAmYourFather; + Boolean darkSide; + Float floater; + Double doubleClass; + Integer integerClass; + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java new file mode 100644 index 0000000000..298c397185 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java @@ -0,0 +1,68 @@ 
+/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.core.mapping.schema; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.nio.charset.Charset; +import java.time.Duration; +import java.time.ZoneId; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * Unit tests for {@link SqlTypeMapping}. + * + * @author Mark Paluch + */ +class SqlTypeMappingUnitTests { + + SqlTypeMapping typeMapping = new DefaultSqlTypeMapping() // + .and(property -> property.getActualType().equals(ZoneId.class) ? "ZONEID" : null) + .and(property -> property.getActualType().equals(Duration.class) ? 
"INTERVAL" : null); + + @Test // GH-1480 + void shouldComposeTypeMapping() { + + RelationalPersistentProperty p = mock(RelationalPersistentProperty.class); + doReturn(String.class).when(p).getActualType(); + + assertThat(typeMapping.getColumnType(p)).isEqualTo("VARCHAR(255 BYTE)"); + assertThat(typeMapping.getRequiredColumnType(p)).isEqualTo("VARCHAR(255 BYTE)"); + } + + @Test // GH-1480 + void shouldDelegateToCompositeTypeMapping() { + + RelationalPersistentProperty p = mock(RelationalPersistentProperty.class); + doReturn(Duration.class).when(p).getActualType(); + + assertThat(typeMapping.getColumnType(p)).isEqualTo("INTERVAL"); + assertThat(typeMapping.getRequiredColumnType(p)).isEqualTo("INTERVAL"); + } + + @Test // GH-1480 + void shouldPassThruNullValues() { + + RelationalPersistentProperty p = mock(RelationalPersistentProperty.class); + doReturn(Charset.class).when(p).getActualType(); + + assertThat(typeMapping.getColumnType(p)).isNull(); + assertThatIllegalArgumentException().isThrownBy(() -> typeMapping.getRequiredColumnType(p)); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mapping/model/DefaultNamingStrategyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mapping/model/DefaultNamingStrategyUnitTests.java new file mode 100644 index 0000000000..69f0f5bf50 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mapping/model/DefaultNamingStrategyUnitTests.java @@ -0,0 +1,133 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.mapping.model; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; + +import java.time.LocalDateTime; +import java.util.List; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for the default {@link NamingStrategy}. 
+ * + * @author Kazuki Shimizu + * @author Jens Schauder + * @author Mark Paluch + */ +public class DefaultNamingStrategyUnitTests { + + private final NamingStrategy target = DefaultNamingStrategy.INSTANCE; + + private final RelationalPersistentEntity persistentEntity = // + new JdbcMappingContext(target).getRequiredPersistentEntity(DummyEntity.class); + + @Test // DATAJDBC-184 + public void getTableName() { + assertThat(target.getTableName(persistentEntity.getType())).isEqualTo("dummy_entity"); + } + + @Test // DATAJDBC-184 + public void getColumnName() { + + assertThat(target.getColumnName(persistentEntity.getPersistentProperty("id"))) // + .isEqualTo("id"); + assertThat(target.getColumnName(persistentEntity.getPersistentProperty("createdAt"))) // + .isEqualTo("created_at"); + assertThat(target.getColumnName(persistentEntity.getPersistentProperty("dummySubEntities"))) // + .isEqualTo("dummy_sub_entities"); + } + + @Test // DATAJDBC-184 + public void getReverseColumnInfoName() { + assertThat(target.getReverseColumnName(persistentEntity.getPersistentProperty("dummySubEntities"))) + .isEqualTo("dummy_entity"); + } + + @Test // DATAJDBC-184 + public void getKeyColumn() { + + assertThat(target.getKeyColumn(persistentEntity.getPersistentProperty("dummySubEntities"))) // + .isEqualTo("dummy_entity_key"); + } + + @Test // DATAJDBC-184 + public void getSchema() { + assertThat(target.getSchema()).isEqualTo(""); + } + + private static class DummyEntity { + + @Id + private int id; + private LocalDateTime createdAt; + private List dummySubEntities; + + public int getId() { + return this.id; + } + + public LocalDateTime getCreatedAt() { + return this.createdAt; + } + + public List getDummySubEntities() { + return this.dummySubEntities; + } + + public void setId(int id) { + this.id = id; + } + + public void setCreatedAt(LocalDateTime createdAt) { + this.createdAt = createdAt; + } + + public void setDummySubEntities(List dummySubEntities) { + this.dummySubEntities = 
dummySubEntities; + } + } + + private static class DummySubEntity { + + @Id + private int id; + private LocalDateTime createdAt; + + public int getId() { + return this.id; + } + + public LocalDateTime getCreatedAt() { + return this.createdAt; + } + + public void setId(int id) { + this.id = id; + } + + public void setCreatedAt(LocalDateTime createdAt) { + this.createdAt = createdAt; + } + } + +} diff --git a/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntity.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntity.java similarity index 78% rename from src/test/java/org/springframework/data/jdbc/mybatis/DummyEntity.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntity.java index 796fd6b2f4..5e27f5e731 100644 --- a/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntity.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntity.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,11 +24,16 @@ @Alias("DummyEntity") class DummyEntity { - @Id final Long id; + @Id + final Long id; final String name; public DummyEntity(Long id, String name) { this.id = id; this.name = name; } + + public DummyEntity withId(Long id) { + return this.id == id ? 
this : new DummyEntity(id, this.name); + } } diff --git a/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntityMapper.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntityMapper.java similarity index 85% rename from src/test/java/org/springframework/data/jdbc/mybatis/DummyEntityMapper.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntityMapper.java index ad4d5116f3..08f8b59efe 100644 --- a/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntityMapper.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/DummyEntityMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisContextUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisContextUnitTests.java new file mode 100644 index 0000000000..1cb7e918d0 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisContextUnitTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.mybatis; + +import static org.assertj.core.api.SoftAssertions.*; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +public class MyBatisContextUnitTests { + + @Test // DATAJDBC-542 + public void testGetReturnsValuesFromIdentifier() { + + Map map = new HashMap<>(); + map.put(SqlIdentifier.quoted("one"), "oneValue"); + map.put(SqlIdentifier.unquoted("two"), "twoValue"); + map.put(SqlIdentifier.quoted("Three"), "threeValue"); + map.put(SqlIdentifier.unquoted("Four"), "fourValue"); + + MyBatisContext context = new MyBatisContext(Identifier.from(map), null, null); + + assertSoftly(softly -> { + + softly.assertThat(context.get("one")).isEqualTo("oneValue"); + softly.assertThat(context.get("two")).isEqualTo("twoValue"); + softly.assertThat(context.get("Three")).isEqualTo("threeValue"); + softly.assertThat(context.get("Four")).isEqualTo("fourValue"); + softly.assertThat(context.get("four")).isNull(); + softly.assertThat(context.get("five")).isNull(); + }); + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisCustomizingNamespaceHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisCustomizingNamespaceHsqlIntegrationTests.java new file mode 100644 index 0000000000..30e7f511f0 --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisCustomizingNamespaceHsqlIntegrationTests.java @@ -0,0 +1,127 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.mybatis; + +import static org.assertj.core.api.Assertions.*; + +import junit.framework.AssertionFailedError; + +import java.io.IOException; + +import org.apache.ibatis.session.Configuration; +import org.apache.ibatis.session.SqlSession; +import org.apache.ibatis.session.SqlSessionFactory; +import org.junit.jupiter.api.Test; +import org.mybatis.spring.SqlSessionFactoryBean; +import org.mybatis.spring.SqlSessionTemplate; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; +import org.springframework.context.annotation.Primary; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import 
org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; + +/** + * Tests the integration for customizing the namespace with Mybatis. + * + * @author Kazuki Shimizu + * @author Jens Schauder + * @author Tyler Van Gorder + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class MyBatisCustomizingNamespaceHsqlIntegrationTests { + + @Autowired SqlSessionFactory sqlSessionFactory; + @Autowired DummyEntityRepository repository; + + @Test // DATAJDBC-178 + public void myBatisGetsUsedForInsertAndSelect() { + + DummyEntity entity = new DummyEntity(null, "some name"); + DummyEntity saved = repository.save(entity); + + assertThat(saved.id).isNotNull(); + + DummyEntity reloaded = repository.findById(saved.id).orElseThrow(AssertionFailedError::new); + + assertThat(reloaded.name).isEqualTo("name " + saved.id); + } + + interface DummyEntityRepository extends CrudRepository {} + + @org.springframework.context.annotation.Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true) + static class Config { + + @Bean + TestClass testClass() { + return TestClass.of(MyBatisCustomizingNamespaceHsqlIntegrationTests.class); + } + + @Bean + SqlSessionFactoryBean createSessionFactory(EmbeddedDatabase db) throws IOException { + + Configuration configuration = new Configuration(); + configuration.getTypeAliasRegistry().registerAlias("MyBatisContext", MyBatisContext.class); + configuration.getTypeAliasRegistry().registerAlias(DummyEntity.class); + + SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean(); + sqlSessionFactoryBean.setDataSource(db); + 
sqlSessionFactoryBean.setConfiguration(configuration); + sqlSessionFactoryBean.setMapperLocations(new PathMatchingResourcePatternResolver() + .getResources("classpath*:org/springframework/data/jdbc/mybatis/mapper/*Mapper.xml")); + + return sqlSessionFactoryBean; + } + + @Bean + SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory factory) { + return new SqlSessionTemplate(factory); + } + + @Bean + @Primary + MyBatisDataAccessStrategy dataAccessStrategy(SqlSession sqlSession) { + + RelationalMappingContext context = new JdbcMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(context, (Identifier, path) -> null); + + MyBatisDataAccessStrategy strategy = new MyBatisDataAccessStrategy(sqlSession); + + strategy.setNamespaceStrategy(new NamespaceStrategy() { + @Override + public String getNamespace(Class domainType) { + return domainType.getPackage().getName() + ".mapper." + domainType.getSimpleName() + "Mapper"; + } + }); + + return strategy; + } + } +} diff --git a/src/test/java/org/springframework/data/jdbc/core/MyBatisDataAccessStrategyUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategyUnitTests.java similarity index 50% rename from src/test/java/org/springframework/data/jdbc/core/MyBatisDataAccessStrategyUnitTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategyUnitTests.java index 3c9a57c23e..4f66d3d813 100644 --- a/src/test/java/org/springframework/data/jdbc/core/MyBatisDataAccessStrategyUnitTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategyUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,38 +13,56 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.jdbc.core; +package org.springframework.data.jdbc.mybatis; import static java.util.Arrays.*; +import static java.util.Collections.*; import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; -import java.util.Collections; - +import java.util.Iterator; +import java.util.List; +import java.util.stream.Stream; +import org.apache.ibatis.cursor.Cursor; import org.apache.ibatis.session.SqlSession; -import org.apache.ibatis.session.SqlSessionFactory; -import org.junit.Before; -import org.junit.Test; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.jdbc.mybatis.MyBatisContext; -import org.springframework.data.jdbc.mybatis.MyBatisDataAccessStrategy; -import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.PersistentPropertyPathTestUtils; +import org.springframework.data.jdbc.core.convert.Identifier; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.conversion.IdValueSource; +import 
org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; /** * Unit tests for the {@link MyBatisDataAccessStrategy}, mainly ensuring that the correct statements get's looked up. * * @author Jens Schauder + * @author Mark Paluch + * @author Tyler Van Gorder + * @author Chirag Tailor + * @author Sergey Korotaev */ public class MyBatisDataAccessStrategyUnitTests { + RelationalMappingContext context = new JdbcMappingContext(); + SqlSession session = mock(SqlSession.class); ArgumentCaptor captor = ArgumentCaptor.forClass(MyBatisContext.class); MyBatisDataAccessStrategy accessStrategy = new MyBatisDataAccessStrategy(session); - @Before + PersistentPropertyPath path = PersistentPropertyPathTestUtils.getPath("one.two", + DummyEntity.class, context); + + @BeforeEach public void before() { doReturn(false).when(session).selectOne(any(), any()); @@ -53,7 +71,8 @@ public void before() { @Test // DATAJDBC-123 public void insert() { - accessStrategy.insert("x", String.class, Collections.singletonMap("key", "value")); + accessStrategy.insert("x", String.class, Identifier.from(singletonMap(unquoted("key"), "value")), + IdValueSource.GENERATED); verify(session).insert(eq("java.lang.StringMapper.insert"), captor.capture()); @@ -69,7 +88,7 @@ public void insert() { null, // String.class, // "value" // - ); + ); } @Test // DATAJDBC-123 @@ -91,7 +110,7 @@ public void update() { null, // String.class, // null // - ); + ); } @Test // DATAJDBC-123 @@ -113,15 +132,17 @@ public void delete() { "an-id", // String.class, // null // - ); + ); } @Test // DATAJDBC-123 public void deleteAllByPath() { - accessStrategy.deleteAll(PropertyPath.from("class.name.bytes", String.class)); + accessStrategy.deleteAll(path); - verify(session).delete(eq("java.lang.StringMapper.deleteAll-class-name-bytes"), captor.capture()); + verify(session).delete(eq( + 
"org.springframework.data.jdbc.mybatis.MyBatisDataAccessStrategyUnitTests$DummyEntityMapper.deleteAll-one-two"), + captor.capture()); assertThat(captor.getValue()) // .isNotNull() // @@ -133,9 +154,9 @@ public void deleteAllByPath() { ).containsExactly( // null, // null, // - byte[].class, // + ChildTwo.class, // null // - ); + ); } @Test // DATAJDBC-123 @@ -157,15 +178,17 @@ public void deleteAllByType() { null, // String.class, // null // - ); + ); } @Test // DATAJDBC-123 public void deleteByPath() { - accessStrategy.delete("rootid", PropertyPath.from("class.name.bytes", String.class)); + accessStrategy.delete("rootid", path); - verify(session).delete(eq("java.lang.StringMapper.delete-class-name-bytes"), captor.capture()); + verify(session).delete( + eq("org.springframework.data.jdbc.mybatis.MyBatisDataAccessStrategyUnitTests$DummyEntityMapper.delete-one-two"), + captor.capture()); assertThat(captor.getValue()) // .isNotNull() // @@ -176,9 +199,9 @@ public void deleteByPath() { c -> c.get("key") // ).containsExactly( // null, "rootid", // - byte[].class, // + ChildTwo.class, // null // - ); + ); } @Test // DATAJDBC-123 @@ -199,7 +222,7 @@ public void findById() { null, "an-id", // String.class, // null // - ); + ); } @Test // DATAJDBC-123 @@ -221,7 +244,37 @@ public void findAll() { null, // String.class, // null // - ); + ); + } + + @Test // GH-1714 + public void streamAll() { + + String value = "some answer"; + + Cursor cursor = getCursor(value); + + when(session.selectCursor(anyString(), any())).then(answer -> cursor); + + Stream streamable = accessStrategy.streamAll(String.class); + + verify(session).selectCursor(eq("java.lang.StringMapper.streamAll"), captor.capture()); + + assertThat(streamable).isNotNull().containsExactly(value); + + assertThat(captor.getValue()) // + .isNotNull() // + .extracting( // + MyBatisContext::getInstance, // + MyBatisContext::getId, // + MyBatisContext::getDomainType, // + c -> c.get("key") // + ).containsExactly( // + null, // + 
null, // + String.class, // + null // + ); } @Test // DATAJDBC-123 @@ -243,36 +296,70 @@ public void findAllById() { asList("id1", "id2"), // String.class, // null // - ); + ); + } + + @Test // GH-1714 + public void streamAllByIds() { + + String value = "some answer 2"; + Cursor cursor = getCursor(value); + + when(session.selectCursor(anyString(), any())).then(answer -> cursor); + + accessStrategy.streamAllByIds(asList("id1", "id2"), String.class); + + verify(session).selectCursor(eq("java.lang.StringMapper.streamAllByIds"), captor.capture()); + + assertThat(captor.getValue()) // + .isNotNull() // + .extracting( // + MyBatisContext::getInstance, // + MyBatisContext::getId, // + MyBatisContext::getDomainType, // + c -> c.get("key") // + ).containsExactly( // + null, // + asList("id1", "id2"), // + String.class, // + null // + ); } @SuppressWarnings("unchecked") - @Test // DATAJDBC-123 - public void findAllByProperty() { + @Test // DATAJDBC-384 + public void findAllByPath() { - JdbcPersistentProperty property = mock(JdbcPersistentProperty.class, Mockito.RETURNS_DEEP_STUBS); + RelationalPersistentProperty property = mock(RelationalPersistentProperty.class, RETURNS_DEEP_STUBS); + PersistentPropertyPath path = mock(PersistentPropertyPath.class, RETURNS_DEEP_STUBS); + when(path.getBaseProperty()).thenReturn(property); when(property.getOwner().getType()).thenReturn((Class) String.class); - doReturn(Number.class).when(property).getType(); - doReturn("propertyName").when(property).getName(); - accessStrategy.findAllByProperty("id", property); + when(path.getLeafProperty()).thenReturn(property); + when(property.getType()).thenReturn((Class) Number.class); + + when(path.toDotPath()).thenReturn("dot.path"); - verify(session).selectList(eq("java.lang.StringMapper.findAllByProperty-propertyName"), captor.capture()); + accessStrategy.findAllByPath(Identifier.empty(), path); + + verify(session).selectList(eq("java.lang.StringMapper.findAllByPath-dot.path"), captor.capture()); 
assertThat(captor.getValue()) // .isNotNull() // .extracting( // MyBatisContext::getInstance, // MyBatisContext::getId, // + MyBatisContext::getIdentifier, // MyBatisContext::getDomainType, // c -> c.get("key") // ).containsExactly( // null, // - "id", // + null, // + Identifier.empty(), // Number.class, // null // - ); + ); } @Test // DATAJDBC-123 @@ -294,7 +381,7 @@ public void existsById() { "id", // String.class, // null // - ); + ); } @Test // DATAJDBC-157 @@ -304,7 +391,6 @@ public void count() { accessStrategy.count(String.class); - verify(session).selectOne(eq("java.lang.StringMapper.count"), captor.capture()); assertThat(captor.getValue()) // @@ -315,11 +401,124 @@ public void count() { MyBatisContext::getDomainType, // c -> c.get("key") // ).containsExactly( // - null, // - null, // - String.class, // - null // - ); + null, // + null, // + String.class, // + null // + ); } + @Test // DATAJDBC-101 + public void findAllSorted() { + + accessStrategy.findAll(String.class, Sort.by("length")); + + verify(session).selectList(eq("java.lang.StringMapper.findAllSorted"), captor.capture()); + + assertThat(captor.getValue()) // + .isNotNull() // + .extracting( // + MyBatisContext::getInstance, // + MyBatisContext::getId, // + MyBatisContext::getDomainType, // + c -> c.get("sort") // + ).containsExactly( // + null, // + null, // + String.class, // + Sort.by("length") // + ); + } + + @Test // GH-1714 + public void streamAllSorted() { + + String value = "some answer 3"; + Cursor cursor = getCursor(value); + + when(session.selectCursor(anyString(), any())).then(answer -> cursor); + + accessStrategy.streamAll(String.class, Sort.by("length")); + + verify(session).selectCursor(eq("java.lang.StringMapper.streamAllSorted"), captor.capture()); + + assertThat(captor.getValue()) // + .isNotNull() // + .extracting( // + MyBatisContext::getInstance, // + MyBatisContext::getId, // + MyBatisContext::getDomainType, // + c -> c.get("sort") // + ).containsExactly( // + null, // + null, 
// + String.class, // + Sort.by("length") // + ); + } + + @Test // DATAJDBC-101 + public void findAllPaged() { + + accessStrategy.findAll(String.class, PageRequest.of(0, 20)); + + verify(session).selectList(eq("java.lang.StringMapper.findAllPaged"), captor.capture()); + + assertThat(captor.getValue()) // + .isNotNull() // + .extracting( // + MyBatisContext::getInstance, // + MyBatisContext::getId, // + MyBatisContext::getDomainType, // + c -> c.get("pageable") // + ).containsExactly( // + null, // + null, // + String.class, // + PageRequest.of(0, 20) // + ); + } + + @SuppressWarnings("unused") + private static class DummyEntity { + ChildOne one; + } + + @SuppressWarnings("unused") + private static class ChildOne { + ChildTwo two; + } + + private static class ChildTwo { + } + + private Cursor getCursor(String value) { + return new Cursor<>() { + @Override + public boolean isOpen() { + return false; + } + + @Override + public boolean isConsumed() { + return false; + } + + @Override + public int getCurrentIndex() { + return 0; + } + + @Override + public void close() { + + } + + @NotNull + @Override + public Iterator iterator() { + return List.of(value).iterator(); + } + }; + } } diff --git a/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisHsqlIntegrationTests.java similarity index 71% rename from src/test/java/org/springframework/data/jdbc/mybatis/MyBatisHsqlIntegrationTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisHsqlIntegrationTests.java index f3e2ee331d..c5dd57d8e0 100644 --- a/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisHsqlIntegrationTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/mybatis/MyBatisHsqlIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,48 +17,79 @@ import static org.assertj.core.api.Assertions.assertThat; -import junit.framework.AssertionFailedError; - import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mybatis.spring.SqlSessionFactoryBean; import org.mybatis.spring.SqlSessionTemplate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; +import org.springframework.context.annotation.Primary; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.dialect.JdbcHsqlDbDialect; import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestClass; import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import 
org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.transaction.annotation.Transactional; -import javax.net.ssl.SSLSocketFactory; +import junit.framework.AssertionFailedError; /** * Tests the integration with Mybatis. * * @author Jens Schauder * @author Greg Turnquist + * @author Mark Paluch */ -@ContextConfiguration -@Transactional +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) public class MyBatisHsqlIntegrationTests { + @Autowired SqlSessionFactory sqlSessionFactory; + @Autowired DummyEntityRepository repository; + + @Test // DATAJDBC-123 + public void mybatisSelfTest() { + + SqlSession session = sqlSessionFactory.openSession(); + + session.selectList("org.springframework.data.jdbc.mybatis.DummyEntityMapper.findById"); + } + + @Test // DATAJDBC-123 + public void myBatisGetsUsedForInsertAndSelect() { + + DummyEntity entity = new DummyEntity(null, "some name"); + DummyEntity saved = repository.save(entity); + + assertThat(saved.id).isNotNull(); + + DummyEntity reloaded = repository.findById(saved.id).orElseThrow(AssertionFailedError::new); + + assertThat(reloaded).isNotNull().extracting(e -> e.id, e -> e.name); + } + + interface DummyEntityRepository extends CrudRepository { + + } + @org.springframework.context.annotation.Configuration @Import(TestConfiguration.class) @EnableJdbcRepositories(considerNestedRepositories = true) static class Config { @Bean - Class testClass() { - return MyBatisHsqlIntegrationTests.class; + TestClass testClass() { + return TestClass.of(MyBatisHsqlIntegrationTests.class); } @Bean @@ -83,39 +114,12 @@ SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory factory) { } @Bean - MyBatisDataAccessStrategy dataAccessStrategy(SqlSession sqlSession) { - return new 
MyBatisDataAccessStrategy(sqlSession); - } - } - - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); - - @Autowired SqlSessionFactory sqlSessionFactory; - @Autowired DummyEntityRepository repository; - - @Test // DATAJDBC-123 - public void mybatisSelfTest() { - - SqlSession session = sqlSessionFactory.openSession(); - - session.selectList("org.springframework.data.jdbc.mybatis.DummyEntityMapper.findById"); - } - - @Test // DATAJDBC-123 - public void myBatisGetsUsedForInsertAndSelect() { - - DummyEntity entity = new DummyEntity(null, "some name"); - DummyEntity saved = repository.save(entity); - - assertThat(saved.id).isNotNull(); - - DummyEntity reloaded = repository.findById(saved.id).orElseThrow(AssertionFailedError::new); - - assertThat(reloaded).isNotNull().extracting(e -> e.id, e -> e.name); - } - - interface DummyEntityRepository extends CrudRepository { + @Primary + DataAccessStrategy dataAccessStrategy(RelationalMappingContext context, JdbcConverter converter, + SqlSession sqlSession, EmbeddedDatabase db) { + return MyBatisDataAccessStrategy.createCombinedAccessStrategy(context, converter, + new NamedParameterJdbcTemplate(db), sqlSession, JdbcHsqlDbDialect.INSTANCE); + } } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/AbstractJdbcRepositoryLookUpStrategyTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/AbstractJdbcRepositoryLookUpStrategyTests.java new file mode 100644 index 0000000000..2e01720094 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/AbstractJdbcRepositoryLookUpStrategyTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Base class to test @EnableJdbcRepositories(queryLookupStrategy = ...) + * + * @author Diego Krupitza + * @since 2.4 + */ +@EnabledOnDatabase(DatabaseType.HSQL) +abstract class AbstractJdbcRepositoryLookUpStrategyTests { + + @Autowired protected OnesRepository onesRepository; + @Autowired NamedParameterJdbcOperations template; + @Autowired RelationalMappingContext context; + + void insertTestInstances() { + + AggregateOne firstAggregate = new AggregateOne("Diego"); + AggregateOne secondAggregate = new AggregateOne("Franz"); + AggregateOne thirdAggregate = new AggregateOne("Daniela"); + + onesRepository.saveAll(Arrays.asList(firstAggregate, secondAggregate, thirdAggregate)); + } + + void callDeclaredQuery(String name, int expectedSize, String... 
expectedNames) { + + insertTestInstances(); + + List likeNameD = onesRepository.findAllByName(name); + + assertThat(likeNameD).hasSize(expectedSize); + + assertThat(likeNameD.stream().map(item -> item.name).collect(Collectors.toList())) // + .contains(expectedNames); + + } + + protected void callDerivedQuery() { + insertTestInstances(); + + AggregateOne diego = onesRepository.findByName("Diego"); + assertThat(diego).isNotNull(); + assertThat(diego.id).isNotNull(); + assertThat(diego.name).isEqualToIgnoringCase("Diego"); + } + + interface OnesRepository extends CrudRepository { + + // if derived is used it is just a basic findByName + // if declared is used it should be a like check + @Query("Select * from aggregate_one where NAME like concat('%', :name, '%') ") + List findAllByName(String name); + + AggregateOne findByName(String name); + } + + static class AggregateOne { + + @Id Long id; + String name; + + public AggregateOne(String name) { + this.name = name; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/DeclaredQueryRepositoryUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/DeclaredQueryRepositoryUnitTests.java new file mode 100644 index 0000000000..2670ed88be --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/DeclaredQueryRepositoryUnitTests.java @@ -0,0 +1,145 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.DefaultJdbcTypeFactory; +import org.springframework.data.jdbc.core.convert.DelegatingDataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.jdbc.core.dialect.JdbcHsqlDbDialect; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.Nullable; + +/** + * Extracts the SQL statement that results from declared queries of a repository and perform assertions on it. 
+ * + * @author Jens Schauder + */ +public class DeclaredQueryRepositoryUnitTests { + + private NamedParameterJdbcOperations operations = mock(NamedParameterJdbcOperations.class, RETURNS_DEEP_STUBS); + + @Test // GH-1856 + void plainSql() { + + repository(DummyEntityRepository.class).plainQuery(); + + assertThat(query()).isEqualTo("select * from someTable"); + } + + @Test // GH-1856 + void tableNameQuery() { + + repository(DummyEntityRepository.class).tableNameQuery(); + + assertThat(query()).isEqualTo("select * from \"DUMMY_ENTITY\""); + } + + @Test // GH-1856 + void renamedTableNameQuery() { + + repository(RenamedEntityRepository.class).tableNameQuery(); + + assertThat(query()).isEqualTo("select * from \"ReNamed\""); + } + + @Test // GH-1856 + void fullyQualifiedTableNameQuery() { + + repository(RenamedEntityRepository.class).qualifiedTableNameQuery(); + + assertThat(query()).isEqualTo("select * from \"someSchema\".\"ReNamed\""); + } + + private String query() { + + ArgumentCaptor queryCaptor = ArgumentCaptor.forClass(String.class); + verify(operations).queryForObject(queryCaptor.capture(), any(SqlParameterSource.class), any(RowMapper.class)); + return queryCaptor.getValue(); + } + + private @NotNull T repository(Class repositoryInterface) { + + Dialect dialect = JdbcHsqlDbDialect.INSTANCE; + + RelationalMappingContext context = new JdbcMappingContext(); + + DelegatingDataAccessStrategy delegatingDataAccessStrategy = new DelegatingDataAccessStrategy(); + JdbcConverter converter = new MappingJdbcConverter(context, delegatingDataAccessStrategy, + new JdbcCustomConversions(), new DefaultJdbcTypeFactory(operations.getJdbcOperations())); + + DataAccessStrategy dataAccessStrategy = mock(DataAccessStrategy.class); + ApplicationEventPublisher publisher = mock(ApplicationEventPublisher.class); + + JdbcRepositoryFactory factory = new JdbcRepositoryFactory(dataAccessStrategy, context, converter, dialect, + publisher, operations); + + return 
factory.getRepository(repositoryInterface); + } + + @Table + record DummyEntity(@Id Long id, String name) { + } + + interface DummyEntityRepository extends CrudRepository { + + @Nullable + @Query("select * from someTable") + DummyEntity plainQuery(); + + @Nullable + @Query("select * from #{#tableName}") + DummyEntity tableNameQuery(); + } + + @Table(name = "ReNamed", schema = "someSchema") + record RenamedEntity(@Id Long id, String name) { + } + + interface RenamedEntityRepository extends CrudRepository { + + @Nullable + @Query("select * from #{#tableName}") + DummyEntity tableNameQuery(); + + @Nullable + @Query("select * from #{#qualifiedTableName}") + DummyEntity qualifiedTableNameQuery(); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryBeforeSaveHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryBeforeSaveHsqlIntegrationTests.java new file mode 100644 index 0000000000..42740dc51e --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryBeforeSaveHsqlIntegrationTests.java @@ -0,0 +1,307 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.repository.ListCrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Integration tests for the {@link BeforeSaveCallback}. 
+ * + * @author Chirag Tailor + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class JdbcRepositoryBeforeSaveHsqlIntegrationTests { + + @Autowired NamedParameterJdbcTemplate template; + @Autowired ImmutableEntityRepository immutableWithManualIdEntityRepository; + @Autowired MutableEntityRepository mutableEntityRepository; + @Autowired MutableWithImmutableIdEntityRepository mutableWithImmutableIdEntityRepository; + @Autowired ImmutableWithMutableIdEntityRepository immutableWithMutableIdEntityRepository; + + @Test // GH-1199 + public void immutableEntity() { + + ImmutableEntity entity = new ImmutableEntity(null, "immutable"); + ImmutableEntity saved = immutableWithManualIdEntityRepository.save(entity); + + assertThat(saved.getId()).isNotNull(); + assertThat(saved.getName()).isEqualTo("fromBeforeSaveCallback"); + + List entities = immutableWithManualIdEntityRepository.findAll(); + assertThat(entities).hasSize(1); + ImmutableEntity reloaded = entities.get(0); + assertThat(reloaded.getId()).isNotNull(); + assertThat(reloaded.getName()).isEqualTo("fromBeforeSaveCallback"); + } + + @Test // GH-1199 + public void mutableEntity() { + + MutableEntity entity = new MutableEntity(null, "immutable"); + MutableEntity saved = mutableEntityRepository.save(entity); + + assertThat(saved.getId()).isNotNull(); + assertThat(saved.getName()).isEqualTo("fromBeforeSaveCallback"); + + List entities = mutableEntityRepository.findAll(); + assertThat(entities).hasSize(1); + MutableEntity reloaded = entities.get(0); + assertThat(reloaded.getId()).isNotNull(); + assertThat(reloaded.getName()).isEqualTo("fromBeforeSaveCallback"); + } + + @Test // GH-1199 + public void mutableWithImmutableIdEntity() { + + MutableWithImmutableIdEntity entity = new MutableWithImmutableIdEntity(null, "immutable"); + MutableWithImmutableIdEntity saved = mutableWithImmutableIdEntityRepository.save(entity); + + assertThat(saved.getId()).isNotNull(); + 
assertThat(saved.getName()).isEqualTo("fromBeforeSaveCallback"); + + List entities = mutableWithImmutableIdEntityRepository.findAll(); + assertThat(entities).hasSize(1); + MutableWithImmutableIdEntity reloaded = entities.get(0); + assertThat(reloaded.getId()).isNotNull(); + assertThat(reloaded.getName()).isEqualTo("fromBeforeSaveCallback"); + } + + @Test // GH-1199 + public void immutableWithMutableIdEntity() { + + ImmutableWithMutableIdEntity entity = new ImmutableWithMutableIdEntity(null, "immutable"); + ImmutableWithMutableIdEntity saved = immutableWithMutableIdEntityRepository.save(entity); + + assertThat(saved.getId()).isNotNull(); + assertThat(saved.getName()).isEqualTo("fromBeforeSaveCallback"); + + List entities = immutableWithMutableIdEntityRepository.findAll(); + assertThat(entities).hasSize(1); + ImmutableWithMutableIdEntity reloaded = entities.get(0); + assertThat(reloaded.getId()).isNotNull(); + assertThat(reloaded.getName()).isEqualTo("fromBeforeSaveCallback"); + } + + private interface ImmutableEntityRepository extends ListCrudRepository {} + + static final class ImmutableEntity { + @Id private final Long id; + private final String name; + + public ImmutableEntity(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final ImmutableEntity other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 
43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryBeforeSaveHsqlIntegrationTests.ImmutableEntity(id=" + this.getId() + ", name=" + + this.getName() + ")"; + } + + public ImmutableEntity withId(Long id) { + return this.id == id ? this : new ImmutableEntity(id, this.name); + } + + public ImmutableEntity withName(String name) { + return this.name == name ? this : new ImmutableEntity(this.id, name); + } + } + + private interface MutableEntityRepository extends ListCrudRepository {} + + static class MutableEntity { + @Id private Long id; + private String name; + + public MutableEntity(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(Long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + } + + private interface MutableWithImmutableIdEntityRepository + extends ListCrudRepository {} + + static class MutableWithImmutableIdEntity { + @Id private final Long id; + private String name; + + public MutableWithImmutableIdEntity(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + } + + private interface ImmutableWithMutableIdEntityRepository + extends ListCrudRepository {} + + static class ImmutableWithMutableIdEntity { + @Id private Long id; + private final String name; + + public ImmutableWithMutableIdEntity(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(Long id) { + this.id = id; + } + + public ImmutableWithMutableIdEntity 
withName(String name) { + return this.name == name ? this : new ImmutableWithMutableIdEntity(this.id, name); + } + } + + @Configuration + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(value = ListCrudRepository.class, type = FilterType.ASSIGNABLE_TYPE)) + @Import(TestConfiguration.class) + static class Config { + + /** + * {@link NamingStrategy} that harmlessly uppercases the table name, demonstrating how to inject one while not + * breaking existing SQL operations. + */ + @Bean + NamingStrategy namingStrategy() { + + return new NamingStrategy() { + + @Override + public String getTableName(Class type) { + return type.getSimpleName().toUpperCase(); + } + }; + } + + @Bean + BeforeSaveCallback nameSetterImmutable() { + return (aggregate, aggregateChange) -> aggregate.withName("fromBeforeSaveCallback"); + } + + @Bean + BeforeSaveCallback nameSetterMutable() { + return (aggregate, aggregateChange) -> { + aggregate.setName("fromBeforeSaveCallback"); + return aggregate; + }; + } + + @Bean + BeforeSaveCallback nameSetterMutableWithImmutableId() { + return (aggregate, aggregateChange) -> { + aggregate.setName("fromBeforeSaveCallback"); + return aggregate; + }; + } + + @Bean + BeforeSaveCallback nameSetterImmutableWithMutableId() { + return (aggregate, aggregateChange) -> aggregate.withName("fromBeforeSaveCallback"); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryConcurrencyIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryConcurrencyIntegrationTests.java new file mode 100644 index 0000000000..470c7fc88d --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryConcurrencyIntegrationTests.java @@ -0,0 +1,307 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import junit.framework.AssertionFailedError; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.StringJoiner; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.function.UnaryOperator; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.platform.commons.util.ExceptionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.dao.IncorrectUpdateSemanticsDataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import 
org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +/** + * Tests that highly concurrent update operations of an entity don't cause deadlocks. + * + * @author Myeonghyeon Lee + * @author Jens Schauder + */ +@ExtendWith(SpringExtension.class) +public class JdbcRepositoryConcurrencyIntegrationTests { + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + TestClass testClass() { + return TestClass.of(JdbcRepositoryConcurrencyIntegrationTests.class); + } + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + } + + @Autowired + NamedParameterJdbcTemplate template; + @Autowired + DummyEntityRepository repository; + @Autowired + PlatformTransactionManager transactionManager; + + List concurrencyEntities; + DummyEntity entity; + + TransactionTemplate transactionTemplate; + List exceptions; + + @BeforeAll + public static void beforeClass() { + + Assertions.registerFormatterForType(CopyOnWriteArrayList.class, l -> { + + StringJoiner joiner = new StringJoiner(", ", "List(", ")"); + l.forEach(e -> { + + if (e instanceof Throwable) { + printThrowable(joiner, (Throwable) e); + } else { + joiner.add(e.toString()); + } + }); + + return joiner.toString(); + }); + } + + private static void printThrowable(StringJoiner joiner, Throwable t) { + + joiner.add(t + ExceptionUtils.readStackTrace(t)); + if (t.getCause() != null) { + + joiner.add("\ncaused by:\n"); + printThrowable(joiner, t.getCause()); + } + } + + @BeforeEach + public void before() { + + entity = repository.save(createDummyEntity()); + + assertThat(entity.getId()).isNotNull(); + + concurrencyEntities = createEntityStates(entity); + + transactionTemplate = new TransactionTemplate(this.transactionManager); + + exceptions = new 
CopyOnWriteArrayList<>(); + } + + @Test // DATAJDBC-488 + public void updateConcurrencyWithEmptyReferences() throws Exception { + + // latch for all threads to wait on. + CountDownLatch startLatch = new CountDownLatch(concurrencyEntities.size()); + // latch for main thread to wait on until all threads are done. + CountDownLatch doneLatch = new CountDownLatch(concurrencyEntities.size()); + + UnaryOperator action = e -> repository.save(e); + + concurrencyEntities.forEach(e -> executeInParallel(startLatch, doneLatch, action, e)); + + doneLatch.await(); + + DummyEntity reloaded = repository.findById(entity.id).orElseThrow(AssertionFailedError::new); + assertThat(reloaded.content).hasSize(2); + assertThat(exceptions).isEmpty(); + } + + @Test // DATAJDBC-493 + public void concurrentUpdateAndDelete() throws Exception { + + CountDownLatch startLatch = new CountDownLatch(concurrencyEntities.size() + 1); // latch for all threads to wait on. + CountDownLatch doneLatch = new CountDownLatch(concurrencyEntities.size() + 1); // latch for main thread to wait on + // until all threads are done. + UnaryOperator updateAction = e -> { + try { + return repository.save(e); + } catch (Exception ex) { + // When the delete execution is complete, the Update execution throws an + // IncorrectUpdateSemanticsDataAccessException. 
+ if (ex.getCause() instanceof IncorrectUpdateSemanticsDataAccessException) { + return null; + } + throw ex; + } + }; + + UnaryOperator deleteAction = e -> { + repository.deleteById(entity.id); + return null; + }; + + concurrencyEntities.forEach(e -> executeInParallel(startLatch, doneLatch, updateAction, e)); + executeInParallel(startLatch, doneLatch, deleteAction, entity); + + doneLatch.await(); + + assertThat(exceptions).isEmpty(); + assertThat(repository.findById(entity.id)).isEmpty(); + } + + @Test // DATAJDBC-493 + public void concurrentUpdateAndDeleteAll() throws Exception { + + CountDownLatch startLatch = new CountDownLatch(concurrencyEntities.size() + 1); // latch for all threads to wait on. + CountDownLatch doneLatch = new CountDownLatch(concurrencyEntities.size() + 1); // latch for main thread to wait on + // until all threads are done. + + UnaryOperator updateAction = e -> { + try { + return repository.save(e); + } catch (Exception ex) { + // When the delete execution is complete, the Update execution throws an + // IncorrectUpdateSemanticsDataAccessException. 
+ if (ex.getCause() instanceof IncorrectUpdateSemanticsDataAccessException) { + return null; + } + throw ex; + } + }; + + UnaryOperator deleteAction = e -> { + repository.deleteAll(); + return null; + }; + + concurrencyEntities.forEach(e -> executeInParallel(startLatch, doneLatch, updateAction, e)); + executeInParallel(startLatch, doneLatch, deleteAction, entity); + + doneLatch.await(); + + assertThat(exceptions).isEmpty(); + assertThat(repository.count()).isEqualTo(0); + } + + private void executeInParallel(CountDownLatch startLatch, CountDownLatch doneLatch, + UnaryOperator deleteAction, DummyEntity entity) { + // delete + new Thread(() -> { + try { + + startLatch.countDown(); + startLatch.await(); + + transactionTemplate.execute(status -> deleteAction.apply(entity)); + } catch (Exception ex) { + exceptions.add(ex); + } finally { + doneLatch.countDown(); + } + }).start(); + } + + private List createEntityStates(DummyEntity entity) { + + List concurrencyEntities = new ArrayList<>(); + Element element1 = new Element(null, 1L); + Element element2 = new Element(null, 2L); + + for (int i = 0; i < 50; i++) { + + List newContent = Arrays.asList(element1.withContent(element1.content + i + 2), + element2.withContent(element2.content + i + 2)); + + concurrencyEntities.add(entity.withName(entity.getName() + i).withContent(newContent)); + } + return concurrencyEntities; + } + + private static DummyEntity createDummyEntity() { + return new DummyEntity(null, "Entity Name", new ArrayList<>()); + } + + interface DummyEntityRepository extends CrudRepository { + } + + static class DummyEntity { + + @Id + private Long id; + String name; + final List content; + + public DummyEntity(Long id, String name, List content) { + this.id = id; + this.name = name; + this.content = content; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public List getContent() { + return this.content; + } + + public DummyEntity withName(String 
name) { + return this.name == name ? this : new DummyEntity(this.id, name, this.content); + } + + public DummyEntity withContent(List content) { + return this.content == content ? this : new DummyEntity(this.id, this.name, content); + } + } + + static class Element { + + @Id + private Long id; + final Long content; + + public Element(Long id, Long content) { + this.id = id; + this.content = content; + } + + public Element withContent(Long content) { + return this.content == content ? this : new Element(this.id, content); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCreateIfNotFoundLookUpStrategyTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCreateIfNotFoundLookUpStrategyTests.java new file mode 100644 index 0000000000..970a9893f0 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCreateIfNotFoundLookUpStrategyTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import org.junit.jupiter.api.Test; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.query.QueryLookupStrategy; + +/** + * Test to verify that + * @EnableJdbcRepositories(queryLookupStrategy = QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND) works as + * intended. + * + * @author Diego Krupitza + * @author Jens Schauder + */ +@IntegrationTest +class JdbcRepositoryCreateIfNotFoundLookUpStrategyTests extends AbstractJdbcRepositoryLookUpStrategyTests { + + @Test // GH-1043 + void declaredQueryShouldWork() { + onesRepository.deleteAll(); + callDeclaredQuery("D", 2, "Diego", "Daniela"); + } + + @Test // GH-1043 + void derivedQueryShouldWork() { + onesRepository.deleteAll(); + callDerivedQuery(); + } + + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, + queryLookupStrategy = QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND, + includeFilters = @ComponentScan.Filter(value = AbstractJdbcRepositoryLookUpStrategyTests.OnesRepository.class, + type = FilterType.ASSIGNABLE_TYPE)) + static class Config { + + @Bean + TestClass testClass() { + // boostrap with a different SQL init script + return TestClass.of(AbstractJdbcRepositoryLookUpStrategyTests.class); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCreateLookUpStrategyTests.java 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCreateLookUpStrategyTests.java new file mode 100644 index 0000000000..533ca7004a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCreateLookUpStrategyTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import org.junit.jupiter.api.Test; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.query.QueryLookupStrategy; + +/** + * Test to verify that @EnableJdbcRepositories(queryLookupStrategy = QueryLookupStrategy.Key.CREATE) works + * as intended. 
+ * + * @author Diego Krupitza + * @author Jens Schauder + */ +@IntegrationTest +class JdbcRepositoryCreateLookUpStrategyTests extends AbstractJdbcRepositoryLookUpStrategyTests { + + @Test // GH-1043 + void declaredQueryShouldWork() { + onesRepository.deleteAll(); + + // here the declared query will use the derived query which does something totally different + callDeclaredQuery("D", 0); + } + + @Test // GH-1043 + void derivedQueryShouldWork() { + onesRepository.deleteAll(); + callDerivedQuery(); + } + + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, queryLookupStrategy = QueryLookupStrategy.Key.CREATE, + includeFilters = @ComponentScan.Filter(value = OnesRepository.class, type = FilterType.ASSIGNABLE_TYPE)) + static class Config { + + @Bean + TestClass testClass() { + // boostrap with a different SQL init script + return TestClass.of(AbstractJdbcRepositoryLookUpStrategyTests.class); + } + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCrossAggregateHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCrossAggregateHsqlIntegrationTests.java new file mode 100644 index 0000000000..1c52709796 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCrossAggregateHsqlIntegrationTests.java @@ -0,0 +1,112 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Very simple use cases for creation and usage of JdbcRepositories. 
+ * + * @author Jens Schauder + * @author Salim Achouche + * @author Salim Achouche + */ + +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class JdbcRepositoryCrossAggregateHsqlIntegrationTests { + + private static final long TWO_ID = 23L; + + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(value = Ones.class, type = FilterType.ASSIGNABLE_TYPE)) + static class Config { + + } + + @Autowired NamedParameterJdbcTemplate template; + @Autowired Ones ones; + @Autowired RelationalMappingContext context; + + @SuppressWarnings("ConstantConditions") + @Test // DATAJDBC-221 + public void savesAndRead() { + + AggregateOne one = new AggregateOne(); + one.name = "Aggregate - 1"; + one.two = AggregateReference.to(TWO_ID); + + one = ones.save(one); + + AggregateOne reloaded = ones.findById(one.id).get(); + assertThat(reloaded.two.getId()).isEqualTo(TWO_ID); + } + + @Test // DATAJDBC-221 + public void savesAndUpdate() { + + AggregateOne one = new AggregateOne(); + one.name = "Aggregate - 1"; + one.two = AggregateReference.to(42L); + one = ones.save(one); + + one.two = AggregateReference.to(TWO_ID); + + ones.save(one); + + assertThat( // + JdbcTestUtils.countRowsInTableWhere( // + template.getJdbcOperations(), // + "aggregate_one", // + "two = " + TWO_ID) // + ).isEqualTo(1); + } + + interface Ones extends CrudRepository {} + + static class AggregateOne { + + @Id Long id; + String name; + AggregateReference two; + } + + static class AggregateTwo { + + @Id Long id; + String name; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCustomConversionIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCustomConversionIntegrationTests.java new file mode 100644 index 0000000000..d8f823f12d --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryCustomConversionIntegrationTests.java @@ -0,0 +1,299 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; + +import java.math.BigDecimal; +import java.sql.JDBCType; +import java.util.Date; +import java.util.List; +import java.util.Set; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; +import org.springframework.data.jdbc.core.mapping.JdbcValue; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.IntegrationTest; +import 
org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; + +/** + * Tests storing and retrieving data types that get processed by custom conversions. + * + * @author Jens Schauder + * @author Sanghyuk Jung + * @author Chirag Tailor + */ +@IntegrationTest +public class JdbcRepositoryCustomConversionIntegrationTests { + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + EntityWithStringyBigDecimalRepository repository(JdbcRepositoryFactory factory) { + return factory.getRepository(EntityWithStringyBigDecimalRepository.class); + } + + @Bean + JdbcCustomConversions jdbcCustomConversions() { + return new JdbcCustomConversions(asList(StringToBigDecimalConverter.INSTANCE, BigDecimalToString.INSTANCE, + CustomIdReadingConverter.INSTANCE, CustomIdWritingConverter.INSTANCE, DirectionToIntegerConverter.INSTANCE, + NumberToDirectionConverter.INSTANCE, IntegerToDirectionConverter.INSTANCE)); + } + } + + @Autowired EntityWithStringyBigDecimalRepository repository; + + /** + * In PostrgreSQL this fails if a simple converter like the following is used. + * + *
+	 *
+	 * @WritingConverter
+	 * enum PlainStringToBigDecimalConverter implements Converter {
+	 *
+	 * 	INSTANCE;
+	 *
+	 * 	@Override
+	 * 	@Nullable
+	 * 	public BigDecimal convert(String source) {
+	 *
+	 * 		return source == new BigDecimal(source);
+	 * 	}
+	 * }
+	 * 
+ */ + @Test // DATAJDBC-327 + public void saveAndLoadAnEntity() { + + EntityWithStringyBigDecimal entity = new EntityWithStringyBigDecimal(); + entity.stringyNumber = "123456.78912"; + + repository.save(entity); + + EntityWithStringyBigDecimal reloaded = repository.findById(entity.id).get(); + + // loading the number from the database might result in additional zeros at the end. + String stringyNumber = reloaded.stringyNumber; + assertThat(stringyNumber).startsWith(entity.stringyNumber); + assertThat(stringyNumber.substring(entity.stringyNumber.length())).matches("0*"); + } + + @Test // DATAJDBC-412 + public void saveAndLoadAnEntityWithReference() { + + EntityWithStringyBigDecimal entity = new EntityWithStringyBigDecimal(); + entity.stringyNumber = "123456.78912"; + entity.reference = new OtherEntity(); + entity.reference.created = new Date(); + + repository.save(entity); + + EntityWithStringyBigDecimal reloaded = repository.findById(entity.id).get(); + + // loading the number from the database might result in additional zeros at the end. 
+ assertSoftly(softly -> { + String stringyNumber = reloaded.stringyNumber; + softly.assertThat(stringyNumber).startsWith(entity.stringyNumber); + softly.assertThat(stringyNumber.substring(entity.stringyNumber.length())).matches("0*"); + + softly.assertThat(entity.id.value).isNotNull(); + softly.assertThat(reloaded.id.value).isEqualTo(entity.id.value); + + softly.assertThat(entity.reference.id.value).isNotNull(); + softly.assertThat(reloaded.reference.id.value).isEqualTo(entity.reference.id.value); + }); + } + + @Test // GH-1212 + void queryByEnumTypeIn() { + + EntityWithStringyBigDecimal entityA = new EntityWithStringyBigDecimal(); + entityA.direction = Direction.LEFT; + EntityWithStringyBigDecimal entityB = new EntityWithStringyBigDecimal(); + entityB.direction = Direction.CENTER; + EntityWithStringyBigDecimal entityC = new EntityWithStringyBigDecimal(); + entityC.direction = Direction.RIGHT; + repository.saveAll(asList(entityA, entityB, entityC)); + + assertThat(repository.findByEnumTypeIn(Set.of(Direction.LEFT, Direction.RIGHT))) + .extracting(entity -> entity.direction).containsExactlyInAnyOrder(Direction.LEFT, Direction.RIGHT); + } + + @Test // GH-1212 + void queryByEnumTypeEqual() { + + EntityWithStringyBigDecimal entityA = new EntityWithStringyBigDecimal(); + entityA.direction = Direction.LEFT; + EntityWithStringyBigDecimal entityB = new EntityWithStringyBigDecimal(); + entityB.direction = Direction.CENTER; + EntityWithStringyBigDecimal entityC = new EntityWithStringyBigDecimal(); + entityC.direction = Direction.RIGHT; + repository.saveAll(asList(entityA, entityB, entityC)); + + assertThat(repository.findByEnumTypeIn(Set.of(Direction.CENTER))).extracting(entity -> entity.direction) + .containsExactly(Direction.CENTER); + } + + interface EntityWithStringyBigDecimalRepository extends CrudRepository { + + @Query("SELECT * FROM ENTITY_WITH_STRINGY_BIG_DECIMAL WHERE DIRECTION IN (:types)") + List findByEnumTypeIn(Set types); + + @Query("SELECT * FROM 
ENTITY_WITH_STRINGY_BIG_DECIMAL WHERE DIRECTION = :type") + List findByEnumType(Direction type); + } + + private static class EntityWithStringyBigDecimal { + + @Id CustomId id; + String stringyNumber = "1.0"; + OtherEntity reference; + Direction direction = Direction.CENTER; + } + + private static class CustomId { + + private final Long value; + + CustomId(Long value) { + this.value = value; + } + } + + private static class OtherEntity { + + @Id CustomId id; + Date created; + } + + enum Direction { + LEFT, CENTER, RIGHT + } + + @WritingConverter + enum StringToBigDecimalConverter implements Converter { + + INSTANCE; + + @Override + public JdbcValue convert(String source) { + + Object value = new BigDecimal(source); + return JdbcValue.of(value, JDBCType.DECIMAL); + } + } + + @ReadingConverter + enum BigDecimalToString implements Converter { + + INSTANCE; + + @Override + public String convert(BigDecimal source) { + + return source.toString(); + } + } + + @WritingConverter + enum CustomIdWritingConverter implements Converter { + + INSTANCE; + + @Override + public Number convert(CustomId source) { + return source.value.intValue(); + } + } + + @ReadingConverter + enum CustomIdReadingConverter implements Converter { + + INSTANCE; + + @Override + public CustomId convert(Number source) { + return new CustomId(source.longValue()); + } + } + + @WritingConverter + enum DirectionToIntegerConverter implements Converter { + + INSTANCE; + + @Override + public JdbcValue convert(Direction source) { + + int integer = switch (source) { + case LEFT -> -1; + case CENTER -> 0; + case RIGHT -> 1; + }; + return JdbcValue.of(integer, JDBCType.INTEGER); + } + } + + @ReadingConverter // Needed for Oracle since the JDBC driver returns BigDecimal on read + enum NumberToDirectionConverter implements Converter { + + INSTANCE; + + @Override + public Direction convert(Number source) { + int sourceAsInt = source.intValue(); + if (sourceAsInt == 0) { + return Direction.CENTER; + } else if 
(sourceAsInt < 0) { + return Direction.LEFT; + } else { + return Direction.RIGHT; + } + } + } + + @ReadingConverter + enum IntegerToDirectionConverter implements Converter { + + INSTANCE; + + @Override + public Direction convert(Integer source) { + if (source == 0) { + return Direction.CENTER; + } else if (source < 0) { + return Direction.LEFT; + } else { + return Direction.RIGHT; + } + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryDeclaredLookUpStrategyTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryDeclaredLookUpStrategyTests.java new file mode 100644 index 0000000000..cc31bbbd4a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryDeclaredLookUpStrategyTests.java @@ -0,0 +1,54 @@ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.query.QueryLookupStrategy; + +/** + * Test to verify that + * @EnableJdbcRepositories(queryLookupStrategy = QueryLookupStrategy.Key.USE_DECLARED_QUERY) works as + * intended. 
+ * + * @author Diego Krupitza + */ +class JdbcRepositoryDeclaredLookUpStrategyTests extends AbstractJdbcRepositoryLookUpStrategyTests { + + @Test // GH-1043 + void contextCannotByCreatedDueToFindByNameNotDeclaredQuery() { + + try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext()) { + + context.register(JdbcRepositoryDeclaredLookUpStrategyTests.Config.class); + + assertThatThrownBy(() -> { + context.refresh(); + context.getBean(OnesRepository.class); + }).hasMessageContaining("findByName"); + } + } + + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, + queryLookupStrategy = QueryLookupStrategy.Key.USE_DECLARED_QUERY, + includeFilters = @ComponentScan.Filter(value = AbstractJdbcRepositoryLookUpStrategyTests.OnesRepository.class, + type = FilterType.ASSIGNABLE_TYPE)) + static class Config { + + // use a different SQL script to bootstrap the test class. + @Bean + TestClass testClass() { + return TestClass.of(JdbcRepositoryIntegrationTests.class); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedHsqlIntegrationTests.java new file mode 100644 index 0000000000..fcb5b24204 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedHsqlIntegrationTests.java @@ -0,0 +1,407 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.repository.ListCrudRepository; +import org.springframework.data.repository.PagingAndSortingRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Very simple use cases for creation and usage of 
JdbcRepositories with test {@link Embedded} annotation in Entities. + * + * @author Bastian Wilhelm + * @author Christoph Strobl + * @author Mikhail Polivakha + * @author Jens Schauder + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class JdbcRepositoryEmbeddedHsqlIntegrationTests { + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + @Bean + PersonRepository personRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(PersonRepository.class); + } + + @Bean + WithDotColumnRepo withDotColumnRepo(JdbcRepositoryFactory factory) { + return factory.getRepository(WithDotColumnRepo.class); + } + + @Bean + WithDotEmbeddedRepo withDotEmbeddedRepo(JdbcRepositoryFactory factory) { + return factory.getRepository(WithDotEmbeddedRepo.class); + } + + } + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + @Autowired PersonRepository personRepository; + @Autowired WithDotColumnRepo withDotColumnRepo; + @Autowired WithDotEmbeddedRepo withDotEmbeddedRepo; + + @Test // DATAJDBC-111 + void savesAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(JdbcTestUtils.countRowsInTableWhere(template.getJdbcOperations(), "dummy_entity", + "id = " + entity.getId())).isEqualTo(1); + } + + @Test // DATAJDBC-111 + void saveAndLoadAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getId()).isEqualTo(entity.getId()); + assertThat(it.getPrefixedEmbeddable().getTest()).isEqualTo(entity.getPrefixedEmbeddable().getTest()); + assertThat(it.getPrefixedEmbeddable().getEmbeddable().getAttr()) + .isEqualTo(entity.getPrefixedEmbeddable().getEmbeddable().getAttr()); + 
assertThat(it.getEmbeddable().getTest()).isEqualTo(entity.getEmbeddable().getTest()); + assertThat(it.getEmbeddable().getEmbeddable().getAttr()) + .isEqualTo(entity.getEmbeddable().getEmbeddable().getAttr()); + }); + } + + @Test // DATAJDBC-111 + void findAllFindsAllEntities() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + Iterable all = repository.findAll(); + + assertThat(all)// + .extracting(DummyEntity::getId)// + .containsExactlyInAnyOrder(entity.getId(), other.getId()); + } + + @Test // GH-1676 + void findAllFindsAllEntitiesWithOnlyReferenceNotNull() { + + DummyEntity entity = createDummyEntity(); + entity.prefixedEmbeddable.test = null; + entity = repository.save(entity); + DummyEntity other = repository.save(createDummyEntity()); + + Iterable all = repository.findAll(); + + assertThat(all)// + .extracting(DummyEntity::getId)// + .containsExactlyInAnyOrder(entity.getId(), other.getId()); + } + + @Test // DATAJDBC-111 + void findByIdReturnsEmptyWhenNoneFound() { + + // NOT saving anything, so DB is empty + assertThat(repository.findById(-1L)).isEmpty(); + } + + @Test // DATAJDBC-111 + void update() { + + DummyEntity entity = repository.save(createDummyEntity()); + + entity.getPrefixedEmbeddable().setTest("something else"); + entity.getPrefixedEmbeddable().getEmbeddable().setAttr(3L); + DummyEntity saved = repository.save(entity); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getPrefixedEmbeddable().getTest()).isEqualTo(saved.getPrefixedEmbeddable().getTest()); + assertThat(it.getPrefixedEmbeddable().getEmbeddable().getAttr()) + .isEqualTo(saved.getPrefixedEmbeddable().getEmbeddable().getAttr()); + }); + } + + @Test // DATAJDBC-111 + void updateMany() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + entity.getEmbeddable().setTest("something else"); + 
other.getEmbeddable().setTest("others Name"); + + entity.getPrefixedEmbeddable().getEmbeddable().setAttr(3L); + other.getPrefixedEmbeddable().getEmbeddable().setAttr(5L); + + repository.saveAll(asList(entity, other)); + + assertThat(repository.findAll()) // + .extracting(d -> d.getEmbeddable().getTest()) // + .containsExactlyInAnyOrder(entity.getEmbeddable().getTest(), other.getEmbeddable().getTest()); + + assertThat(repository.findAll()) // + .extracting(d -> d.getPrefixedEmbeddable().getEmbeddable().getAttr()) // + .containsExactlyInAnyOrder(entity.getPrefixedEmbeddable().getEmbeddable().getAttr(), + other.getPrefixedEmbeddable().getEmbeddable().getAttr()); + } + + @Test // DATAJDBC-111 + void deleteById() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.deleteById(two.getId()); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(one.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + void deleteByEntity() { + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.delete(one); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + void deleteByList() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.deleteAll(asList(one, three)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId()); + } + + @Test // DATAJDBC-111 + void deleteAll() { + + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); 
+ repository.save(createDummyEntity()); + + assertThat(repository.findAll()).isNotEmpty(); + + repository.deleteAll(); + + assertThat(repository.findAll()).isEmpty(); + } + + @Test // DATAJDBC-370 + void saveWithNullValueEmbeddable() { + + DummyEntity entity = repository.save(new DummyEntity()); + + assertThat(JdbcTestUtils.countRowsInTableWhere(template.getJdbcOperations(), "dummy_entity", + "id = " + entity.getId())).isEqualTo(1); + } + + @Test // GH-1286 + void findOrderedByEmbeddedProperty() { + + Person first = new Person(null, "Bob", "Seattle", new PersonContacts("ddd@example.com", "+1 111 1111 11 11")); + Person second = new Person(null, "Alex", "LA", new PersonContacts("aaa@example.com", "+2 222 2222 22 22")); + Person third = new Person(null, "Sarah", "NY", new PersonContacts("ggg@example.com", "+3 333 3333 33 33")); + + List people = (List)personRepository.saveAll(List.of(first, second, third)); + + Iterable fetchedPersons = personRepository + .findAll(Sort.by(new Sort.Order(Sort.Direction.ASC, "personContacts.email"))); + + Assertions.assertThat(fetchedPersons).containsExactly(people.get(1), people.get(0), people.get(2)); + } + + @Test // GH-1286 + void sortingWorksCorrectlyIfColumnHasDotInItsName() { + + WithDotColumn first = new WithDotColumn(null, "Salt Lake City"); + WithDotColumn second = new WithDotColumn(null, "Istanbul"); + WithDotColumn third = new WithDotColumn(null, "Tokyo"); + + List saved = (List) withDotColumnRepo.saveAll(List.of(first, second, third)); + + Iterable fetchedPersons = withDotColumnRepo + .findAll(Sort.by(new Sort.Order(Sort.Direction.ASC, "address"))); + + Assertions.assertThat(fetchedPersons).containsExactly(saved.get(1), saved.get(0), saved.get(2)); + } + + @Test // GH-1565 + void saveAndLoadEmbeddedWithDottedPrefix() { + WithDotEmbedded entity = withDotEmbeddedRepo.save( + new WithDotEmbedded(null, new PersonContacts("jens@jens.de", "123456789"))); + + WithDotEmbedded reloaded = 
withDotEmbeddedRepo.findById(entity.id).orElseThrow(); + + assertThat(reloaded).isEqualTo(entity); + } + + private static DummyEntity createDummyEntity() { + DummyEntity entity = new DummyEntity(); + + final CascadedEmbeddable prefixedCascadedEmbeddable = new CascadedEmbeddable(); + prefixedCascadedEmbeddable.setTest("c1"); + + final Embeddable embeddable1 = new Embeddable(); + embeddable1.setAttr(1L); + prefixedCascadedEmbeddable.setEmbeddable(embeddable1); + + entity.setPrefixedEmbeddable(prefixedCascadedEmbeddable); + + final CascadedEmbeddable cascadedEmbeddable = new CascadedEmbeddable(); + cascadedEmbeddable.setTest("c2"); + + final Embeddable embeddable2 = new Embeddable(); + embeddable2.setAttr(2L); + cascadedEmbeddable.setEmbeddable(embeddable2); + + entity.setEmbeddable(cascadedEmbeddable); + + return entity; + } + + interface DummyEntityRepository extends CrudRepository {} + + interface PersonRepository extends PagingAndSortingRepository, CrudRepository {} + + interface WithDotColumnRepo + extends PagingAndSortingRepository, CrudRepository {} + + record WithDotColumn(@Id Integer id, @Column("address.city") String address) { + } + + record WithDotEmbedded(@Id Integer id, @Embedded.Nullable(prefix = "prefix.") PersonContacts contact) { + } + + interface WithDotEmbeddedRepo extends ListCrudRepository {} + + @Table("SORT_EMBEDDED_ENTITY") + record Person(@Id Long id, String firstName, String address, @Embedded.Nullable PersonContacts personContacts) { + } + + record PersonContacts(String email, String phoneNumber) { + } + + + static class DummyEntity { + + @Id Long id; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "PREFIX_") CascadedEmbeddable prefixedEmbeddable; + + @Embedded(onEmpty = OnEmpty.USE_NULL) CascadedEmbeddable embeddable; + + public Long getId() { + return this.id; + } + + public CascadedEmbeddable getPrefixedEmbeddable() { + return this.prefixedEmbeddable; + } + + public CascadedEmbeddable getEmbeddable() { + return this.embeddable; + } + 
+ public void setId(Long id) { + this.id = id; + } + + public void setPrefixedEmbeddable(CascadedEmbeddable prefixedEmbeddable) { + this.prefixedEmbeddable = prefixedEmbeddable; + } + + public void setEmbeddable(CascadedEmbeddable embeddable) { + this.embeddable = embeddable; + } + } + + static class CascadedEmbeddable { + String test; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "PREFIX2_") Embeddable embeddable; + + public String getTest() { + return this.test; + } + + public Embeddable getEmbeddable() { + return this.embeddable; + } + + public void setTest(String test) { + this.test = test; + } + + public void setEmbeddable(Embeddable embeddable) { + this.embeddable = embeddable; + } + } + + static class Embeddable { + Long attr; + + public Long getAttr() { + return this.attr; + } + + public void setAttr(Long attr) { + this.attr = attr; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedImmutableIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedImmutableIntegrationTests.java new file mode 100644 index 0000000000..a4b8959736 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedImmutableIntegrationTests.java @@ -0,0 +1,186 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Very simple use cases for creation and usage of JdbcRepositories with {@link Embedded} annotation in Entities. 
+ * + * @author Bastian Wilhelm + */ +@IntegrationTest +public class JdbcRepositoryEmbeddedImmutableIntegrationTests { + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + } + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + + @Test // DATAJDBC-111 + public void saveAndLoadAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getId()).isEqualTo(entity.getId()); + assertThat(it.getPrefixedEmbeddable().getAttr1()).isEqualTo(entity.getPrefixedEmbeddable().getAttr1()); + assertThat(it.getPrefixedEmbeddable().getAttr2()).isEqualTo(entity.getPrefixedEmbeddable().getAttr2()); + }); + } + + private static DummyEntity createDummyEntity() { + return new DummyEntity(null, new Embeddable(1L, "test1")); + } + + interface DummyEntityRepository extends CrudRepository {} + + static final class DummyEntity { + + @Id + private final Long id; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "PREFIX_") + private final Embeddable prefixedEmbeddable; + + public DummyEntity(Long id, Embeddable prefixedEmbeddable) { + this.id = id; + this.prefixedEmbeddable = prefixedEmbeddable; + } + + public Long getId() { + return this.id; + } + + public Embeddable getPrefixedEmbeddable() { + return this.prefixedEmbeddable; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final DummyEntity other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$prefixedEmbeddable = this.getPrefixedEmbeddable(); + final Object other$prefixedEmbeddable = other.getPrefixedEmbeddable(); + return 
Objects.equals(this$prefixedEmbeddable, other$prefixedEmbeddable); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $prefixedEmbeddable = this.getPrefixedEmbeddable(); + result = result * PRIME + ($prefixedEmbeddable == null ? 43 : $prefixedEmbeddable.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryEmbeddedImmutableIntegrationTests.DummyEntity(id=" + this.getId() + ", prefixedEmbeddable=" + this.getPrefixedEmbeddable() + ")"; + } + + public DummyEntity withId(Long id) { + return this.id == id ? this : new DummyEntity(id, this.prefixedEmbeddable); + } + + public DummyEntity withPrefixedEmbeddable(Embeddable prefixedEmbeddable) { + return this.prefixedEmbeddable == prefixedEmbeddable ? this : new DummyEntity(this.id, prefixedEmbeddable); + } + } + + private static final class Embeddable { + + private final Long attr1; + private final String attr2; + + public Embeddable(Long attr1, String attr2) { + this.attr1 = attr1; + this.attr2 = attr2; + } + + public Long getAttr1() { + return this.attr1; + } + + public String getAttr2() { + return this.attr2; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof final Embeddable other)) + return false; + final Object this$attr1 = this.getAttr1(); + final Object other$attr1 = other.getAttr1(); + if (!Objects.equals(this$attr1, other$attr1)) + return false; + final Object this$attr2 = this.getAttr2(); + final Object other$attr2 = other.getAttr2(); + return Objects.equals(this$attr2, other$attr2); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $attr1 = this.getAttr1(); + result = result * PRIME + ($attr1 == null ? 43 : $attr1.hashCode()); + final Object $attr2 = this.getAttr2(); + result = result * PRIME + ($attr2 == null ? 
43 : $attr2.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryEmbeddedImmutableIntegrationTests.Embeddable(attr1=" + this.getAttr1() + ", attr2=" + this.getAttr2() + ")"; + } + + public Embeddable withAttr1(Long attr1) { + return this.attr1 == attr1 ? this : new Embeddable(attr1, this.attr2); + } + + public Embeddable withAttr2(String attr2) { + return this.attr2 == attr2 ? this : new Embeddable(this.attr1, attr2); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests.java new file mode 100644 index 0000000000..0d4ea9e940 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests.java @@ -0,0 +1,310 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; + +import java.sql.SQLException; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Very simple use cases for creation and usage of JdbcRepositories with test {@link Embedded} annotation in Entities. 
+ * + * @author Bastian Wilhelm + */ +@IntegrationTest +public class JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests { + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + @Autowired Dialect dialect; + + private static DummyEntity createDummyEntity() { + DummyEntity entity = new DummyEntity(); + + entity.setTest("rootTest"); + + final DummyEntity2 dummyEntity2 = new DummyEntity2(); + dummyEntity2.setTest("c1"); + + final Embeddable embeddable = new Embeddable(); + embeddable.setAttr(1L); + dummyEntity2.setEmbeddable(embeddable); + + entity.setDummyEntity2(dummyEntity2); + + return entity; + } + + @Test // DATAJDBC-111 + public void savesAnEntity() throws SQLException { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(countRowsInTable("dummy_entity", entity.getId())).isEqualTo(1); + assertThat(countRowsInTable("dummy_entity2", entity.getId())).isEqualTo(1); + } + + private int countRowsInTable(String name, long idValue) { + + SqlIdentifier id = SqlIdentifier.quoted("ID"); + String whereClause = id.toSql(dialect.getIdentifierProcessing()) + " = " + idValue; + + return JdbcTestUtils.countRowsInTableWhere(template.getJdbcOperations(), name, whereClause); + } + + @Test // DATAJDBC-111 + public void saveAndLoadAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getId()).isEqualTo(entity.getId()); + assertThat(it.getDummyEntity2().getTest()).isEqualTo(entity.getDummyEntity2().getTest()); + assertThat(it.getDummyEntity2().getEmbeddable().getAttr()) + .isEqualTo(entity.getDummyEntity2().getEmbeddable().getAttr()); + }); + } + + @Test // DATAJDBC-111 + public void findAllFindsAllEntities() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + Iterable all = repository.findAll(); + + assertThat(all)// 
+ .extracting(DummyEntity::getId)// + .containsExactlyInAnyOrder(entity.getId(), other.getId()); + } + + @Test // DATAJDBC-111 + public void findByIdReturnsEmptyWhenNoneFound() { + + // NOT saving anything, so DB is empty + assertThat(repository.findById(-1L)).isEmpty(); + } + + @Test // DATAJDBC-111 + public void update() { + + DummyEntity entity = repository.save(createDummyEntity()); + + entity.getDummyEntity2().setTest("something else"); + entity.getDummyEntity2().getEmbeddable().setAttr(3L); + DummyEntity saved = repository.save(entity); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getDummyEntity2().getTest()).isEqualTo(saved.getDummyEntity2().getTest()); + assertThat(it.getDummyEntity2().getEmbeddable().getAttr()) + .isEqualTo(saved.getDummyEntity2().getEmbeddable().getAttr()); + }); + } + + @Test // DATAJDBC-111 + public void updateMany() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + entity.getDummyEntity2().setTest("something else"); + other.getDummyEntity2().setTest("others Name"); + + entity.getDummyEntity2().getEmbeddable().setAttr(3L); + other.getDummyEntity2().getEmbeddable().setAttr(5L); + + repository.saveAll(asList(entity, other)); + + assertThat(repository.findAll()) // + .extracting(d -> d.getDummyEntity2().getTest()) // + .containsExactlyInAnyOrder(entity.getDummyEntity2().getTest(), other.getDummyEntity2().getTest()); + + assertThat(repository.findAll()) // + .extracting(d -> d.getDummyEntity2().getEmbeddable().getAttr()) // + .containsExactlyInAnyOrder(entity.getDummyEntity2().getEmbeddable().getAttr(), + other.getDummyEntity2().getEmbeddable().getAttr()); + } + + @Test // DATAJDBC-111 + public void deleteById() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + 
repository.deleteById(two.getId()); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(one.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + public void deleteByEntity() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.delete(one); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + public void deleteByList() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.deleteAll(asList(one, three)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId()); + } + + @Test // DATAJDBC-111 + public void deleteAll() { + + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + + assertThat(repository.findAll()).isNotEmpty(); + + repository.deleteAll(); + + assertThat(repository.findAll()).isEmpty(); + } + + interface DummyEntityRepository extends CrudRepository {} + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + } + + static class DummyEntity { + @Column("ID") + @Id + Long id; + + String test; + + @Column("ID") + DummyEntity2 dummyEntity2; + + public Long getId() { + return this.id; + } + + public String getTest() { + return this.test; + } + + public DummyEntity2 getDummyEntity2() { + return this.dummyEntity2; + } + + public void setId(Long id) { + this.id = id; + } + + public void setTest(String test) { + 
this.test = test; + } + + public void setDummyEntity2(DummyEntity2 dummyEntity2) { + this.dummyEntity2 = dummyEntity2; + } + } + + static class DummyEntity2 { + @Column("ID") + @Id + Long id; + + String test; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix_") + Embeddable embeddable; + + public Long getId() { + return this.id; + } + + public String getTest() { + return this.test; + } + + public Embeddable getEmbeddable() { + return this.embeddable; + } + + public void setId(Long id) { + this.id = id; + } + + public void setTest(String test) { + this.test = test; + } + + public void setEmbeddable(Embeddable embeddable) { + this.embeddable = embeddable; + } + } + + static class Embeddable { + Long attr; + + public Long getAttr() { + return this.attr; + } + + public void setAttr(Long attr) { + this.attr = attr; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests.java new file mode 100644 index 0000000000..4e566f054c --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests.java @@ -0,0 +1,336 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Very simple use cases for creation and usage of JdbcRepositories with test {@link Embedded} annotation in Entities. 
+ * + * @author Bastian Wilhelm + * @author Yunyoung LEE + * @author Nikita Konev + */ +@IntegrationTest +class JdbcRepositoryEmbeddedWithCollectionIntegrationTests { + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + } + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + @Autowired Dialect dialect; + + @Test // DATAJDBC-111 + void savesAnEntity() throws SQLException { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(countRowsInTable("dummy_entity", entity.getId(), "ID")).isEqualTo(1); + assertThat(countRowsInTable("dummy_entity2", entity.getId(), "DUMMY_ID")).isEqualTo(2); + } + + private int countRowsInTable(String name, long idValue, String idColumnName) { + + SqlIdentifier id = SqlIdentifier.quoted(idColumnName); + String whereClause = id.toSql(dialect.getIdentifierProcessing()) + " = " + idValue; + + return JdbcTestUtils.countRowsInTableWhere(template.getJdbcOperations(), name, whereClause); + } + + @Test // DATAJDBC-111 + void saveAndLoadAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getId()).isEqualTo(entity.getId()); + assertThat(it.getEmbeddable().getTest()).isEqualTo(entity.getEmbeddable().getTest()); + assertThat(it.getEmbeddable().getList().size()).isEqualTo(entity.getEmbeddable().getList().size()); + assertThat(it.getEmbeddable().getList().get(0).getTest()) + .isEqualTo(entity.getEmbeddable().getList().get(0).getTest()); + assertThat(it.getEmbeddable().getList().get(1).getTest()) + .isEqualTo(entity.getEmbeddable().getList().get(1).getTest()); + }); + } + + @Test // DATAJDBC-111 + void findAllFindsAllEntities() { + + DummyEntity entity = 
repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + Iterable all = repository.findAll(); + + assertThat(all)// + .extracting(DummyEntity::getId)// + .containsExactlyInAnyOrder(entity.getId(), other.getId()); + } + + @Test // DATAJDBC-111 + void findByIdReturnsEmptyWhenNoneFound() { + + // NOT saving anything, so DB is empty + assertThat(repository.findById(-1L)).isEmpty(); + } + + @Test // DATAJDBC-111 + void update() { + + DummyEntity entity = repository.save(createDummyEntity()); + + entity.getEmbeddable().setTest("something else"); + entity.getEmbeddable().getList().get(0).setTest("another"); + DummyEntity saved = repository.save(entity); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getId()).isEqualTo(saved.getId()); + assertThat(it.getEmbeddable().getTest()).isEqualTo(saved.getEmbeddable().getTest()); + assertThat(it.getEmbeddable().getList().size()).isEqualTo(saved.getEmbeddable().getList().size()); + assertThat(it.getEmbeddable().getList().get(0).getTest()) + .isEqualTo(saved.getEmbeddable().getList().get(0).getTest()); + assertThat(it.getEmbeddable().getList().get(1).getTest()) + .isEqualTo(saved.getEmbeddable().getList().get(1).getTest()); + }); + } + + @Test // DATAJDBC-111 + void updateMany() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + entity.getEmbeddable().setTest("something else"); + other.getEmbeddable().setTest("others Name"); + + entity.getEmbeddable().getList().get(0).setTest("else"); + other.getEmbeddable().getList().get(0).setTest("Name"); + + repository.saveAll(asList(entity, other)); + + assertThat(repository.findAll()) // + .extracting(d -> d.getEmbeddable().getTest()) // + .containsExactlyInAnyOrder(entity.getEmbeddable().getTest(), other.getEmbeddable().getTest()); + + assertThat(repository.findAll()) // + .extracting(d -> 
d.getEmbeddable().getList().get(0).getTest()) // + .containsExactlyInAnyOrder(entity.getEmbeddable().getList().get(0).getTest(), + other.getEmbeddable().getList().get(0).getTest()); + } + + @Test // DATAJDBC-111 + void deleteById() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.deleteById(two.getId()); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(one.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + void deleteByEntity() { + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.delete(one); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + void deleteByList() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.deleteAll(asList(one, three)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId()); + } + + @Test // DATAJDBC-111 + void deleteAll() { + + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + + assertThat(repository.findAll()).isNotEmpty(); + + repository.deleteAll(); + + assertThat(repository.findAll()).isEmpty(); + } + + @Test // GH-771 + void deleteBy() { + + DummyEntity one = repository.save(createDummyEntity("root1")); + DummyEntity two = repository.save(createDummyEntity("root2")); + DummyEntity three = repository.save(createDummyEntity("root3")); + + assertThat(repository.deleteByTest(two.getTest())).isEqualTo(1); + + 
assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(one.getId(), three.getId()); + + Long count = template.queryForObject("select count(1) from dummy_entity2", Collections.emptyMap(), Long.class); + assertThat(count).isEqualTo(4); + + } + + private static DummyEntity createDummyEntity() { + return createDummyEntity("root"); + } + + private static DummyEntity createDummyEntity(String test) { + + DummyEntity entity = new DummyEntity(); + entity.setTest(test); + + final Embeddable embeddable = new Embeddable(); + embeddable.setTest("embedded"); + + final DummyEntity2 dummyEntity21 = new DummyEntity2(); + dummyEntity21.setTest("entity1"); + + final DummyEntity2 dummyEntity22 = new DummyEntity2(); + dummyEntity22.setTest("entity2"); + + embeddable.getList().add(dummyEntity21); + embeddable.getList().add(dummyEntity22); + + entity.setEmbeddable(embeddable); + + return entity; + } + + interface DummyEntityRepository extends CrudRepository { + int deleteByTest(String test); + } + + private static class DummyEntity { + @Column("ID") + @Id Long id; + + String test; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "PREFIX_") Embeddable embeddable; + + public Long getId() { + return this.id; + } + + public String getTest() { + return this.test; + } + + public Embeddable getEmbeddable() { + return this.embeddable; + } + + public void setId(Long id) { + this.id = id; + } + + public void setTest(String test) { + this.test = test; + } + + public void setEmbeddable(Embeddable embeddable) { + this.embeddable = embeddable; + } + } + + private static class Embeddable { + @MappedCollection(idColumn = "DUMMY_ID", keyColumn = "ORDER_KEY") List list = new ArrayList<>(); + + String test; + + public List getList() { + return this.list; + } + + public String getTest() { + return this.test; + } + + public void setList(List list) { + this.list = list; + } + + public void setTest(String test) { + this.test = test; + } + } + + private static 
class DummyEntity2 { + String test; + + public String getTest() { + return this.test; + } + + public void setTest(String test) { + this.test = test; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests.java new file mode 100644 index 0000000000..a39a8bf96c --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests.java @@ -0,0 +1,356 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Very simple use cases for creation and usage of JdbcRepositories with test {@link Embedded} annotation in Entities. 
+ * + * @author Bastian Wilhelm + * @author Jens Schauder + */ +@IntegrationTest +public class JdbcRepositoryEmbeddedWithReferenceIntegrationTests { + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + @Autowired Dialect dialect; + + private static DummyEntity createDummyEntity() { + + DummyEntity entity = new DummyEntity(); + entity.setTest("root"); + + final Embeddable embeddable = new Embeddable(); + embeddable.setTest("embedded"); + + final DummyEntity2 dummyEntity2 = new DummyEntity2(); + dummyEntity2.setTest("entity"); + + embeddable.setDummyEntity2(dummyEntity2); + + entity.setEmbeddable(embeddable); + + return entity; + } + + @Test // DATAJDBC-111 + public void savesAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(countRowsInTable("dummy_entity", entity.getId())).isEqualTo(1); + assertThat(countRowsInTable("dummy_entity2", entity.getId())).isEqualTo(1); + } + + private int countRowsInTable(String name, long idValue) { + + SqlIdentifier id = SqlIdentifier.quoted("ID"); + String whereClause = id.toSql(dialect.getIdentifierProcessing()) + " = " + idValue; + + return JdbcTestUtils.countRowsInTableWhere(template.getJdbcOperations(), name, whereClause); + } + + @Test // DATAJDBC-111 + public void saveAndLoadAnEntity() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getId()).isEqualTo(entity.getId()); + assertThat(it.getEmbeddable().getTest()).isEqualTo(entity.getEmbeddable().getTest()); + assertThat(it.getEmbeddable().getDummyEntity2().getTest()) + .isEqualTo(entity.getEmbeddable().getDummyEntity2().getTest()); + }); + } + + @Test // DATAJDBC-111 + public void findAllFindsAllEntities() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + Iterable all = repository.findAll(); + + 
assertThat(all)// + .extracting(DummyEntity::getId)// + .containsExactlyInAnyOrder(entity.getId(), other.getId()); + } + + @Test // DATAJDBC-111 + public void findByIdReturnsEmptyWhenNoneFound() { + + // NOT saving anything, so DB is empty + assertThat(repository.findById(-1L)).isEmpty(); + } + + @Test // DATAJDBC-111 + public void update() { + + DummyEntity entity = repository.save(createDummyEntity()); + + entity.getEmbeddable().setTest("something else"); + entity.getEmbeddable().getDummyEntity2().setTest("another"); + DummyEntity saved = repository.save(entity); + + assertThat(repository.findById(entity.getId())).hasValueSatisfying(it -> { + assertThat(it.getEmbeddable().getTest()).isEqualTo(saved.getEmbeddable().getTest()); + assertThat(it.getEmbeddable().getDummyEntity2().getTest()) + .isEqualTo(saved.getEmbeddable().getDummyEntity2().getTest()); + }); + } + + @Test // DATAJDBC-111 + public void updateMany() { + + DummyEntity entity = repository.save(createDummyEntity()); + DummyEntity other = repository.save(createDummyEntity()); + + entity.getEmbeddable().setTest("something else"); + other.getEmbeddable().setTest("others Name"); + + entity.getEmbeddable().getDummyEntity2().setTest("else"); + other.getEmbeddable().getDummyEntity2().setTest("Name"); + + repository.saveAll(asList(entity, other)); + + assertThat(repository.findAll()) // + .extracting(d -> d.getEmbeddable().getTest()) // + .containsExactlyInAnyOrder(entity.getEmbeddable().getTest(), other.getEmbeddable().getTest()); + + assertThat(repository.findAll()) // + .extracting(d -> d.getEmbeddable().getDummyEntity2().getTest()) // + .containsExactlyInAnyOrder(entity.getEmbeddable().getDummyEntity2().getTest(), + other.getEmbeddable().getDummyEntity2().getTest()); + } + + @Test // DATAJDBC-111 + public void deleteById() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + 
repository.deleteById(two.getId()); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(one.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + public void deleteByEntity() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.delete(one); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId(), three.getId()); + } + + @Test // DATAJDBC-111 + public void deleteByList() { + + DummyEntity one = repository.save(createDummyEntity()); + DummyEntity two = repository.save(createDummyEntity()); + DummyEntity three = repository.save(createDummyEntity()); + + repository.deleteAll(asList(one, three)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getId) // + .containsExactlyInAnyOrder(two.getId()); + } + + @Test // DATAJDBC-111 + public void deleteAll() { + + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + + assertThat(repository.findAll()).isNotEmpty(); + + repository.deleteAll(); + + assertThat(repository.findAll()).isEmpty(); + } + + @Test // DATAJDBC-318 + public void queryDerivationLoadsReferencedEntitiesCorrectly() { + + repository.save(createDummyEntity()); + repository.save(createDummyEntity()); + DummyEntity saved = repository.save(createDummyEntity()); + + assertThat(repository.findByTest(saved.test)) // + .extracting( // + e -> e.test, // + e -> e.embeddable.test, // + e -> e.embeddable.dummyEntity2.test // + ).containsExactly( // + tuple(saved.test, saved.embeddable.test, saved.embeddable.dummyEntity2.test), // + tuple(saved.test, saved.embeddable.test, saved.embeddable.dummyEntity2.test), // + tuple(saved.test, saved.embeddable.test, saved.embeddable.dummyEntity2.test) // + ); + + } + + interface 
DummyEntityRepository extends CrudRepository { + List findByTest(String test); + } + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + } + + private static class DummyEntity { + + @Column("ID") + @Id Long id; + + String test; + + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "PREFIX_") Embeddable embeddable; + + @Embedded(onEmpty = OnEmpty.USE_NULL) Embeddable2 embeddable2; + + public Long getId() { + return this.id; + } + + public String getTest() { + return this.test; + } + + public Embeddable getEmbeddable() { + return this.embeddable; + } + + public Embeddable2 getEmbeddable2() { + return this.embeddable2; + } + + public void setId(Long id) { + this.id = id; + } + + public void setTest(String test) { + this.test = test; + } + + public void setEmbeddable(Embeddable embeddable) { + this.embeddable = embeddable; + } + + public void setEmbeddable2(Embeddable2 embeddable2) { + this.embeddable2 = embeddable2; + } + } + + private static class Embeddable { + + @Column("ID") DummyEntity2 dummyEntity2; + + String test; + + public DummyEntity2 getDummyEntity2() { + return this.dummyEntity2; + } + + public String getTest() { + return this.test; + } + + public void setDummyEntity2(DummyEntity2 dummyEntity2) { + this.dummyEntity2 = dummyEntity2; + } + + public void setTest(String test) { + this.test = test; + } + } + + private static class Embeddable2 { + + @Column("ID") DummyEntity2 dummyEntity2; + + public DummyEntity2 getDummyEntity2() { + return this.dummyEntity2; + } + + public void setDummyEntity2(DummyEntity2 dummyEntity2) { + this.dummyEntity2 = dummyEntity2; + } + } + + private static class DummyEntity2 { + + @Column("ID") + @Id Long id; + + String test; + + public Long getId() { + return this.id; + } + + public String getTest() { + return this.test; + } + + public void setId(Long id) 
{ + this.id = id; + } + + public void setTest(String test) { + this.test = test; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIdGenerationIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIdGenerationIntegrationTests.java new file mode 100644 index 0000000000..95cc6dac55 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIdGenerationIntegrationTests.java @@ -0,0 +1,321 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicLong; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.annotation.Transient; +import org.springframework.data.domain.Persistable; +import org.springframework.data.jdbc.core.convert.IdGeneratingEntityCallback; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.repository.support.SimpleJdbcRepository; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.jdbc.testing.TestDatabaseFeatures; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.Sequence; +import org.springframework.data.relational.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.repository.ListCrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; + +/** + * Testing special cases for id generation with {@link SimpleJdbcRepository}. 
+ * + * @author Jens Schauder + * @author Greg Turnquist + * @author Mikhail Polivakha + * @author Mark Paluch + */ +@IntegrationTest +class JdbcRepositoryIdGenerationIntegrationTests { + + @Autowired NamedParameterJdbcOperations operations; + @Autowired ReadOnlyIdEntityRepository readOnlyIdRepository; + @Autowired PrimitiveIdEntityRepository primitiveIdRepository; + @Autowired ImmutableWithManualIdEntityRepository immutableWithManualIdEntityRepository; + + @Autowired SimpleSeqRepository simpleSeqRepository; + @Autowired PersistableSeqRepository persistableSeqRepository; + @Autowired PrimitiveIdSeqRepository primitiveIdSeqRepository; + @Autowired IdGeneratingEntityCallback idGeneratingCallback; + + @Test // DATAJDBC-98 + void idWithoutSetterGetsSet() { + + ReadOnlyIdEntity entity = readOnlyIdRepository.save(new ReadOnlyIdEntity(null, "Entity Name")); + + assertThat(entity.id()).isNotNull(); + + assertThat(readOnlyIdRepository.findById(entity.id())).hasValueSatisfying(it -> { + + assertThat(it.id()).isEqualTo(entity.id()); + assertThat(it.name()).isEqualTo(entity.name()); + }); + } + + @Test // DATAJDBC-98 + void primitiveIdGetsSet() { + + PrimitiveIdEntity entity = new PrimitiveIdEntity(); + entity.setName("Entity Name"); + + PrimitiveIdEntity saved = primitiveIdRepository.save(entity); + + assertThat(saved.getId()).isNotEqualTo(0L); + + assertThat(primitiveIdRepository.findById(saved.getId())).hasValueSatisfying(it -> { + + assertThat(it.getId()).isEqualTo(saved.getId()); + assertThat(it.getName()).isEqualTo(saved.getName()); + }); + } + + @Test // DATAJDBC-393 + void manuallyGeneratedId() { + + ImmutableWithManualIdEntity entity = new ImmutableWithManualIdEntity(null, "immutable"); + ImmutableWithManualIdEntity saved = immutableWithManualIdEntityRepository.save(entity); + + assertThat(saved.id()).isNotNull(); + + assertThat(immutableWithManualIdEntityRepository.findAll()).hasSize(1); + } + + @Test // DATAJDBC-393 + void manuallyGeneratedIdForSaveAll() { + + 
ImmutableWithManualIdEntity one = new ImmutableWithManualIdEntity(null, "one"); + ImmutableWithManualIdEntity two = new ImmutableWithManualIdEntity(null, "two"); + List saved = immutableWithManualIdEntityRepository.saveAll(List.of(one, two)); + + assertThat(saved).allSatisfy(e -> assertThat(e.id).isNotNull()); + + assertThat(immutableWithManualIdEntityRepository.findAll()).hasSize(2); + } + + @Test // DATAJDBC-2003 + @EnabledOnFeature(TestDatabaseFeatures.Feature.SUPPORTS_SEQUENCES) + void testUpdateAggregateWithSequence() { + + operations.getJdbcOperations().update("INSERT INTO SimpleSeq(id, name) VALUES(1, 'initial value')"); + + SimpleSeq entity = new SimpleSeq(); + entity.id = 1L; + entity.name = "New name"; + CompletableFuture afterCallback = mockIdGeneratingCallback(entity); + + SimpleSeq updated = simpleSeqRepository.save(entity); + + assertThat(updated.id).isEqualTo(1L); + assertThat(afterCallback.join().id).isEqualTo(1L); + } + + @Test // DATAJDBC-2003 + @EnabledOnFeature(TestDatabaseFeatures.Feature.SUPPORTS_SEQUENCES) + void testInsertPersistableAggregateWithSequenceClientIdIsFavored() { + + long initialId = 1L; + PersistableSeq entityWithSeq = PersistableSeq.createNew(initialId, "name"); + CompletableFuture afterCallback = mockIdGeneratingCallback(entityWithSeq); + + PersistableSeq saved = persistableSeqRepository.save(entityWithSeq); + + // We do not expect the SELECT next value from sequence in case we're doing an INSERT with ID provided by the client + assertThat(saved.getId()).isEqualTo(initialId); + assertThat(afterCallback.join().id).isEqualTo(initialId); + } + + @Test // DATAJDBC-2003 + @EnabledOnFeature(TestDatabaseFeatures.Feature.SUPPORTS_SEQUENCES) + void testInsertAggregateWithSequenceAndUnsetPrimitiveId() { + + PrimitiveIdSeq entity = new PrimitiveIdSeq(); + entity.name = "some name"; + CompletableFuture afterCallback = mockIdGeneratingCallback(entity); + + PrimitiveIdSeq saved = primitiveIdSeqRepository.save(entity); + + // 1. 
Select from sequence + // 2. Actual INSERT + assertThat(afterCallback.join().id).isEqualTo(1L); + assertThat(saved.id).isEqualTo(1L); // sequence starts with 1 + } + + @SuppressWarnings("unchecked") + private CompletableFuture mockIdGeneratingCallback(T entity) { + + CompletableFuture future = new CompletableFuture<>(); + + Mockito.doAnswer(invocationOnMock -> { + future.complete((T) invocationOnMock.callRealMethod()); + return future.join(); + }).when(idGeneratingCallback).onBeforeSave(Mockito.eq(entity), Mockito.any(MutableAggregateChange.class)); + + return future; + } + + interface PrimitiveIdEntityRepository extends ListCrudRepository {} + + interface ReadOnlyIdEntityRepository extends ListCrudRepository {} + + interface ImmutableWithManualIdEntityRepository extends ListCrudRepository {} + + interface SimpleSeqRepository extends ListCrudRepository {} + + interface PersistableSeqRepository extends ListCrudRepository {} + + interface PrimitiveIdSeqRepository extends ListCrudRepository {} + + record ReadOnlyIdEntity(@Id Long id, String name) { + } + + static class SimpleSeq { + + @Id + @Sequence(value = "simple_seq_seq") private Long id; + + private String name; + } + + static class PersistableSeq implements Persistable { + + @Id + @Sequence(value = "persistable_seq_seq") private Long id; + + private String name; + + @Transient private boolean isNew; + + @PersistenceCreator + public PersistableSeq() {} + + public PersistableSeq(Long id, String name, boolean isNew) { + this.id = id; + this.name = name; + this.isNew = isNew; + } + + static PersistableSeq createNew(Long id, String name) { + return new PersistableSeq(id, name, true); + } + + @Override + public Long getId() { + return id; + } + + @Override + public boolean isNew() { + return isNew; + } + } + + static class PrimitiveIdSeq { + + @Id + @Sequence(value = "primitive_seq_seq") private long id; + + private String name; + + } + + static class PrimitiveIdEntity { + + @Id private long id; + String name; + + 
public long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + } + + record ImmutableWithManualIdEntity(@Id Long id, String name) { + + @Override + public Long id() { + return this.id; + } + + public ImmutableWithManualIdEntity withId(Long id) { + return Objects.equals(this.id, id) ? this : new ImmutableWithManualIdEntity(id, this.name); + } + + public ImmutableWithManualIdEntity withName(String name) { + return Objects.equals(this.name, name) ? this : new ImmutableWithManualIdEntity(this.id, name); + } + } + + @Configuration + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(value = CrudRepository.class, type = FilterType.ASSIGNABLE_TYPE)) + @Import(TestConfiguration.class) + static class Config { + + AtomicLong lastId = new AtomicLong(0); + + /** + * {@link NamingStrategy} that harmlessly uppercases the table name, demonstrating how to inject one while not + * breaking existing SQL operations. + */ + @Bean + NamingStrategy namingStrategy() { + + return new NamingStrategy() { + + @Override + public String getTableName(Class type) { + return type.getSimpleName().toUpperCase(); + } + }; + } + + @Bean + BeforeConvertCallback idGenerator() { + return e -> e.withId(lastId.incrementAndGet()); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIntegrationTests.java new file mode 100644 index 0000000000..7c854b823f --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIntegrationTests.java @@ -0,0 +1,2078 @@ +/* + * Copyright 2017-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; + +import java.io.IOException; +import java.sql.ResultSet; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.config.PropertiesFactoryBean; +import org.springframework.context.ApplicationListener; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.core.io.ClassPathResource; +import 
org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.*; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.repository.query.Modifying; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.ConditionalOnDatabase; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.jdbc.testing.TestDatabaseFeatures; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.relational.core.mapping.Sequence; +import org.springframework.data.relational.core.mapping.event.AbstractRelationalEvent; +import org.springframework.data.relational.core.mapping.event.AfterConvertEvent; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.repository.Lock; +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.repository.ListCrudRepository; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.core.support.PropertiesBasedNamedQueries; +import org.springframework.data.repository.core.support.RepositoryFactoryCustomizer; +import org.springframework.data.repository.query.ExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.data.repository.query.Param; +import 
org.springframework.data.repository.query.QueryByExampleExecutor; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.support.WindowIterator; +import org.springframework.data.util.Streamable; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.lang.Nullable; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Very simple use cases for creation and usage of JdbcRepositories. + * + * @author Jens Schauder + * @author Mark Paluch + * @author Chirag Tailor + * @author Diego Krupitza + * @author Christopher Klein + * @author Mikhail Polivakha + * @author Paul Jones + */ +@IntegrationTest +public class JdbcRepositoryIntegrationTests { + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + @Autowired MyEventListener eventListener; + @Autowired RootRepository rootRepository; + @Autowired WithDelimitedColumnRepository withDelimitedColumnRepository; + @Autowired EntityWithSequenceRepository entityWithSequenceRepository; + + @BeforeEach + public void before() { + + repository.deleteAll(); + + eventListener.events.clear(); + } + + @Test // DATAJDBC-95 + public void savesAnEntity() { + + DummyEntity entity = repository.save(createEntity()); + + assertThat(JdbcTestUtils.countRowsInTableWhere(template.getJdbcOperations(), "dummy_entity", + "id_Prop = " + entity.getIdProp())).isEqualTo(1); + } + + @Test // GH-1923 + @EnabledOnFeature(value = TestDatabaseFeatures.Feature.SUPPORTS_SEQUENCES) + public void saveEntityWithTargetSequenceSpecified() { + + EntityWithSequence first = entityWithSequenceRepository.save(new EntityWithSequence("first")); + EntityWithSequence second = entityWithSequenceRepository.save(new EntityWithSequence("second")); + + assertThat(first.getId()).isNotNull(); + 
assertThat(second.getId()).isNotNull(); + assertThat(first.getId()).isLessThan(second.getId()); + assertThat(first.getName()).isEqualTo("first"); + assertThat(second.getName()).isEqualTo("second"); + } + + @Test // GH-1923 + @EnabledOnFeature(value = TestDatabaseFeatures.Feature.SUPPORTS_SEQUENCES) + public void batchInsertEntityWithTargetSequenceSpecified() { + + Iterable results = entityWithSequenceRepository + .saveAll(List.of(new EntityWithSequence("first"), new EntityWithSequence("second"))); + + assertThat(results).hasSize(2).extracting(EntityWithSequence::getId).containsExactly(1L, 2L); + } + + @Test // DATAJDBC-95 + public void saveAndLoadAnEntity() { + + DummyEntity entity = repository.save(createEntity()); + + assertThat(repository.findById(entity.getIdProp())).hasValueSatisfying(it -> { + + assertThat(it.getIdProp()).isEqualTo(entity.getIdProp()); + assertThat(it.getName()).isEqualTo(entity.getName()); + }); + } + + @Test // DATAJDBC-97 + public void insertsManyEntities() { + + DummyEntity entity = createEntity(); + DummyEntity other = createEntity(); + + repository.saveAll(asList(entity, other)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getIdProp) // + .containsExactlyInAnyOrder(entity.getIdProp(), other.getIdProp()); + } + + @Test // DATAJDBC-97 + public void existsReturnsTrueIffEntityExists() { + + DummyEntity entity = repository.save(createEntity()); + + assertThat(repository.existsById(entity.getIdProp())).isTrue(); + assertThat(repository.existsById(entity.getIdProp() + 1)).isFalse(); + } + + @Test // DATAJDBC-97 + public void findAllFindsAllEntities() { + + DummyEntity entity = repository.save(createEntity()); + DummyEntity other = repository.save(createEntity()); + + Iterable all = repository.findAll(); + + assertThat(all)// + .extracting(DummyEntity::getIdProp)// + .containsExactlyInAnyOrder(entity.getIdProp(), other.getIdProp()); + } + + @Test // DATAJDBC-97 + public void findAllFindsAllSpecifiedEntities() { + + 
DummyEntity entity = repository.save(createEntity()); + DummyEntity other = repository.save(createEntity()); + + assertThat(repository.findAllById(asList(entity.getIdProp(), other.getIdProp())))// + .extracting(DummyEntity::getIdProp)// + .containsExactlyInAnyOrder(entity.getIdProp(), other.getIdProp()); + } + + @Test // DATAJDBC-97 + public void countsEntities() { + + repository.save(createEntity()); + repository.save(createEntity()); + repository.save(createEntity()); + + assertThat(repository.count()).isEqualTo(3L); + } + + @Test // DATAJDBC-97 + public void deleteById() { + + DummyEntity one = repository.save(createEntity()); + DummyEntity two = repository.save(createEntity()); + DummyEntity three = repository.save(createEntity()); + + repository.deleteById(two.getIdProp()); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getIdProp) // + .containsExactlyInAnyOrder(one.getIdProp(), three.getIdProp()); + } + + @Test // DATAJDBC-97 + public void deleteByEntity() { + + DummyEntity one = repository.save(createEntity()); + DummyEntity two = repository.save(createEntity()); + DummyEntity three = repository.save(createEntity()); + + repository.delete(one); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getIdProp) // + .containsExactlyInAnyOrder(two.getIdProp(), three.getIdProp()); + } + + @Test // DATAJDBC-97 + public void deleteByList() { + + DummyEntity one = repository.save(createEntity()); + DummyEntity two = repository.save(createEntity()); + DummyEntity three = repository.save(createEntity()); + + repository.deleteAll(asList(one, three)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getIdProp) // + .containsExactlyInAnyOrder(two.getIdProp()); + } + + @Test // DATAJDBC-629 + public void deleteByIdList() { + + DummyEntity one = repository.save(createEntity()); + DummyEntity two = repository.save(createEntity()); + DummyEntity three = repository.save(createEntity()); + + 
repository.deleteAllById(asList(one.idProp, three.idProp)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getIdProp) // + .containsExactlyInAnyOrder(two.getIdProp()); + } + + @Test // DATAJDBC-97 + public void deleteAll() { + + repository.save(createEntity()); + repository.save(createEntity()); + repository.save(createEntity()); + + assertThat(repository.findAll()).isNotEmpty(); + + repository.deleteAll(); + + assertThat(repository.findAll()).isEmpty(); + } + + @Test // DATAJDBC-98 + public void update() { + + DummyEntity entity = repository.save(createEntity()); + + entity.setName("something else"); + DummyEntity saved = repository.save(entity); + + assertThat(repository.findById(entity.getIdProp())).hasValueSatisfying(it -> { + assertThat(it.getName()).isEqualTo(saved.getName()); + }); + } + + @Test // DATAJDBC-98 + public void updateMany() { + + DummyEntity entity = repository.save(createEntity()); + DummyEntity other = repository.save(createEntity()); + + entity.setName("something else"); + other.setName("others Name"); + + repository.saveAll(asList(entity, other)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getName) // + .containsExactlyInAnyOrder(entity.getName(), other.getName()); + } + + @Test // GH-537 + void insertsOrUpdatesManyEntities() { + + DummyEntity entity = repository.save(createEntity()); + entity.setName("something else"); + DummyEntity other = createEntity(); + other.setName("others name"); + repository.saveAll(asList(other, entity)); + + assertThat(repository.findAll()) // + .extracting(DummyEntity::getName) // + .containsExactlyInAnyOrder(entity.getName(), other.getName()); + } + + @Test // DATAJDBC-112 + public void findByIdReturnsEmptyWhenNoneFound() { + + // NOT saving anything, so DB is empty + + assertThat(repository.findById(-1L)).isEmpty(); + } + + @Test // DATAJDBC-464, DATAJDBC-318 + public void executeQueryWithParameterRequiringConversion() { + + Instant now = 
createDummyBeforeAndAfterNow(); + + assertThat(repository.after(now)) // + .extracting(DummyEntity::getName) // + .containsExactly("second"); + + assertThat(repository.findAllByPointInTimeAfter(now)) // + .extracting(DummyEntity::getName) // + .containsExactly("second"); + } + + @Test // DATAJDBC-318 + public void queryMethodShouldEmitEvents() { + + repository.save(createEntity()); + eventListener.events.clear(); + + repository.findAllWithSql(); + + assertThat(eventListener.events).hasSize(1).hasOnlyElementsOfType(AfterConvertEvent.class); + } + + @Test // DATAJDBC-318 + public void queryMethodWithCustomRowMapperDoesNotEmitEvents() { + + repository.save(createEntity()); + eventListener.events.clear(); + + repository.findAllWithCustomMapper(); + + assertThat(eventListener.events).isEmpty(); + } + + @Test // DATAJDBC-234 + public void findAllByQueryName() { + + repository.save(createEntity()); + assertThat(repository.findAllByNamedQuery()).hasSize(1); + } + + @Test + void findAllByFirstnameWithLock() { + + DummyEntity dummyEntity = createEntity(); + repository.save(dummyEntity); + assertThat(repository.findAllByName(dummyEntity.getName())).hasSize(1); + } + + @Test // GH-1022 + public void findAllByCustomQueryName() { + + repository.save(createEntity()); + assertThat(repository.findAllByCustomNamedQuery()).hasSize(1); + } + + @Test // DATAJDBC-341 + public void findWithMissingQuery() { + + DummyEntity dummy = repository.save(createEntity()); + + DummyEntity loaded = repository.withMissingColumn(dummy.idProp); + + assertThat(loaded.idProp).isEqualTo(dummy.idProp); + assertThat(loaded.name).isNull(); + assertThat(loaded.pointInTime).isNull(); + } + + @Test // DATAJDBC-529 + public void existsWorksAsExpected() { + + DummyEntity dummy = repository.save(createEntity()); + + assertSoftly(softly -> { + + softly.assertThat(repository.existsByName(dummy.getName())) // + .describedAs("Positive") // + .isTrue(); + softly.assertThat(repository.existsByName("not an existing 
name")) // + .describedAs("Positive") // + .isFalse(); + }); + } + + @Test // DATAJDBC-604 + public void existsInWorksAsExpected() { + + DummyEntity dummy = repository.save(createEntity()); + + assertSoftly(softly -> { + + softly.assertThat(repository.existsByNameIn(dummy.getName())) // + .describedAs("Positive") // + .isTrue(); + softly.assertThat(repository.existsByNameIn()) // + .describedAs("Negative") // + .isFalse(); + }); + } + + @Test // DATAJDBC-604 + public void existsNotInWorksAsExpected() { + + DummyEntity dummy = repository.save(createEntity()); + + assertSoftly(softly -> { + + softly.assertThat(repository.existsByNameNotIn(dummy.getName())) // + .describedAs("Positive") // + .isFalse(); + softly.assertThat(repository.existsByNameNotIn()) // + .describedAs("Negative") // + .isTrue(); + }); + } + + @Test // DATAJDBC-534 + public void countByQueryDerivation() { + + DummyEntity one = createEntity(); + DummyEntity two = createEntity(); + two.name = "other"; + DummyEntity three = createEntity(); + + repository.saveAll(asList(one, two, three)); + + assertThat(repository.countByName(one.getName())).isEqualTo(2); + } + + @Test // GH-619 + public void findBySpElWorksAsExpected() { + DummyEntity r = repository.save(createEntity()); + + // assign the new id to the global ID provider holder; this is similar to Spring Security's SecurityContextHolder + MyIdContextProvider.ExtensionRoot.ID = r.getIdProp(); + + // expect, that we can find our newly created entity based upon the ID provider + assertThat(repository.findWithSpEL().getIdProp()).isEqualTo(r.getIdProp()); + } + + @Test // GH-945 + @ConditionalOnDatabase(DatabaseType.POSTGRES) + public void usePrimitiveArrayAsArgument() { + assertThat(repository.unnestPrimitive(new int[] { 1, 2, 3 })).containsExactly(1, 2, 3); + } + + @Test // GH-774 + public void pageByNameShouldReturnCorrectResult() { + + repository.saveAll(Arrays.asList(new DummyEntity("a1"), new DummyEntity("a2"), new DummyEntity("a3"))); + + Page page 
= repository.findPageByNameContains("a", PageRequest.of(0, 5)); + + assertThat(page.getContent()).hasSize(3); + assertThat(page.getTotalElements()).isEqualTo(3); + assertThat(page.getTotalPages()).isEqualTo(1); + + assertThat(repository.findPageByNameContains("a", PageRequest.of(0, 2)).getContent()).hasSize(2); + assertThat(repository.findPageByNameContains("a", PageRequest.of(1, 2)).getContent()).hasSize(1); + } + + @Test // GH-1654 + public void selectWithLimitShouldReturnCorrectResult() { + + repository.saveAll(Arrays.asList(new DummyEntity("a1"), new DummyEntity("a2"), new DummyEntity("a3"))); + + List page = repository.findByNameContains("a", Limit.of(3)); + assertThat(page).hasSize(3); + + assertThat(repository.findByNameContains("a", Limit.of(2))).hasSize(2); + assertThat(repository.findByNameContains("a", Limit.unlimited())).hasSize(3); + } + + @Test // GH-774 + public void sliceByNameShouldReturnCorrectResult() { + + repository.saveAll(Arrays.asList(new DummyEntity("a1"), new DummyEntity("a2"), new DummyEntity("a3"))); + + Slice slice = repository.findSliceByNameContains("a", PageRequest.of(0, 5)); + + assertThat(slice.getContent()).hasSize(3); + assertThat(slice.hasNext()).isFalse(); + + slice = repository.findSliceByNameContains("a", PageRequest.of(0, 2)); + + assertThat(slice.getContent()).hasSize(2); + assertThat(slice.hasNext()).isTrue(); + } + + @Test // GH-935 + public void queryByOffsetDateTime() { + + Instant now = createDummyBeforeAndAfterNow(); + OffsetDateTime timeArgument = OffsetDateTime.ofInstant(now, ZoneOffset.ofHours(2)); + + List entities = repository.findByOffsetDateTime(timeArgument); + + assertThat(entities).extracting(DummyEntity::getName).containsExactly("second"); + } + + @Test // GH-971 + public void stringQueryProjectionShouldReturnProjectedEntities() { + + repository.save(createEntity()); + + List result = repository.findProjectedWithSql(DummyProjection.class); + + assertThat(result).hasSize(1); + 
assertThat(result.get(0).getName()).isEqualTo("Entity Name"); + } + + @Test // GH-971 + public void stringQueryProjectionShouldReturnDtoProjectedEntities() { + + repository.save(createEntity()); + + List result = repository.findProjectedWithSql(DtoProjection.class); + + assertThat(result).hasSize(1); + assertThat(result.get(0).getName()).isEqualTo("Entity Name"); + } + + @Test // GH-971 + public void partTreeQueryProjectionShouldReturnProjectedEntities() { + + repository.save(createEntity()); + + List result = repository.findProjectedByName("Entity Name"); + + assertThat(result).hasSize(1); + assertThat(result.get(0).getName()).isEqualTo("Entity Name"); + } + + @Test // GH-971 + public void pageQueryProjectionShouldReturnProjectedEntities() { + + repository.save(createEntity()); + + Page result = repository.findPageProjectionByName("Entity Name", PageRequest.ofSize(10)); + + assertThat(result).hasSize(1); + assertThat(result.getContent().get(0).getName()).isEqualTo("Entity Name"); + } + + @Test // GH-974 + @ConditionalOnDatabase(DatabaseType.POSTGRES) + void intervalCalculation() { + + repository.updateWithIntervalCalculation(23L, LocalDateTime.now()); + } + + @Test // GH-908 + void derivedQueryWithBooleanLiteralFindsCorrectValues() { + + repository.save(createEntity()); + DummyEntity entity = createEntity(); + entity.flag = true; + entity = repository.save(entity); + + List result = repository.findByFlagTrue(); + + assertThat(result).extracting(e -> e.idProp).containsExactly(entity.idProp); + } + + @Test // GH-987 + void queryBySimpleReference() { + + DummyEntity one = repository.save(createEntity()); + DummyEntity two = createEntity(); + two.ref = AggregateReference.to(one.idProp); + two = repository.save(two); + + List result = repository.findByRef(one.idProp.intValue()); + + assertThat(result).extracting(e -> e.idProp).containsExactly(two.idProp); + } + + @Test // GH-987 + void queryByAggregateReference() { + + DummyEntity one = repository.save(createEntity()); 
+ DummyEntity two = createEntity(); + two.ref = AggregateReference.to(one.idProp); + two = repository.save(two); + + List result = repository.findByRef(two.ref); + + assertThat(result).extracting(e -> e.idProp).containsExactly(two.idProp); + } + + @Test // GH-1167 + void stringResult() { + + repository.save(createEntity()); // just ensure we have data in the table + + assertThat(repository.returnInput("HELLO")).isEqualTo("HELLO"); + } + + @Test // GH-1167 + void nullStringResult() { + + repository.save(createEntity()); // just ensure we have data in the table + + assertThat(repository.returnInput(null)).isNull(); + } + + @Test // GH-1212 + void queryByEnumTypeIn() { + + DummyEntity dummyA = new DummyEntity("dummyA"); + dummyA.setDirection(Direction.LEFT); + DummyEntity dummyB = new DummyEntity("dummyB"); + dummyB.setDirection(Direction.CENTER); + DummyEntity dummyC = new DummyEntity("dummyC"); + dummyC.setDirection(Direction.RIGHT); + repository.saveAll(asList(dummyA, dummyB, dummyC)); + + assertThat(repository.findByEnumTypeIn(Set.of(Direction.LEFT, Direction.RIGHT))) + .extracting(DummyEntity::getDirection).containsExactlyInAnyOrder(Direction.LEFT, Direction.RIGHT); + } + + @Test // GH-1212 + void queryByEnumTypeEqual() { + + DummyEntity dummyA = new DummyEntity("dummyA"); + dummyA.setDirection(Direction.LEFT); + DummyEntity dummyB = new DummyEntity("dummyB"); + dummyB.setDirection(Direction.CENTER); + DummyEntity dummyC = new DummyEntity("dummyC"); + dummyC.setDirection(Direction.RIGHT); + repository.saveAll(asList(dummyA, dummyB, dummyC)); + + assertThat(repository.findByEnumType(Direction.CENTER)).extracting(DummyEntity::getDirection) + .containsExactlyInAnyOrder(Direction.CENTER); + } + + @Test // GH-537 + void manyInsertsWithNestedEntities() { + + Root root1 = createRoot("root1"); + Root root2 = createRoot("root2"); + + List savedRoots = rootRepository.saveAll(asList(root1, root2)); + + List reloadedRoots = rootRepository.findAllByOrderByIdAsc(); + 
assertThat(reloadedRoots).isEqualTo(savedRoots); + assertThat(reloadedRoots).hasSize(2); + assertIsEqualToWithNonNullIds(reloadedRoots.get(0), root1); + assertIsEqualToWithNonNullIds(reloadedRoots.get(1), root2); + } + + @Test // GH-537 + @EnabledOnFeature(TestDatabaseFeatures.Feature.SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) + void manyUpdatesWithNestedEntities() { + + Root root1 = createRoot("root1"); + Root root2 = createRoot("root2"); + List roots = rootRepository.saveAll(asList(root1, root2)); + Root savedRoot1 = roots.get(0); + Root updatedRoot1 = new Root(savedRoot1.id, "updated" + savedRoot1.name, + new Intermediate(savedRoot1.intermediate.id, "updated" + savedRoot1.intermediate.name, + new Leaf(savedRoot1.intermediate.leaf.id, "updated" + savedRoot1.intermediate.leaf.name), emptyList()), + savedRoot1.intermediates); + Root savedRoot2 = roots.get(1); + Root updatedRoot2 = new Root(savedRoot2.id, "updated" + savedRoot2.name, savedRoot2.intermediate, + singletonList( + new Intermediate(savedRoot2.intermediates.get(0).id, "updated" + savedRoot2.intermediates.get(0).name, null, + singletonList(new Leaf(savedRoot2.intermediates.get(0).leaves.get(0).id, + "updated" + savedRoot2.intermediates.get(0).leaves.get(0).name))))); + + List updatedRoots = rootRepository.saveAll(asList(updatedRoot1, updatedRoot2)); + + List reloadedRoots = rootRepository.findAllByOrderByIdAsc(); + assertThat(reloadedRoots).isEqualTo(updatedRoots); + assertThat(reloadedRoots).containsExactly(updatedRoot1, updatedRoot2); + } + + @Test // GH-537 + @EnabledOnFeature(TestDatabaseFeatures.Feature.SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) + void manyInsertsAndUpdatesWithNestedEntities() { + + Root root1 = createRoot("root1"); + Root savedRoot1 = rootRepository.save(root1); + Root updatedRoot1 = new Root(savedRoot1.id, "updated" + savedRoot1.name, + new Intermediate(savedRoot1.intermediate.id, "updated" + savedRoot1.intermediate.name, + new Leaf(savedRoot1.intermediate.leaf.id, "updated" 
+ savedRoot1.intermediate.leaf.name), emptyList()), + savedRoot1.intermediates); + Root root2 = createRoot("root2"); + List savedRoots = rootRepository.saveAll(asList(updatedRoot1, root2)); + + List reloadedRoots = rootRepository.findAllByOrderByIdAsc(); + assertThat(reloadedRoots).isEqualTo(savedRoots); + assertThat(reloadedRoots.get(0)).isEqualTo(updatedRoot1); + assertIsEqualToWithNonNullIds(reloadedRoots.get(1), root2); + } + + @Test // GH-1192 + void findOneByExampleShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + dummyEntity2.setName("Diego"); + repository.save(dummyEntity2); + + Example diegoExample = Example.of(new DummyEntity("Diego")); + Optional foundExampleDiego = repository.findOne(diegoExample); + + assertThat(foundExampleDiego.get().getName()).isEqualTo("Diego"); + } + + @Test // GH-1192 + void findOneByExampleMultipleMatchShouldGetOne() { + + repository.save(createEntity()); + repository.save(createEntity()); + + Example example = Example.of(createEntity()); + + assertThatThrownBy(() -> repository.findOne(example)).isInstanceOf(IncorrectResultSizeDataAccessException.class) + .hasMessageContaining("expected 1, actual 2"); + } + + @Test // GH-1192 + void findOneByExampleShouldGetNone() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + repository.save(dummyEntity1); + + Example diegoExample = Example.of(new DummyEntity("NotExisting")); + + Optional foundExampleDiego = repository.findOne(diegoExample); + + assertThat(foundExampleDiego).isNotPresent(); + } + + @Test // GH-1192 + void findAllByExampleShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + dummyEntity2.setName("Diego"); + repository.save(dummyEntity2); + + Example example = Example.of(new DummyEntity("Diego")); + + 
Iterable allFound = repository.findAll(example); + + assertThat(allFound).extracting(DummyEntity::getName) // + .containsExactly(example.getProbe().getName()); + } + + @Test // GH-1192 + void findAllByExampleMultipleMatchShouldGetOne() { + + repository.save(createEntity()); + repository.save(createEntity()); + + Example example = Example.of(createEntity()); + + Iterable allFound = repository.findAll(example); + + assertThat(allFound) // + .hasSize(2) // + .extracting(DummyEntity::getName) // + .containsOnly(example.getProbe().getName()); + } + + @Test // GH-1192 + void findAllByExampleShouldGetNone() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + + repository.save(dummyEntity1); + + Example example = Example.of(new DummyEntity("NotExisting")); + + Iterable allFound = repository.findAll(example); + + assertThat(allFound).isEmpty(); + } + + @Test // GH-1192 + void findAllByExamplePageableShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + dummyEntity2.setName("Diego"); + + repository.save(dummyEntity2); + + Example example = Example.of(new DummyEntity("Diego")); + Pageable pageRequest = PageRequest.of(0, 10); + + Iterable allFound = repository.findAll(example, pageRequest); + + assertThat(allFound).extracting(DummyEntity::getName) // + .containsExactly(example.getProbe().getName()); + } + + @Test // GH-1192 + void findAllByExamplePageableMultipleMatchShouldGetOne() { + + repository.save(createEntity()); + repository.save(createEntity()); + + Example example = Example.of(createEntity()); + Pageable pageRequest = PageRequest.of(0, 10); + + Iterable allFound = repository.findAll(example, pageRequest); + + assertThat(allFound) // + .hasSize(2) // + .extracting(DummyEntity::getName) // + .containsOnly(example.getProbe().getName()); + } + + @Test // GH-1192 + void findAllByExamplePageableShouldGetNone() { + + 
DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + + repository.save(dummyEntity1); + + Example example = Example.of(new DummyEntity("NotExisting")); + Pageable pageRequest = PageRequest.of(0, 10); + + Iterable allFound = repository.findAll(example, pageRequest); + + assertThat(allFound).isEmpty(); + } + + @Test // GH-1192 + void findAllByExamplePageableOutsidePageShouldGetNone() { + + repository.save(createEntity()); + repository.save(createEntity()); + + Example example = Example.of(createEntity()); + Pageable pageRequest = PageRequest.of(10, 10); + + Iterable allFound = repository.findAll(example, pageRequest); + + assertThat(allFound) // + .isNotNull() // + .isEmpty(); + } + + @ParameterizedTest // GH-1192 + @MethodSource("findAllByExamplePageableSource") + void findAllByExamplePageable(Pageable pageRequest, int size, int totalPages, List notContains) { + + for (int i = 0; i < 100; i++) { + DummyEntity dummyEntity = createEntity(); + dummyEntity.setFlag(true); + dummyEntity.setName("" + i); + + repository.save(dummyEntity); + } + + DummyEntity dummyEntityExample = createEntity(); + dummyEntityExample.setName(null); + dummyEntityExample.setFlag(true); + + Example example = Example.of(dummyEntityExample); + + Page allFound = repository.findAll(example, pageRequest); + + // page has correct size + assertThat(allFound) // + .isNotNull() // + .hasSize(size); + + // correct number of total + assertThat(allFound.getTotalElements()).isEqualTo(100); + + assertThat(allFound.getTotalPages()).isEqualTo(totalPages); + + if (!notContains.isEmpty()) { + assertThat(allFound) // + .extracting(DummyEntity::getName) // + .doesNotContain(notContains.toArray(new String[0])); + } + } + + public static Stream findAllByExamplePageableSource() { + return Stream.of( // + Arguments.of(PageRequest.of(0, 3), 3, 34, Arrays.asList("3", "4", "100")), // + Arguments.of(PageRequest.of(1, 10), 10, 10, Arrays.asList("9", "20", "30")), // + Arguments.of(PageRequest.of(2, 
10), 10, 10, Arrays.asList("1", "2", "3")), // + Arguments.of(PageRequest.of(33, 3), 1, 34, Collections.emptyList()), // + Arguments.of(PageRequest.of(36, 3), 0, 34, Collections.emptyList()), // + Arguments.of(PageRequest.of(0, 10000), 100, 1, Collections.emptyList()), // + Arguments.of(PageRequest.of(100, 10000), 0, 1, Collections.emptyList()) // + ); + } + + @Test // GH-1192 + void existsByExampleShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + dummyEntity2.setName("Diego"); + repository.save(dummyEntity2); + + Example example = Example.of(new DummyEntity("Diego")); + + boolean exists = repository.exists(example); + + assertThat(exists).isTrue(); + } + + @Test // GH-1192 + void existsByExampleMultipleMatchShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + repository.save(dummyEntity2); + + Example example = Example.of(createEntity()); + + boolean exists = repository.exists(example); + assertThat(exists).isTrue(); + } + + @Test // GH-1192 + void existsByExampleShouldGetNone() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + + repository.save(dummyEntity1); + + Example example = Example.of(new DummyEntity("NotExisting")); + + boolean exists = repository.exists(example); + + assertThat(exists).isFalse(); + } + + @Test // GH-1192 + void existsByExampleComplex() { + + Instant pointInTime = Instant.now().truncatedTo(ChronoUnit.MILLIS).minusSeconds(10000); + + repository.save(createEntity()); + + DummyEntity two = createEntity(); + two.setName("Diego"); + two.setPointInTime(pointInTime); + repository.save(two); + + DummyEntity exampleEntitiy = createEntity(); + exampleEntitiy.setName("Diego"); + exampleEntitiy.setPointInTime(pointInTime); + + Example example = Example.of(exampleEntitiy); + + boolean exists = 
repository.exists(example); + assertThat(exists).isTrue(); + } + + @Test // GH-1192 + void countByExampleShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + dummyEntity2.setName("Diego"); + + repository.save(dummyEntity2); + + Example example = Example.of(new DummyEntity("Diego")); + + long count = repository.count(example); + + assertThat(count).isOne(); + } + + @Test // GH-1192 + void countByExampleMultipleMatchShouldGetOne() { + + DummyEntity dummyEntity1 = createEntity(); + repository.save(dummyEntity1); + + DummyEntity dummyEntity2 = createEntity(); + repository.save(dummyEntity2); + + Example example = Example.of(createEntity()); + + long count = repository.count(example); + assertThat(count).isEqualTo(2); + } + + @Test // GH-1192 + void countByExampleShouldGetNone() { + + DummyEntity dummyEntity1 = createEntity(); + dummyEntity1.setFlag(true); + + repository.save(dummyEntity1); + + Example example = Example.of(new DummyEntity("NotExisting")); + + long count = repository.count(example); + + assertThat(count).isNotNull().isZero(); + } + + @Test // GH-1192 + void countByExampleComplex() { + + Instant pointInTime = Instant.now().minusSeconds(10000).truncatedTo(ChronoUnit.MILLIS); + repository.save(createEntity()); + + DummyEntity two = createEntity(); + two.setName("Diego"); + two.setPointInTime(pointInTime); + repository.save(two); + + DummyEntity exampleEntitiy = createEntity(); + exampleEntitiy.setName("Diego"); + exampleEntitiy.setPointInTime(pointInTime); + + Example example = Example.of(exampleEntitiy); + + long count = repository.count(example); + assertThat(count).isOne(); + } + + @Test // GH-1192 + void fetchByExampleFluentAllSimple() { + + String searchName = "Diego"; + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + + DummyEntity two = createEntity(); + + two.setName(searchName); + 
two.setPointInTime(now.minusSeconds(10000)); + two = repository.save(two); + // certain databases consider it a great idea to assign default values to timestamp fields. + // I'm looking at you MariaDb. + two = repository.findById(two.idProp).orElseThrow(); + + DummyEntity third = createEntity(); + third.setName(searchName); + third.setPointInTime(now.minusSeconds(200000)); + third = repository.save(third); + // certain databases consider it a great idea to assign default values to timestamp fields. + // I'm looking at you MariaDb. + third = repository.findById(third.idProp).orElseThrow(); + + DummyEntity exampleEntitiy = createEntity(); + exampleEntitiy.setName(searchName); + + Example example = Example.of(exampleEntitiy); + + List matches = repository.findBy(example, p -> p.sortBy(Sort.by("pointInTime").descending()).all()); + assertThat(matches).containsExactly(two, third); + } + + @Test // GH-1609 + void findByScrollPosition() { + + DummyEntity one = new DummyEntity("one"); + one.setFlag(true); + + DummyEntity two = new DummyEntity("two"); + two.setFlag(true); + + DummyEntity three = new DummyEntity("three"); + three.setFlag(true); + + DummyEntity four = new DummyEntity("four"); + four.setFlag(false); + + repository.saveAll(Arrays.asList(one, two, three, four)); + + Example example = Example.of(one, ExampleMatcher.matching().withIgnorePaths("name", "idProp")); + + Window first = repository.findBy(example, q -> q.limit(2).sortBy(Sort.by("name"))) + .scroll(ScrollPosition.offset()); + assertThat(first.map(DummyEntity::getName)).containsExactly("one", "three"); + + Window second = repository.findBy(example, q -> q.limit(2).sortBy(Sort.by("name"))) + .scroll(ScrollPosition.offset(1)); + assertThat(second.map(DummyEntity::getName)).containsExactly("two"); + + WindowIterator iterator = WindowIterator.of( + scrollPosition -> repository.findBy(example, q -> q.limit(2).sortBy(Sort.by("name")).scroll(scrollPosition))) + .startingAt(ScrollPosition.offset()); + + List 
result = Streamable.of(() -> iterator).stream().map(DummyEntity::getName).toList(); + + assertThat(result).hasSize(3).containsExactly("one", "three", "two"); + } + + @Test // GH-1192 + void fetchByExampleFluentCountSimple() { + + String searchName = "Diego"; + Instant now = Instant.now(); + + repository.save(createEntity()); + + DummyEntity two = createEntity(); + + two.setName(searchName); + two.setPointInTime(now.minusSeconds(10000)); + repository.save(two); + + DummyEntity third = createEntity(); + third.setName(searchName); + third.setPointInTime(now.minusSeconds(200000)); + repository.save(third); + + DummyEntity exampleEntitiy = createEntity(); + exampleEntitiy.setName(searchName); + + Example example = Example.of(exampleEntitiy); + + Long matches = repository.findBy(example, FluentQuery.FetchableFluentQuery::count); + assertThat(matches).isEqualTo(2); + } + + @Test // GH-1192 + void fetchByExampleFluentOnlyInstantFirstSimple() { + + String searchName = "Diego"; + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + + repository.save(createEntity()); + + DummyEntity two = createEntity(); + + two.setName(searchName); + two.setPointInTime(now.minusSeconds(10000)); + two = repository.save(two); + // certain databases consider it a great idea to assign default values to timestamp fields. + // I'm looking at you MariaDb. 
+ two = repository.findById(two.idProp).orElseThrow(); + + DummyEntity third = createEntity(); + third.setName(searchName); + third.setPointInTime(now.minusSeconds(200000)); + repository.save(third); + + DummyEntity exampleEntity = createEntity(); + exampleEntity.setName(searchName); + + Example example = Example.of(exampleEntity); + + Optional matches = repository.findBy(example, + p -> p.sortBy(Sort.by("pointInTime").descending()).first()); + + assertThat(matches).contains(two); + } + + @Test // GH-1192 + void fetchByExampleFluentOnlyInstantOneValueError() { + + String searchName = "Diego"; + Instant now = Instant.now(); + + repository.save(createEntity()); + + DummyEntity two = createEntity(); + two.setName(searchName); + two.setPointInTime(now.minusSeconds(10000)); + repository.save(two); + + DummyEntity third = createEntity(); + third.setName(searchName); + third.setPointInTime(now.minusSeconds(200000)); + repository.save(third); + + DummyEntity exampleEntitiy = createEntity(); + exampleEntitiy.setName(searchName); + + Example example = Example.of(exampleEntitiy); + + assertThatThrownBy(() -> repository.findBy(example, p -> p.sortBy(Sort.by("pointInTime").descending()).one())) + .isInstanceOf(IncorrectResultSizeDataAccessException.class).hasMessageContaining("expected 1, actual 2"); + } + + @Test // GH-1192 + void fetchByExampleFluentOnlyInstantOneValueSimple() { + + String searchName = "Diego"; + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + + repository.save(createEntity()); + + DummyEntity two = createEntity(); + two.setName(searchName); + two.setPointInTime(now.minusSeconds(10000)); + two = repository.save(two); + // certain databases consider it a great idea to assign default values to timestamp fields. + // I'm looking at you MariaDb. 
+ two = repository.findById(two.idProp).orElseThrow(); + + DummyEntity exampleEntitiy = createEntity(); + exampleEntitiy.setName(searchName); + + Example example = Example.of(exampleEntitiy); + + Optional match = repository.findBy(example, p -> p.sortBy(Sort.by("pointInTime").descending()).one()); + + assertThat(match).contains(two); + } + + @Test // GH-1192 + void fetchByExampleFluentOnlyInstantOneValueAsSimple() { + + String searchName = "Diego"; + Instant now = Instant.now(); + + repository.save(createEntity()); + + DummyEntity two = createEntity(); + two.setName(searchName); + two.setPointInTime(now.minusSeconds(10000)); + two = repository.save(two); + + DummyEntity exampleEntity = createEntity(); + exampleEntity.setName(searchName); + + Example example = Example.of(exampleEntity); + + Optional match = repository.findBy(example, p -> p.as(DummyProjectExample.class).one()); + + assertThat(match.get().getName()).contains(two.getName()); + } + + @Test + void fetchDtoWithNoArgsConstructorWithAggregateReferencePopulated() { + + DummyEntity entity = new DummyEntity(); + entity.setRef(AggregateReference.to(20L)); + entity.setName("Test Dto"); + repository.save(entity); + + assertThat(repository.findById(entity.idProp).orElseThrow().getRef()).isEqualTo(AggregateReference.to(20L)); + + DummyDto foundDto = repository.findDtoByIdProp(entity.idProp).orElseThrow(); + assertThat(foundDto.getName()).isEqualTo("Test Dto"); + assertThat(foundDto.getRef()).isEqualTo(AggregateReference.to(20L)); + } + + @Test // GH-1759 + void fetchDtoWithAllArgsConstructorWithAggregateReferencePopulated() { + + DummyEntity entity = new DummyEntity(); + entity.setRef(AggregateReference.to(20L)); + entity.setName("Test Dto"); + repository.save(entity); + + assertThat(repository.findById(entity.idProp).orElseThrow().getRef()).isEqualTo(AggregateReference.to(20L)); + + DummyAllArgsDto foundDto = repository.findAllArgsDtoByIdProp(entity.idProp).orElseThrow(); + 
assertThat(foundDto.getName()).isEqualTo("Test Dto"); + assertThat(foundDto.getRef()).isEqualTo(AggregateReference.to(20L)); + } + + @Test // GH-1405 + void withDelimitedColumnTest() { + + WithDelimitedColumn withDelimitedColumn = new WithDelimitedColumn(); + withDelimitedColumn.setType("TYPICAL"); + withDelimitedColumn.setIdentifier("UR-123"); + + WithDelimitedColumn saved = withDelimitedColumnRepository.save(withDelimitedColumn); + + assertThat(saved.getId()).isNotNull(); + + Optional inDatabase = withDelimitedColumnRepository.findById(saved.getId()); + + assertThat(inDatabase).isPresent(); + assertThat(inDatabase.get().getIdentifier()).isEqualTo("UR-123"); + } + + @Test // GH-1323 + @EnabledOnFeature(TestDatabaseFeatures.Feature.WHERE_IN_TUPLE) + void queryWithTupleIn() { + + DummyEntity one = repository.save(createEntity("one")); + DummyEntity two = repository.save(createEntity("two")); + DummyEntity three = repository.save(createEntity("three")); + + List tuples = List.of(new Object[] { two.idProp, "two" }, // matches "two" + new Object[] { three.idProp, "two" } // matches nothing + ); + + List result = repository.findByListInTuple(tuples); + + assertThat(result).containsOnly(two); + } + + @Test // GH-1900 + void queryByListOfByteArray() { + + DummyEntity one = repository.save(createEntity("one", it -> it.setBytes(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }))); + DummyEntity two = repository.save(createEntity("two", it -> it.setBytes(new byte[] { 8, 7, 6, 5, 4, 3, 2, 1 }))); + DummyEntity three = repository + .save(createEntity("three", it -> it.setBytes(new byte[] { 3, 3, 3, 3, 3, 3, 3, 3 }))); + + List result = repository.findByBytesIn(List.of(three.getBytes(), one.getBytes())); + + assertThat(result).extracting("idProp").containsExactlyInAnyOrder(one.idProp, three.idProp); + } + + @Test // GH-1900 + void queryByByteArray() { + + DummyEntity one = repository.save(createEntity("one", it -> it.setBytes(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }))); + DummyEntity two = 
repository.save(createEntity("two", it -> it.setBytes(new byte[] { 8, 7, 6, 5, 4, 3, 2, 1 }))); + DummyEntity three = repository + .save(createEntity("three", it -> it.setBytes(new byte[] { 3, 3, 3, 3, 3, 3, 3, 3 }))); + + List result = repository.findByBytes(two.getBytes()); + + assertThat(result).extracting("idProp").containsExactly(two.idProp); + } + + private Root createRoot(String namePrefix) { + + return new Root(null, namePrefix, + new Intermediate(null, namePrefix + "Intermediate", new Leaf(null, namePrefix + "Leaf"), emptyList()), + singletonList(new Intermediate(null, namePrefix + "QualifiedIntermediate", null, + singletonList(new Leaf(null, namePrefix + "QualifiedLeaf"))))); + } + + private void assertIsEqualToWithNonNullIds(Root reloadedRoot1, Root root1) { + + assertThat(reloadedRoot1.id).isNotNull(); + assertThat(reloadedRoot1.name).isEqualTo(root1.name); + assertThat(reloadedRoot1.intermediate.id).isNotNull(); + assertThat(reloadedRoot1.intermediate.name).isEqualTo(root1.intermediate.name); + assertThat(reloadedRoot1.intermediates.get(0).id).isNotNull(); + assertThat(reloadedRoot1.intermediates.get(0).name).isEqualTo(root1.intermediates.get(0).name); + assertThat(reloadedRoot1.intermediate.leaf.id).isNotNull(); + assertThat(reloadedRoot1.intermediate.leaf.name).isEqualTo(root1.intermediate.leaf.name); + assertThat(reloadedRoot1.intermediates.get(0).leaves.get(0).id).isNotNull(); + assertThat(reloadedRoot1.intermediates.get(0).leaves.get(0).name) + .isEqualTo(root1.intermediates.get(0).leaves.get(0).name); + } + + private Instant createDummyBeforeAndAfterNow() { + + Instant now = Instant.now(); + + DummyEntity first = createEntity(); + Instant earlier = now.minusSeconds(1000L); + OffsetDateTime earlierPlus3 = earlier.atOffset(ZoneOffset.ofHours(3)); + first.setPointInTime(earlier); + first.offsetDateTime = earlierPlus3; + + first.setName("first"); + + DummyEntity second = createEntity(); + Instant later = now.plusSeconds(1000L); + OffsetDateTime 
laterPlus3 = later.atOffset(ZoneOffset.ofHours(3)); + second.setPointInTime(later); + second.offsetDateTime = laterPlus3; + second.setName("second"); + + repository.saveAll(asList(first, second)); + return now; + } + + interface DummyProjectExample { + String getName(); + } + + interface DummyEntityRepository extends CrudRepository, QueryByExampleExecutor { + + @Lock(LockMode.PESSIMISTIC_WRITE) + List findAllByName(String name); + + List findAllByNamedQuery(); + + @Query(name = "DummyEntity.customQuery") + List findAllByCustomNamedQuery(); + + List findAllByPointInTimeAfter(Instant instant); + + @Query("SELECT * FROM DUMMY_ENTITY") + List findAllWithSql(); + + @Query("SELECT * FROM DUMMY_ENTITY") + List findProjectedWithSql(Class targetType); + + List findProjectedByName(String name); + + @Query(value = "SELECT * FROM DUMMY_ENTITY", rowMapperClass = CustomRowMapper.class) + List findAllWithCustomMapper(); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE POINT_IN_TIME > :threshhold") + List after(@Param("threshhold") Instant threshhold); + + @Query("SELECT id_Prop from dummy_entity where id_Prop = :id") + DummyEntity withMissingColumn(@Param("id") Long id); + + boolean existsByNameIn(String... names); + + boolean existsByNameNotIn(String... 
names); + + @Query("SELECT * FROM dummy_entity WHERE id_prop = :#{myext.id}") + DummyEntity findWithSpEL(); + + boolean existsByName(String name); + + int countByName(String name); + + @Query("select unnest( :ids )") + List unnestPrimitive(@Param("ids") int[] ids); + + Page findPageByNameContains(String name, Pageable pageable); + + List findByNameContains(String name, Limit limit); + + Page findPageProjectionByName(String name, Pageable pageable); + + Slice findSliceByNameContains(String name, Pageable pageable); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE OFFSET_DATE_TIME > :threshhold") + List findByOffsetDateTime(@Param("threshhold") OffsetDateTime threshhold); + + @Modifying + @Query("UPDATE dummy_entity SET point_in_time = :start - interval '30 minutes' WHERE id_prop = :id") + void updateWithIntervalCalculation(@Param("id") Long id, @Param("start") LocalDateTime start); + + List findByFlagTrue(); + + List findByRef(int ref); + + List findByRef(AggregateReference ref); + + @Query("SELECT CAST(:hello AS CHAR(5)) FROM DUMMY_ENTITY") + @Nullable + String returnInput(@Nullable String hello); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE DIRECTION IN (:directions)") + List findByEnumTypeIn(Set directions); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE DIRECTION = :direction") + List findByEnumType(Direction direction); + + Optional findDtoByIdProp(Long idProp); + + Optional findAllArgsDtoByIdProp(Long idProp); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE (ID_PROP, NAME) IN (:tuples)") + List findByListInTuple(List tuples); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE BYTES IN (:bytes)") + List findByBytesIn(List bytes); + + @Query("SELECT * FROM DUMMY_ENTITY WHERE BYTES = :bytes") + List findByBytes(byte[] bytes); + } + + interface RootRepository extends ListCrudRepository { + List findAllByOrderByIdAsc(); + } + + interface WithDelimitedColumnRepository extends CrudRepository {} + + interface EntityWithSequenceRepository extends CrudRepository {} + + @Configuration 
+ @Import(TestConfiguration.class) + static class Config { + + @Autowired JdbcRepositoryFactory factory; + + @Bean + DummyEntityRepository dummyEntityRepository() { + return factory.getRepository(DummyEntityRepository.class); + } + + @Bean + RootRepository rootRepository() { + return factory.getRepository(RootRepository.class); + } + + @Bean + WithDelimitedColumnRepository withDelimitedColumnRepository() { + return factory.getRepository(WithDelimitedColumnRepository.class); + } + + @Bean + EntityWithSequenceRepository entityWithSequenceRepository() { + return factory.getRepository(EntityWithSequenceRepository.class); + } + + @Bean + NamedQueries namedQueries() throws IOException { + + PropertiesFactoryBean properties = new PropertiesFactoryBean(); + properties.setLocation(new ClassPathResource("META-INF/jdbc-named-queries.properties")); + properties.afterPropertiesSet(); + return new PropertiesBasedNamedQueries(properties.getObject()); + } + + @Bean + MyEventListener eventListener() { + return new MyEventListener(); + } + + @Bean + public QueryMethodEvaluationContextProvider extensionAware(List exts) { + return new ExtensionAwareQueryMethodEvaluationContextProvider(exts); + } + + @Bean + RepositoryFactoryCustomizer customizer(QueryMethodEvaluationContextProvider provider) { + return repositoryFactory -> repositoryFactory.setEvaluationContextProvider(provider); + } + + @Bean + public EvaluationContextExtension evaluationContextExtension() { + return new MyIdContextProvider(); + } + + } + + static final class Root { + + @Id private final Long id; + private final String name; + private final Intermediate intermediate; + @MappedCollection(idColumn = "ROOT_ID", keyColumn = "ROOT_KEY") private final List intermediates; + + public Root(Long id, String name, Intermediate intermediate, List intermediates) { + this.id = id; + this.name = name; + this.intermediate = intermediate; + this.intermediates = intermediates; + } + + public Long getId() { + return this.id; + } + + 
public String getName() { + return this.name; + } + + public Intermediate getIntermediate() { + return this.intermediate; + } + + public List getIntermediates() { + return this.intermediates; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Root other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (!Objects.equals(this$name, other$name)) + return false; + final Object this$intermediate = this.getIntermediate(); + final Object other$intermediate = other.getIntermediate(); + if (!Objects.equals(this$intermediate, other$intermediate)) + return false; + final Object this$intermediates = this.getIntermediates(); + final Object other$intermediates = other.getIntermediates(); + return Objects.equals(this$intermediates, other$intermediates); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + final Object $intermediate = this.getIntermediate(); + result = result * PRIME + ($intermediate == null ? 43 : $intermediate.hashCode()); + final Object $intermediates = this.getIntermediates(); + result = result * PRIME + ($intermediates == null ? 
43 : $intermediates.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryIntegrationTests.Root(id=" + this.getId() + ", name=" + this.getName() + ", intermediate=" + + this.getIntermediate() + ", intermediates=" + this.getIntermediates() + ")"; + } + } + + @Table("WITH_DELIMITED_COLUMN") + static class WithDelimitedColumn { + @Id Long id; + @Column("ORG.XTUNIT.IDENTIFIER") String identifier; + @Column("STYPE") String type; + + public Long getId() { + return this.id; + } + + public String getIdentifier() { + return this.identifier; + } + + public String getType() { + return this.type; + } + + public void setId(Long id) { + this.id = id; + } + + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + + public void setType(String type) { + this.type = type; + } + } + + static final class Intermediate { + + @Id private final Long id; + private final String name; + private final Leaf leaf; + @MappedCollection(idColumn = "INTERMEDIATE_ID", keyColumn = "INTERMEDIATE_KEY") private final List leaves; + + public Intermediate(Long id, String name, Leaf leaf, List leaves) { + this.id = id; + this.name = name; + this.leaf = leaf; + this.leaves = leaves; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Leaf getLeaf() { + return this.leaf; + } + + public List getLeaves() { + return this.leaves; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Intermediate other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (!Objects.equals(this$name, other$name)) + return false; + final Object this$leaf = this.getLeaf(); + final Object other$leaf = other.getLeaf(); + if (!Objects.equals(this$leaf, other$leaf)) 
+ return false; + final Object this$leaves = this.getLeaves(); + final Object other$leaves = other.getLeaves(); + return Objects.equals(this$leaves, other$leaves); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + final Object $leaf = this.getLeaf(); + result = result * PRIME + ($leaf == null ? 43 : $leaf.hashCode()); + final Object $leaves = this.getLeaves(); + result = result * PRIME + ($leaves == null ? 43 : $leaves.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryIntegrationTests.Intermediate(id=" + this.getId() + ", name=" + this.getName() + ", leaf=" + + this.getLeaf() + ", leaves=" + this.getLeaves() + ")"; + } + } + + static final class Leaf { + + @Id private final Long id; + private final String name; + + public Leaf(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Leaf other)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (!Objects.equals(this$id, other$id)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryIntegrationTests.Leaf(id=" + this.getId() + ", name=" + this.getName() + ")"; + } + } + + static class MyEventListener implements ApplicationListener> { + + private final List> events = new ArrayList<>(); + + @Override + public void onApplicationEvent(AbstractRelationalEvent event) { + events.add(event); + } + } + + // DATAJDBC-397 + public static class MyIdContextProvider implements EvaluationContextExtension { + @Override + public String getExtensionId() { + return "myext"; + } + + public static class ExtensionRoot { + // just public for testing purposes + public static Long ID = 1L; + + public Long getId() { + return ID; + } + } + + @Override + public Object getRootObject() { + return new ExtensionRoot(); + } + } + + private static DummyEntity createEntity() { + return createEntity("Entity Name"); + } + + private static DummyEntity createEntity(String entityName) { + return createEntity(entityName, it -> {}); + } + + private static DummyEntity createEntity(String entityName, Consumer customizer) { + + DummyEntity entity = new DummyEntity(); + entity.setName(entityName); + + customizer.accept(entity); + + return entity; + } + + static class EntityWithSequence { + + @Id + @Sequence(sequence = "ENTITY_SEQUENCE") private Long id; + + private String name; + + public EntityWithSequence(Long id, String name) { + this.id = id; + this.name = name; + } + + public EntityWithSequence(String name) { + this.name = name; + } + + public Long getId() { + return id; + } + + public String getName() { + return name; + } + } + + static class DummyEntity { + + String name; + Instant pointInTime; + OffsetDateTime offsetDateTime; + @Id private Long idProp; + boolean flag; + AggregateReference ref; + Direction direction; + byte[] bytes = new byte[] { 0, 0, 0, 0, 0, 0, 0, 0 }; + + public DummyEntity(String name) { + this.name = name; + } + + public DummyEntity() {} + + public String 
getName() { + return this.name; + } + + public Instant getPointInTime() { + return this.pointInTime; + } + + public OffsetDateTime getOffsetDateTime() { + return this.offsetDateTime; + } + + public Long getIdProp() { + return this.idProp; + } + + public boolean isFlag() { + return this.flag; + } + + public AggregateReference getRef() { + return this.ref; + } + + public Direction getDirection() { + return this.direction; + } + + public void setName(String name) { + this.name = name; + } + + public void setPointInTime(Instant pointInTime) { + this.pointInTime = pointInTime; + } + + public void setOffsetDateTime(OffsetDateTime offsetDateTime) { + this.offsetDateTime = offsetDateTime; + } + + public void setIdProp(Long idProp) { + this.idProp = idProp; + } + + public void setFlag(boolean flag) { + this.flag = flag; + } + + public void setRef(AggregateReference ref) { + this.ref = ref; + } + + public void setDirection(Direction direction) { + this.direction = direction; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + DummyEntity that = (DummyEntity) o; + return flag == that.flag && Objects.equals(name, that.name) && Objects.equals(pointInTime, that.pointInTime) + && Objects.equals(offsetDateTime, that.offsetDateTime) && Objects.equals(idProp, that.idProp) + && Objects.equals(ref, that.ref) && direction == that.direction; + } + + @Override + public int hashCode() { + return Objects.hash(name, pointInTime, offsetDateTime, idProp, flag, ref, direction); + } + + public void setBytes(byte[] bytes) { + this.bytes = bytes; + } + + public byte[] getBytes() { + return bytes; + } + + @Override + public String toString() { + return "DummyEntity{" + "name='" + name + '\'' + ", idProp=" + idProp + '}'; + } + } + + enum Direction { + LEFT, CENTER, RIGHT + } + + static class DummyDto { + @Id Long idProp; + String name; + AggregateReference ref; + + public DummyDto() {} + + public 
String getName() { + return name; + } + + public AggregateReference getRef() { + return ref; + } + } + + static class DummyAllArgsDto { + @Id Long idProp; + String name; + AggregateReference ref; + + public DummyAllArgsDto(Long idProp, String name, AggregateReference ref) { + this.idProp = idProp; + this.name = name; + this.ref = ref; + } + + public String getName() { + return name; + } + + public AggregateReference getRef() { + return ref; + } + } + + interface DummyProjection { + String getName(); + } + + static final class DtoProjection { + private final String name; + + public DtoProjection(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final DtoProjection other)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryIntegrationTests.DtoProjection(name=" + this.getName() + ")"; + } + } + + static class CustomRowMapper implements RowMapper { + + @Override + public DummyEntity mapRow(ResultSet rs, int rowNum) { + return new DummyEntity(); + } + } +} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryPropertyConversionIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryPropertyConversionIntegrationTests.java similarity index 64% rename from src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryPropertyConversionIntegrationTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryPropertyConversionIntegrationTests.java index 2dacef3e9a..1969900a22 100644 --- a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryPropertyConversionIntegrationTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryPropertyConversionIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,8 +17,7 @@ import static java.util.Collections.*; import static org.assertj.core.api.Assertions.*; - -import lombok.Data; +import static org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature.*; import java.math.BigDecimal; import java.math.BigInteger; @@ -27,68 +26,63 @@ import java.time.ZoneOffset; import java.util.Collections; import java.util.Date; +import java.util.Set; import org.assertj.core.api.Condition; import org.assertj.core.api.SoftAssertions; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; - +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.event.BeforeSave; import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.event.BeforeConvertEvent; import org.springframework.data.repository.CrudRepository; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import 
org.springframework.transaction.annotation.Transactional; /** * Tests storing and retrieving various data types that are considered essential and that might need conversion to * something the database driver can handle. * * @author Jens Schauder + * @author Thomas Lang + * @author Yunyung LEE + * @author Chirag Tailor */ -@ContextConfiguration -@Transactional +@IntegrationTest public class JdbcRepositoryPropertyConversionIntegrationTests { - @Configuration - @Import(TestConfiguration.class) - static class Config { - - @Autowired JdbcRepositoryFactory factory; - - @Bean - Class testClass() { - return JdbcRepositoryPropertyConversionIntegrationTests.class; - } + @Autowired DummyEntityRepository repository; - @Bean - DummyEntityRepository dummyEntityRepository() { - return factory.getRepository(DummyEntityRepository.class); - } + private static EntityWithColumnsRequiringConversions createDummyEntity() { - @Bean - ApplicationListener applicationListener() { - return (ApplicationListener) beforeInsert -> ((EntityWithColumnsRequiringConversions) beforeInsert - .getEntity()).setIdTimestamp(getNow()); - } + EntityWithColumnsRequiringConversions entity = new EntityWithColumnsRequiringConversions(); + entity.setSomeEnum(SomeEnum.VALUE); + entity.setBigDecimal(new BigDecimal("123456789012345678901234567890123456789012345678901234567890")); + entity.setBool(true); + // Postgres doesn't seem to be able to handle BigInts larger then a Long, since the driver reads them as Long + entity.setBigInteger(BigInteger.valueOf(Long.MAX_VALUE)); + entity.setDate(Date.from(getNow().toInstant(ZoneOffset.UTC))); + entity.setLocalDateTime(getNow()); + EntityWithColumnsRequiringConversionsRelation relation = new EntityWithColumnsRequiringConversionsRelation(); + relation.setData("DUMMY"); + entity.setRelation(singleton(relation)); + return entity; } - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new 
SpringMethodRule(); - - @Autowired DummyEntityRepository repository; + // DATAJDBC-119 + private static LocalDateTime getNow() { + return LocalDateTime.now().withNano(0); + } @Test // DATAJDBC-95 + @EnabledOnFeature(SUPPORTS_HUGE_NUMBERS) public void saveAndLoadAnEntity() { EntityWithColumnsRequiringConversions entity = repository.save(createDummyEntity()); @@ -107,6 +101,7 @@ public void saveAndLoadAnEntity() { } @Test // DATAJDBC-95 + @EnabledOnFeature(SUPPORTS_HUGE_NUMBERS) public void existsById() { EntityWithColumnsRequiringConversions entity = repository.save(createDummyEntity()); @@ -115,6 +110,7 @@ public void existsById() { } @Test // DATAJDBC-95 + @EnabledOnFeature(SUPPORTS_HUGE_NUMBERS) public void findAllById() { EntityWithColumnsRequiringConversions entity = repository.save(createDummyEntity()); @@ -123,6 +119,7 @@ public void findAllById() { } @Test // DATAJDBC-95 + @EnabledOnFeature(SUPPORTS_HUGE_NUMBERS) public void deleteAll() { EntityWithColumnsRequiringConversions entity = repository.save(createDummyEntity()); @@ -133,6 +130,7 @@ public void deleteAll() { } @Test // DATAJDBC-95 + @EnabledOnFeature(SUPPORTS_HUGE_NUMBERS) public void deleteById() { EntityWithColumnsRequiringConversions entity = repository.save(createDummyEntity()); @@ -142,25 +140,6 @@ public void deleteById() { assertThat(repository.findAll()).hasSize(0); } - private static EntityWithColumnsRequiringConversions createDummyEntity() { - - EntityWithColumnsRequiringConversions entity = new EntityWithColumnsRequiringConversions(); - entity.setSomeEnum(SomeEnum.VALUE); - entity.setBigDecimal(new BigDecimal("123456789012345678901234567890123456789012345678901234567890")); - entity.setBool(true); - // Postgres doesn't seem to be able to handle BigInts larger then a Long, since the driver reads them as Long - entity.setBigInteger(BigInteger.valueOf(Long.MAX_VALUE)); - entity.setDate(Date.from(getNow().toInstant(ZoneOffset.UTC))); - entity.setLocalDateTime(getNow()); - - return entity; - } 
- - // DATAJDBC-119 - private static LocalDateTime getNow() { - return LocalDateTime.now().withNano(0); - } - private Condition representingTheSameAs(Date other) { SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); @@ -169,29 +148,116 @@ private Condition representingTheSameAs(Date other) { return new Condition<>(date -> format.format(date).equals(expected), expected); } + enum SomeEnum { + VALUE + } + interface DummyEntityRepository extends CrudRepository {} - @Data + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + @Bean + ApplicationListener applicationListener() { + return (ApplicationListener) event -> ((EntityWithColumnsRequiringConversions) event + .getEntity()).setIdTimestamp(getNow()); + } + } + static class EntityWithColumnsRequiringConversions { + boolean bool; + SomeEnum someEnum; + BigDecimal bigDecimal; + BigInteger bigInteger; + Date date; + LocalDateTime localDateTime; // ensures conversion on id querying @Id private LocalDateTime idTimestamp; - boolean bool; + @MappedCollection(idColumn = "ID_TIMESTAMP") Set relation; - SomeEnum someEnum; + public boolean isBool() { + return this.bool; + } - BigDecimal bigDecimal; + public SomeEnum getSomeEnum() { + return this.someEnum; + } - BigInteger bigInteger; + public BigDecimal getBigDecimal() { + return this.bigDecimal; + } - Date date; + public BigInteger getBigInteger() { + return this.bigInteger; + } - LocalDateTime localDateTime; + public Date getDate() { + return this.date; + } + + public LocalDateTime getLocalDateTime() { + return this.localDateTime; + } + + public LocalDateTime getIdTimestamp() { + return this.idTimestamp; + } + + public Set getRelation() { + return this.relation; + } + + public void setBool(boolean bool) { + this.bool = bool; + } + + public void setSomeEnum(SomeEnum someEnum) 
{ + this.someEnum = someEnum; + } + + public void setBigDecimal(BigDecimal bigDecimal) { + this.bigDecimal = bigDecimal; + } + + public void setBigInteger(BigInteger bigInteger) { + this.bigInteger = bigInteger; + } + + public void setDate(Date date) { + this.date = date; + } + + public void setLocalDateTime(LocalDateTime localDateTime) { + this.localDateTime = localDateTime; + } + + public void setIdTimestamp(LocalDateTime idTimestamp) { + this.idTimestamp = idTimestamp; + } + public void setRelation(Set relation) { + this.relation = relation; + } } - enum SomeEnum { - VALUE + // DATAJDBC-349 + static class EntityWithColumnsRequiringConversionsRelation { + String data; + + public String getData() { + return this.data; + } + + public void setData(String data) { + this.data = data; + } } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryResultSetExtractorIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryResultSetExtractorIntegrationTests.java new file mode 100644 index 0000000000..ad66908c4f --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryResultSetExtractorIntegrationTests.java @@ -0,0 +1,223 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.RecoverableDataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Very simple use cases for creation and usage of {@link ResultSetExtractor}s in JdbcRepository. 
+ * + * @author Evgeni Dimitrov + */ +@IntegrationTest +public class JdbcRepositoryResultSetExtractorIntegrationTests { + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + PersonRepository personEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(PersonRepository.class); + } + + } + + @Autowired NamedParameterJdbcTemplate template; + @Autowired PersonRepository personRepository; + + @Test // DATAJDBC-290 + public void findAllPeopleWithAdressesReturnsEmptyWhenNoneFound() { + + // NOT saving anything, so DB is empty + + assertThat(personRepository.findAllPeopleWithAddresses()).isEmpty(); + } + + @Test // DATAJDBC-290 + public void findAllPeopleWithAddressesReturnsOnePersonWithoutAddresses() { + + personRepository.save(new Person(null, "Joe", null)); + + assertThat(personRepository.findAllPeopleWithAddresses()).hasSize(1); + } + + @Test // DATAJDBC-290 + public void findAllPeopleWithAddressesReturnsOnePersonWithAddresses() { + + final String personName = "Joe"; + Person savedPerson = personRepository.save(new Person(null, personName, null)); + + String street1 = "Some Street"; + String street2 = "Some other Street"; + + MapSqlParameterSource paramsAddress1 = buildAddressParameters(savedPerson.getId(), street1); + template.update("insert into address (street, person_id) values (:street, :personId)", paramsAddress1); + + MapSqlParameterSource paramsAddress2 = buildAddressParameters(savedPerson.getId(), street2); + template.update("insert into address (street, person_id) values (:street, :personId)", paramsAddress2); + + List people = personRepository.findAllPeopleWithAddresses(); + + assertThat(people).hasSize(1); + Person person = people.get(0); + assertThat(person.getName()).isEqualTo(personName); + assertThat(person.getAddresses()).hasSize(2); + assertThat(person.getAddresses()).extracting(a -> a.getStreet()).containsExactlyInAnyOrder(street1, street2); + } + + private MapSqlParameterSource 
buildAddressParameters(Long id, String streetName) { + + MapSqlParameterSource params = new MapSqlParameterSource(); + params.addValue("street", streetName, Types.VARCHAR); + params.addValue("personId", id, Types.NUMERIC); + + return params; + } + + interface PersonRepository extends CrudRepository { + + @Query( + value = "select p.id, p.name, a.id addrId, a.street from person p left join address a on(p.id = a.person_id)", + resultSetExtractorClass = PersonResultSetExtractor.class) + List findAllPeopleWithAddresses(); + } + + static class Person { + + @Id + private Long id; + private String name; + private List
addresses; + + public Person(Long id, String name, List
addresses) { + this.id = id; + this.name = name; + this.addresses = addresses; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public List
getAddresses() { + return this.addresses; + } + + public void setId(Long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setAddresses(List
addresses) { + this.addresses = addresses; + } + } + + static class Address { + + @Id + private Long id; + private String street; + + public Address(Long id, String street) { + this.id = id; + this.street = street; + } + + public Long getId() { + return this.id; + } + + public String getStreet() { + return this.street; + } + + public void setId(Long id) { + this.id = id; + } + + public void setStreet(String street) { + this.street = street; + } + } + + static class PersonResultSetExtractor implements ResultSetExtractor> { + + @Override + public List extractData(ResultSet rs) throws SQLException, DataAccessException { + + Map peopleById = new HashMap<>(); + + while (rs.next()) { + + long personId = rs.getLong("id"); + Person currentPerson = peopleById.computeIfAbsent(personId, t -> { + + try { + return new Person(personId, rs.getString("name"), new ArrayList<>()); + } catch (SQLException e) { + throw new RecoverableDataAccessException("Error mapping Person", e); + } + }); + + if (currentPerson.getAddresses() == null) { + currentPerson.setAddresses(new ArrayList<>()); + } + + long addrId = rs.getLong("addrId"); + if (!rs.wasNull()) { + currentPerson.getAddresses().add(new Address(addrId, rs.getString("street"))); + } + } + + return new ArrayList<>(peopleById.values()); + } + + } +} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests.java similarity index 76% rename from src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsIntegrationTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests.java index c0f99d7e42..84c696b27b 100644 --- 
a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsIntegrationTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,58 +18,68 @@ import static org.assertj.core.api.Assertions.*; import junit.framework.AssertionFailedError; -import lombok.Data; -import lombok.RequiredArgsConstructor; import java.util.HashMap; import java.util.HashSet; import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.data.annotation.Id; import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.event.BeforeConvertEvent; import 
org.springframework.data.repository.CrudRepository; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.transaction.annotation.Transactional; /** * Very simple use cases for creation and usage of JdbcRepositories. * * @author Jens Schauder + * @author Chirag Tailor */ -@ContextConfiguration -@Transactional -public class JdbcRepositoryWithCollectionsIntegrationTests { +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests { + + static AtomicLong id = new AtomicLong(0); @Configuration @Import(TestConfiguration.class) static class Config { - @Autowired JdbcRepositoryFactory factory; - @Bean - Class testClass() { - return JdbcRepositoryWithCollectionsIntegrationTests.class; + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); } @Bean - DummyEntityRepository dummyEntityRepository() { - return factory.getRepository(DummyEntityRepository.class); + public ApplicationListener idSetting() { + + return (ApplicationListener) event -> { + + if (event.getEntity() instanceof DummyEntity dummyEntity) { + setIds(dummyEntity); + } + }; } - } - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); + private void setIds(DummyEntity dummyEntity) { + + if (dummyEntity.getId() == null) { + dummyEntity.setId(id.incrementAndGet()); + } + + } + } @Autowired NamedParameterJdbcTemplate template; @Autowired DummyEntityRepository repository; @@ -164,7 +174,7 @@ public void updateSet() { .containsExactlyInAnyOrder( // tuple(element2.id, "two changed"), // tuple(element3.id, 
"three") // - ); + ); Long count = template.queryForObject("select count(1) from Element", new HashMap<>(), Long.class); assertThat(count).isEqualTo(2); @@ -190,9 +200,6 @@ public void deletingWithSet() { assertThat(count).isEqualTo(0); } - - - private Element createElement(String content) { Element element = new Element(); @@ -209,20 +216,46 @@ private static DummyEntity createDummyEntity() { interface DummyEntityRepository extends CrudRepository {} - @Data static class DummyEntity { - @Id private Long id; + @Id + private Long id; String name; Set content = new HashSet<>(); + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Set getContent() { + return this.content; + } + + public void setId(Long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setContent(Set content) { + this.content = content; + } } - @RequiredArgsConstructor static class Element { - @Id private Long id; + @Id + private Long id; String content; + + public Element() { + } } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsChainHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsChainHsqlIntegrationTests.java new file mode 100644 index 0000000000..fe95e85f94 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsChainHsqlIntegrationTests.java @@ -0,0 +1,121 @@ +package org.springframework.data.jdbc.repository; + +import static org.assertj.core.api.Assertions.*; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import 
org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Integration tests with collections chain. + * + * @author Yunyoung LEE + * @author Nikita Konev + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +class JdbcRepositoryWithCollectionsChainHsqlIntegrationTests { + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + + private static DummyEntity createDummyEntity() { + + DummyEntity entity = new DummyEntity(); + entity.name = "Entity Name"; + return entity; + } + + @Test // DATAJDBC-551 + void deleteByName() { + + ChildElement element1 = createChildElement("one"); + ChildElement element2 = createChildElement("two"); + + DummyEntity entity = createDummyEntity(); + entity.content.add(element1); + entity.content.add(element2); + + entity = repository.save(entity); + + assertThat(repository.deleteByName("Entity Name")).isEqualTo(1); + + assertThat(repository.findById(entity.id)).isEmpty(); + + Long count = template.queryForObject("select count(1) from grand_child_element", new HashMap<>(), Long.class); + assertThat(count).isEqualTo(0); + } + + private ChildElement createChildElement(String name) { + + ChildElement element = new ChildElement(); + element.name = name; + element.content.add(createGrandChildElement(name + "1")); + element.content.add(createGrandChildElement(name + "2")); + return element; + } + + private GrandChildElement createGrandChildElement(String content) { + + GrandChildElement 
element = new GrandChildElement(); + element.content = content; + return element; + } + + interface DummyEntityRepository extends CrudRepository { + long deleteByName(String name); + } + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Autowired JdbcRepositoryFactory factory; + + @Bean + Class testClass() { + return JdbcRepositoryWithCollectionsChainHsqlIntegrationTests.class; + } + + @Bean + DummyEntityRepository dummyEntityRepository() { + return factory.getRepository(DummyEntityRepository.class); + } + } + + static class DummyEntity { + + String name; + Set content = new HashSet<>(); + @Id private Long id; + + } + + static class ChildElement { + + String name; + Set content = new HashSet<>(); + @Id private Long id; + } + + static class GrandChildElement { + + String content; + @Id private Long id; + } + +} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithListsIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsIntegrationTests.java similarity index 63% rename from src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithListsIntegrationTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsIntegrationTests.java index 23607d0fe2..3bb36ed3de 100644 --- a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithListsIntegrationTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithCollectionsIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,71 +15,51 @@ */ package org.springframework.data.jdbc.repository; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature.*; + import junit.framework.AssertionFailedError; -import lombok.Data; -import lombok.RequiredArgsConstructor; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; + +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.data.annotation.Id; import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; import org.springframework.data.jdbc.testing.TestConfiguration; import org.springframework.data.repository.CrudRepository; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.transaction.annotation.Transactional; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -import static org.assertj.core.api.Assertions.*; /** - * Very simple use cases for creation and usage of JdbcRepositories for Entities that contain {@link List}s. 
+ * Very simple use cases for creation and usage of JdbcRepositories. * * @author Jens Schauder + * @author Thomas Lang + * @author Yunyoung LEE + * @author Nikita Konev */ -@ContextConfiguration -@Transactional -public class JdbcRepositoryWithListsIntegrationTests { +@IntegrationTest +class JdbcRepositoryWithCollectionsIntegrationTests { - @Configuration - @Import(TestConfiguration.class) - static class Config { + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; - @Autowired - JdbcRepositoryFactory factory; - - @Bean - Class testClass() { - return JdbcRepositoryWithListsIntegrationTests.class; - } + private static DummyEntity createDummyEntity() { - @Bean - DummyEntityRepository dummyEntityRepository() { - return factory.getRepository(DummyEntityRepository.class); - } + DummyEntity entity = new DummyEntity(); + entity.setName("Entity Name"); + return entity; } - @ClassRule - public static final SpringClassRule classRule = new SpringClassRule(); - @Rule - public SpringMethodRule methodRule = new SpringMethodRule(); - - @Autowired - NamedParameterJdbcTemplate template; - @Autowired - DummyEntityRepository repository; - - @Test // DATAJDBC-130 - public void saveAndLoadEmptyList() { + @Test // DATAJDBC-113 + void saveAndLoadEmptySet() { DummyEntity entity = repository.save(createDummyEntity()); @@ -92,8 +72,8 @@ public void saveAndLoadEmptyList() { .isEmpty(); } - @Test // DATAJDBC-130 - public void saveAndLoadNonEmptyList() { + @Test // DATAJDBC-113 + void saveAndLoadNonEmptySet() { Element element1 = new Element(); Element element2 = new Element(); @@ -105,7 +85,7 @@ public void saveAndLoadNonEmptyList() { entity = repository.save(entity); assertThat(entity.id).isNotNull(); - assertThat(entity.content).allMatch(v -> v.id != null); + assertThat(entity.content).allMatch(element -> element.id != null); DummyEntity reloaded = repository.findById(entity.id).orElseThrow(AssertionFailedError::new); @@ -115,8 +95,8 @@ public 
void saveAndLoadNonEmptyList() { .containsExactlyInAnyOrder(element1.id, element2.id); } - @Test // DATAJDBC-130 - public void findAllLoadsList() { + @Test // DATAJDBC-113 + void findAllLoadsCollection() { Element element1 = new Element(); Element element2 = new Element(); @@ -128,19 +108,18 @@ public void findAllLoadsList() { entity = repository.save(entity); assertThat(entity.id).isNotNull(); - assertThat(entity.content).allMatch(v -> v.id != null); + assertThat(entity.content).allMatch(element -> element.id != null); Iterable reloaded = repository.findAll(); - reloaded.forEach(de -> System.out.println("id " + de.id + " content " + de.content.iterator().next().content)); - assertThat(reloaded) // .extracting(e -> e.id, e -> e.content.size()) // .containsExactly(tuple(entity.id, entity.content.size())); } - @Test // DATAJDBC-130 - public void updateList() { + @Test // DATAJDBC-113 + @EnabledOnFeature(SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) + void updateSet() { Element element1 = createElement("one"); Element element2 = createElement("two"); @@ -159,27 +138,25 @@ public void updateList() { entity = repository.save(entity); assertThat(entity.id).isNotNull(); - assertThat(entity.content).allMatch(v -> v.id != null); + assertThat(entity.content).allMatch(element -> element.id != null); DummyEntity reloaded = repository.findById(entity.id).orElseThrow(AssertionFailedError::new); // the elements got properly updated and reloaded assertThat(reloaded.content) // - .isNotNull(); - - assertThat(reloaded.content) // + .isNotNull() // .extracting(e -> e.id, e -> e.content) // - .containsExactly( // + .containsExactlyInAnyOrder( // tuple(element2.id, "two changed"), // tuple(element3.id, "three") // ); - Long count = template.queryForObject("SELECT count(1) FROM Element", new HashMap<>(), Long.class); + Long count = template.queryForObject("select count(1) from Element", new HashMap<>(), Long.class); assertThat(count).isEqualTo(2); } - @Test // DATAJDBC-130 - public 
void deletingWithList() { + @Test // DATAJDBC-113 + void deletingWithSet() { Element element1 = createElement("one"); Element element2 = createElement("two"); @@ -194,10 +171,30 @@ public void deletingWithList() { assertThat(repository.findById(entity.id)).isEmpty(); - Long count = template.queryForObject("SELECT count(1) FROM Element", new HashMap<>(), Long.class); + Long count = template.queryForObject("select count(1) from Element", new HashMap<>(), Long.class); assertThat(count).isEqualTo(0); } + @Test // GH-771 + void deleteByName() { + + Element element1 = createElement("one"); + Element element2 = createElement("two"); + + DummyEntity entity = createDummyEntity(); + entity.content.add(element1); + entity.content.add(element2); + + entity = repository.save(entity); + + assertThat(repository.deleteByName("Entity Name")).isEqualTo(1); + + assertThat(repository.findById(entity.id)).isEmpty(); + + Long count = template.queryForObject("select count(1) from Element", new HashMap<>(), Long.class); + assertThat(count).isEqualTo(0); + } + private Element createElement(String content) { Element element = new Element(); @@ -205,32 +202,55 @@ private Element createElement(String content) { return element; } - private static DummyEntity createDummyEntity() { + interface DummyEntityRepository extends CrudRepository { + long deleteByName(String name); + } - DummyEntity entity = new DummyEntity(); - entity.setName("Entity Name"); - return entity; - } + @Configuration + @Import(TestConfiguration.class) + static class Config { - interface DummyEntityRepository extends CrudRepository { + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } } - @Data static class DummyEntity { - @Id - private Long id; String name; - List content = new ArrayList<>(); + Set content = new HashSet<>(); + @Id private Long id; + + public String getName() { + return this.name; + } + + public Set 
getContent() { + return this.content; + } + public Long getId() { + return this.id; + } + + public void setName(String name) { + this.name = name; + } + + public void setContent(Set content) { + this.content = content; + } + + public void setId(Long id) { + this.id = id; + } } - @RequiredArgsConstructor static class Element { - @Id - private Long id; String content; + @Id private Long id; } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithListsIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithListsIntegrationTests.java new file mode 100644 index 0000000000..100a6ae7a3 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithListsIntegrationTests.java @@ -0,0 +1,319 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature.*; + +import junit.framework.AssertionFailedError; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Very simple use cases for creation and usage of JdbcRepositories for Entities that contain {@link List}s. 
+ * + * @author Jens Schauder + * @author Thomas Lang + * @author Chirag Tailor + */ +@IntegrationTest +public class JdbcRepositoryWithListsIntegrationTests { + + @Autowired NamedParameterJdbcTemplate template; + @Autowired DummyEntityRepository repository; + @Autowired RootRepository rootRepository; + + private static DummyEntity createDummyEntity() { + + DummyEntity entity = new DummyEntity(null, "Entity Name", new ArrayList<>()); + return entity; + } + + @Test // DATAJDBC-130 + public void saveAndLoadEmptyList() { + + DummyEntity entity = repository.save(createDummyEntity()); + + assertThat(entity.id).isNotNull(); + + DummyEntity reloaded = repository.findById(entity.id).orElseThrow(AssertionFailedError::new); + + assertThat(reloaded.content) // + .isNotNull() // + .isEmpty(); + } + + @Test // DATAJDBC-130 + public void saveAndLoadNonEmptyList() { + + Element element1 = new Element(); + Element element2 = new Element(); + + DummyEntity entity = createDummyEntity(); + entity.content.add(element1); + entity.content.add(element2); + + entity = repository.save(entity); + + assertThat(entity.id).isNotNull(); + assertThat(entity.content).allMatch(v -> v.id != null); + + DummyEntity reloaded = repository.findById(entity.id).orElseThrow(AssertionFailedError::new); + + assertThat(reloaded.content) // + .isNotNull() // + .extracting(e -> e.id) // + .containsExactlyInAnyOrder(entity.content.get(0).id, entity.content.get(1).id); + } + + @Test // GH-1159 + void saveAndLoadNonEmptyNestedList() { + Root root = new Root(); + Intermediate intermediate1 = new Intermediate(); + root.intermediates.add(intermediate1); + Intermediate intermediate2 = new Intermediate(); + root.intermediates.add(intermediate2); + Leaf leaf1 = new Leaf("leaf1"); + Leaf leaf2 = new Leaf("leaf2"); + intermediate1.leaves.addAll(asList(leaf1, leaf2)); + Leaf leaf3 = new Leaf("leaf3"); + Leaf leaf4 = new Leaf("leaf4"); + intermediate2.leaves.addAll(asList(leaf3, leaf4)); + + rootRepository.save(root); + + 
assertThat(root.id).isNotNull(); + assertThat(root.intermediates).allMatch(v -> v.id != null); + + Root reloaded = rootRepository.findById(root.id).orElseThrow(AssertionFailedError::new); + assertThat(reloaded.intermediates.get(0).leaves).containsExactly(leaf1, leaf2); + assertThat(reloaded.intermediates.get(1).leaves).containsExactly(leaf3, leaf4); + } + + @Test // DATAJDBC-130 + public void findAllLoadsList() { + + Element element1 = new Element(); + Element element2 = new Element(); + + DummyEntity entity = createDummyEntity(); + entity.content.add(element1); + entity.content.add(element2); + + entity = repository.save(entity); + + assertThat(entity.id).isNotNull(); + assertThat(entity.content).allMatch(v -> v.id != null); + + Iterable reloaded = repository.findAll(); + + assertThat(reloaded) // + .extracting(e -> e.id, e -> e.content.size()) // + .containsExactly(tuple(entity.id, entity.content.size())); + } + + @Test // DATAJDBC-130 + @EnabledOnFeature(SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) + public void updateList() { + + Element element1 = new Element("one"); + Element element2 = new Element("two"); + Element element3 = new Element("three"); + + DummyEntity entity = createDummyEntity(); + entity.content.add(element1); + entity.content.add(element2); + + entity = repository.save(entity); + + entity.content.remove(0); + entity.content.set(0, new Element(entity.content.get(0).id, "two changed")); + entity.content.add(element3); + + entity = repository.save(entity); + + assertThat(entity.id).isNotNull(); + assertThat(entity.content).allMatch(v -> v.id != null); + assertThat(entity.content).hasSize(2); + + DummyEntity reloaded = repository.findById(entity.id).orElseThrow(AssertionFailedError::new); + + // the elements got properly updated and reloaded + assertThat(reloaded.content) // + .isNotNull(); + + assertThat(reloaded.content) // + .extracting(e -> e.id, e -> e.content) // + .containsExactly( // + tuple(entity.content.get(0).id, "two changed"), // + 
tuple(entity.content.get(1).id, "three") // + ); + + Long count = template.queryForObject("SELECT count(1) FROM Element", new HashMap<>(), Long.class); + assertThat(count).isEqualTo(2); + } + + @Test // DATAJDBC-130 + public void deletingWithList() { + + Element element1 = new Element("one"); + Element element2 = new Element("two"); + + DummyEntity entity = createDummyEntity(); + entity.content.add(element1); + entity.content.add(element2); + + entity = repository.save(entity); + + repository.deleteById(entity.id); + + assertThat(repository.findById(entity.id)).isEmpty(); + + Long count = template.queryForObject("SELECT count(1) FROM Element", new HashMap<>(), Long.class); + assertThat(count).isEqualTo(0); + } + + interface DummyEntityRepository extends CrudRepository {} + + interface RootRepository extends CrudRepository {} + + @Configuration + @Import(TestConfiguration.class) + static class Config { + + @Bean + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(DummyEntityRepository.class); + } + + @Bean + RootRepository rootRepository(JdbcRepositoryFactory factory) { + return factory.getRepository(RootRepository.class); + } + } + + record DummyEntity(@Id Long id, String name, List content) { + } + + record Element(@Id Long id, String content) { + + @PersistenceCreator + Element {} + + Element() { + this(null, null); + } + + Element(String content) { + this(null, content); + } + + } + + static class Root { + @Id private Long id; + List intermediates = new ArrayList<>(); + + public Long getId() { + return this.id; + } + + public List getIntermediates() { + return this.intermediates; + } + + public void setId(Long id) { + this.id = id; + } + + public void setIntermediates(List intermediates) { + this.intermediates = intermediates; + } + } + + static class Intermediate { + @Id private Long id; + List leaves = new ArrayList<>(); + + public Long getId() { + return this.id; + } + + public List getLeaves() { + 
return this.leaves; + } + + public void setId(Long id) { + this.id = id; + } + + public void setLeaves(List leaves) { + this.leaves = leaves; + } + } + + static final class Leaf { + private final String name; + + public Leaf(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof final Leaf other)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + return Objects.equals(this$name, other$name); + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + return result; + } + + public String toString() { + return "JdbcRepositoryWithListsIntegrationTests.Leaf(name=" + this.getName() + ")"; + } + } +} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithMapsIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithMapsIntegrationTests.java similarity index 84% rename from src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithMapsIntegrationTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithMapsIntegrationTests.java index b5e7419108..bd1c07156e 100644 --- a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithMapsIntegrationTests.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryWithMapsIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,60 +16,45 @@ package org.springframework.data.jdbc.repository; import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature.*; import junit.framework.AssertionFailedError; -import lombok.Data; -import lombok.RequiredArgsConstructor; import java.util.HashMap; import java.util.Map; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.data.annotation.Id; import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.jdbc.testing.EnabledOnFeature; +import org.springframework.data.jdbc.testing.IntegrationTest; import org.springframework.data.jdbc.testing.TestConfiguration; import org.springframework.data.repository.CrudRepository; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.transaction.annotation.Transactional; /** * Very simple use cases for creation and usage of JdbcRepositories for Entities that contain {@link java.util.Map}s. 
* * @author Jens Schauder + * @author Thomas Lang */ -@ContextConfiguration -@Transactional +@IntegrationTest public class JdbcRepositoryWithMapsIntegrationTests { @Configuration @Import(TestConfiguration.class) static class Config { - @Autowired JdbcRepositoryFactory factory; - @Bean - Class testClass() { - return JdbcRepositoryWithMapsIntegrationTests.class; - } - - @Bean - DummyEntityRepository dummyEntityRepository() { + DummyEntityRepository dummyEntityRepository(JdbcRepositoryFactory factory) { return factory.getRepository(DummyEntityRepository.class); } } - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); - @Autowired NamedParameterJdbcTemplate template; @Autowired DummyEntityRepository repository; @@ -91,7 +76,9 @@ public void saveAndLoadEmptyMap() { public void saveAndLoadNonEmptyMap() { Element element1 = new Element(); + element1.content = "element 1"; Element element2 = new Element(); + element2.content = "element 2"; DummyEntity entity = createDummyEntity(); entity.content.put("one", element1); @@ -127,14 +114,13 @@ public void findAllLoadsMap() { Iterable reloaded = repository.findAll(); - reloaded.forEach(de -> System.out.println("id " + de.id + " content " + de.content.values().iterator().next().content)); - assertThat(reloaded) // .extracting(e -> e.id, e -> e.content.size()) // .containsExactly(tuple(entity.id, entity.content.size())); } @Test // DATAJDBC-131 + @EnabledOnFeature(SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES) public void updateMap() { Element element1 = createElement("one"); @@ -167,7 +153,7 @@ public void updateMap() { .containsExactlyInAnyOrder( // tuple("two", element2.id, "two changed"), // tuple("three", element3.id, "three") // - ); + ); Long count = template.queryForObject("select count(1) from Element", new HashMap<>(), Long.class); assertThat(count).isEqualTo(2); @@ -209,20 +195,43 @@ private static DummyEntity 
createDummyEntity() { interface DummyEntityRepository extends CrudRepository {} - @Data static class DummyEntity { @Id private Long id; String name; Map content = new HashMap<>(); + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Map getContent() { + return this.content; + } + + public void setId(Long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setContent(Map content) { + this.content = content; + } } - @RequiredArgsConstructor static class Element { @Id private Long id; String content; + + public Element() {} } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/SimpleJdbcRepositoryEventsUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/SimpleJdbcRepositoryEventsUnitTests.java new file mode 100644 index 0000000000..e15ce6b68f --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/SimpleJdbcRepositoryEventsUnitTests.java @@ -0,0 +1,362 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static java.util.Arrays.asList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.groups.Tuple.tuple; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.stubbing.Answer; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.jdbc.core.convert.DefaultDataAccessStrategy; +import org.springframework.data.jdbc.core.convert.DefaultJdbcTypeFactory; +import org.springframework.data.jdbc.core.convert.DelegatingDataAccessStrategy; +import org.springframework.data.jdbc.core.convert.InsertStrategyFactory; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.jdbc.core.convert.SqlGeneratorSource; +import org.springframework.data.jdbc.core.convert.SqlParametersFactory; +import org.springframework.data.jdbc.core.dialect.JdbcH2Dialect; +import org.springframework.data.jdbc.core.dialect.JdbcHsqlDbDialect; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import 
org.springframework.data.jdbc.repository.support.SimpleJdbcRepository; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.event.AfterConvertEvent; +import org.springframework.data.relational.core.mapping.event.AfterDeleteEvent; +import org.springframework.data.relational.core.mapping.event.AfterSaveEvent; +import org.springframework.data.relational.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.relational.core.mapping.event.BeforeDeleteEvent; +import org.springframework.data.relational.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.relational.core.mapping.event.Identifier; +import org.springframework.data.relational.core.mapping.event.RelationalEvent; +import org.springframework.data.relational.core.mapping.event.WithId; +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.repository.PagingAndSortingRepository; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.jdbc.support.KeyHolder; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + + +/** + * Unit tests for application events via {@link SimpleJdbcRepository}. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Oliver Gierke + * @author Myeonghyeon Lee + * @author Milan Milanov + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +class SimpleJdbcRepositoryEventsUnitTests { + + private static final long generatedId = 4711L; + + private final CollectingEventPublisher publisher = new CollectingEventPublisher(); + + private DummyEntityRepository repository; + private DefaultDataAccessStrategy dataAccessStrategy; + + @BeforeEach + void before() { + + RelationalMappingContext context = new JdbcMappingContext(); + NamedParameterJdbcOperations operations = createIdGeneratingOperations(); + + Dialect dialect = JdbcHsqlDbDialect.INSTANCE; + DelegatingDataAccessStrategy delegatingDataAccessStrategy = new DelegatingDataAccessStrategy(); + JdbcConverter converter = new MappingJdbcConverter(context, delegatingDataAccessStrategy, + new JdbcCustomConversions(), new DefaultJdbcTypeFactory(operations.getJdbcOperations())); + SqlGeneratorSource generatorSource = new SqlGeneratorSource(context, converter, dialect); + SqlParametersFactory sqlParametersFactory = new SqlParametersFactory(context, converter); + InsertStrategyFactory insertStrategyFactory = new InsertStrategyFactory(operations, dialect); + + this.dataAccessStrategy = spy(new DefaultDataAccessStrategy(generatorSource, context, converter, operations, + sqlParametersFactory, insertStrategyFactory)); + delegatingDataAccessStrategy.setDelegate(dataAccessStrategy); + doReturn(true).when(dataAccessStrategy).update(any(), any()); + + JdbcRepositoryFactory factory = new JdbcRepositoryFactory(dataAccessStrategy, context, converter, + JdbcH2Dialect.INSTANCE, publisher, operations); + + this.repository = factory.getRepository(DummyEntityRepository.class); + } + + @Test // DATAJDBC-99 + @SuppressWarnings("rawtypes") + void publishesEventsOnSave() { + + DummyEntity entity = new DummyEntity(23L); + + repository.save(entity); + + assertThat(publisher.events) // + 
.extracting(e -> (Class) e.getClass()) // + .containsExactly( // + BeforeConvertEvent.class, // + BeforeSaveEvent.class, // + AfterSaveEvent.class // + ); + } + + @Test // DATAJDBC-99 + @SuppressWarnings("rawtypes") + void publishesEventsOnSaveMany() { + + DummyEntity entity1 = new DummyEntity(null); + DummyEntity entity2 = new DummyEntity(23L); + + repository.saveAll(asList(entity1, entity2)); + + assertThat(publisher.events) // + .extracting(RelationalEvent::getClass, e -> ((DummyEntity) e.getEntity()).getId()) // + .containsExactly( // + tuple(BeforeConvertEvent.class, null), // + tuple(BeforeSaveEvent.class, null), // + tuple(BeforeConvertEvent.class, 23L), // + tuple(BeforeSaveEvent.class, 23L), // + tuple(AfterSaveEvent.class, generatedId), // + tuple(AfterSaveEvent.class, 23L) // + ); + } + + @Test // DATAJDBC-99 + void publishesEventsOnDelete() { + + DummyEntity entity = new DummyEntity(23L); + + repository.delete(entity); + + assertThat(publisher.events).extracting( // + RelationalEvent::getClass, // + this::getEntity, // + this::getId // + ).containsExactly( // + tuple(BeforeDeleteEvent.class, entity, Identifier.of(23L)), // + tuple(AfterDeleteEvent.class, entity, Identifier.of(23L)) // + ); + } + + private Identifier getId(RelationalEvent e) { + return ((WithId) e).getId(); + } + + @Nullable + private Object getEntity(RelationalEvent e) { + return e.getEntity(); + } + + @Test // DATAJDBC-99 + @SuppressWarnings("rawtypes") + void publishesEventsOnDeleteById() { + + repository.deleteById(23L); + + assertThat(publisher.events) // + .extracting(e -> (Class) e.getClass()) // + .containsExactly( // + BeforeDeleteEvent.class, // + AfterDeleteEvent.class // + ); + } + + @Test // DATAJDBC-197 + @SuppressWarnings("rawtypes") + void publishesEventsOnFindAll() { + + DummyEntity entity1 = new DummyEntity(42L); + DummyEntity entity2 = new DummyEntity(23L); + + doReturn(asList(entity1, entity2)).when(dataAccessStrategy).findAll(any()); + + repository.findAll(); + + 
assertThat(publisher.events) // + .extracting(e -> (Class) e.getClass()) // + .containsExactly( // + AfterConvertEvent.class, // + AfterConvertEvent.class // + ); + } + + @Test // DATAJDBC-197 + @SuppressWarnings("rawtypes") + void publishesEventsOnFindAllById() { + + DummyEntity entity1 = new DummyEntity(42L); + DummyEntity entity2 = new DummyEntity(23L); + + doReturn(asList(entity1, entity2)).when(dataAccessStrategy).findAllById(any(), any()); + + repository.findAllById(asList(42L, 23L)); + + assertThat(publisher.events) // + .extracting(e -> (Class) e.getClass()) // + .containsExactly( // + AfterConvertEvent.class, // + AfterConvertEvent.class // + ); + } + + @Test // DATAJDBC-197 + @SuppressWarnings("rawtypes") + void publishesEventsOnFindById() { + + DummyEntity entity1 = new DummyEntity(23L); + + doReturn(entity1).when(dataAccessStrategy).findById(eq(23L), any()); + + repository.findById(23L); + + assertThat(publisher.events) // + .extracting(e -> (Class) e.getClass()) // + .containsExactly( // + AfterConvertEvent.class // + ); + } + + @Test // DATAJDBC-101 + @SuppressWarnings("rawtypes") + void publishesEventsOnFindAllSorted() { + + DummyEntity entity1 = new DummyEntity(42L); + DummyEntity entity2 = new DummyEntity(23L); + + doReturn(asList(entity1, entity2)).when(dataAccessStrategy).findAll(any(), any(Sort.class)); + + repository.findAll(Sort.by("field")); + + assertThat(publisher.events) // + .extracting(e -> (Class) e.getClass()) // + .containsExactly( // + AfterConvertEvent.class, // + AfterConvertEvent.class // + ); + } + + @Test // DATAJDBC-101 + @SuppressWarnings("rawtypes") + void publishesEventsOnFindAllPaged() { + + DummyEntity entity1 = new DummyEntity(42L); + DummyEntity entity2 = new DummyEntity(23L); + + doReturn(asList(entity1, entity2)).when(dataAccessStrategy).findAll(any(), any(Pageable.class)); + doReturn(2L).when(dataAccessStrategy).count(any()); + + repository.findAll(PageRequest.of(0, 20)); + + assertThat(publisher.events) // + 
.extracting(e -> (Class) e.getClass()) // + .containsExactly( // + AfterConvertEvent.class, // + AfterConvertEvent.class // + ); + } + + private static NamedParameterJdbcOperations createIdGeneratingOperations() { + + Answer setIdInKeyHolder = invocation -> { + + HashMap keys = new HashMap<>(); + keys.put("id", generatedId); + KeyHolder keyHolder = invocation.getArgument(2); + keyHolder.getKeyList().add(keys); + + return 1; + }; + + NamedParameterJdbcOperations operations = mock(NamedParameterJdbcOperations.class); + when(operations.update(anyString(), any(SqlParameterSource.class), any(KeyHolder.class))) + .thenAnswer(setIdInKeyHolder); + when(operations.getJdbcOperations()).thenReturn(mock(JdbcOperations.class)); + return operations; + } + + interface DummyEntityRepository + extends CrudRepository, PagingAndSortingRepository {} + + static final class DummyEntity { + private final @Id Long id; + + public DummyEntity(Long id) { + this.id = id; + } + + public Long getId() { + return this.id; + } + + public DummyEntity withId(Long id) { + return this.id == id ? 
this : new DummyEntity(id); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + DummyEntity that = (DummyEntity) o; + + return ObjectUtils.nullSafeEquals(id, that.id); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(id); + } + + public String toString() { + return "SimpleJdbcRepositoryEventsUnitTests.DummyEntity(id=" + this.getId() + ")"; + } + + } + + static class CollectingEventPublisher implements ApplicationEventPublisher { + + List events = new ArrayList<>(); + + @Override + public void publishEvent(Object o) { + events.add((RelationalEvent) o); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests.java new file mode 100644 index 0000000000..7156cae4c9 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests.java @@ -0,0 +1,216 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.dao.DataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.repository.config.DefaultQueryMappingConfiguration; +import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +/** + * Very simple use cases for creation and usage of {@link ResultSetExtractor}s in JdbcRepository. 
+ * + * @author Evgeni Dimitrov + * @author Hebert Coelho + */ +@IntegrationTest +public class StringBasedJdbcQueryMappingConfigurationIntegrationTests { + + private final static String CAR_MODEL = "ResultSetExtractor Car"; + private final static String VALUE_PROCESSED_BY_SERVICE = "Value Processed by Service"; + + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(value = CarRepository.class, type = FilterType.ASSIGNABLE_TYPE)) + static class Config { + + @Bean + QueryMappingConfiguration mappers() { + return new DefaultQueryMappingConfiguration(); + } + + @Bean(value = "CarResultSetExtractorBean") + public CarResultSetExtractorBean resultSetExtractorBean() { + return new CarResultSetExtractorBean(); + } + + @Bean + public CustomerService service() { + return new CustomerService(); + } + + @Bean(value = "CustomRowMapperBean") + public CustomRowMapperBean rowMapperBean() { + return new CustomRowMapperBean(); + } + + } + + public static class CarResultSetExtractorBean implements ResultSetExtractor> { + + @Autowired private CustomerService customerService; + + @Override + public List extractData(ResultSet rs) throws SQLException, DataAccessException { + return List.of(new Car(1L, customerService.process())); + } + + } + + public static class CustomRowMapperBean implements RowMapper { + + @Autowired private CustomerService customerService; + + @Override + public String mapRow(ResultSet rs, int rowNum) throws SQLException { + return customerService.process(); + } + } + + public static class CustomerService { + public String process() { + return VALUE_PROCESSED_BY_SERVICE; + } + } + + public static class Car { + + @Id private Long id; + private String model; + + public Car(Long id, String model) { + this.id = id; + this.model = model; + } + + public Long getId() { + return this.id; + } + + public String getModel() { + return this.model; + } + + public void setId(Long id) 
{ + this.id = id; + } + + public void setModel(String model) { + this.model = model; + } + } + + static class CarResultSetExtractor implements ResultSetExtractor> { + + @Override + public List extractData(ResultSet rs) throws SQLException, DataAccessException { + return singletonList(new Car(1L, CAR_MODEL)); + } + } + + public static class RowMapperResultSetExtractor implements ResultSetExtractor { + + final RowMapper rowMapper; + + public RowMapperResultSetExtractor(RowMapper rowMapper) { + this.rowMapper = rowMapper; + } + + @Override + public RowMapper extractData(ResultSet rs) throws SQLException, DataAccessException { + return rowMapper; + } + } + + interface CarRepository extends CrudRepository { + + @Query(value = "select * from car", resultSetExtractorClass = CarResultSetExtractor.class) + List customFindAll(); + + @Query(value = "select * from car", resultSetExtractorRef = "CarResultSetExtractorBean") + List findByNameWithResultSetExtractor(); + + @Query(value = "select model from car", rowMapperRef = "CustomRowMapperBean") + List findByNameWithRowMapperBean(); + + @Query(value = "select * from car", resultSetExtractorClass = RowMapperResultSetExtractor.class) + RowMapper customFindAllWithRowMapper(); + + } + + @Autowired NamedParameterJdbcTemplate template; + @Autowired CarRepository carRepository; + + @Test // DATAJDBC-290 + void customFindAllCarsUsesConfiguredResultSetExtractor() { + + carRepository.save(new Car(null, "Some model")); + Iterable cars = carRepository.customFindAll(); + + assertThat(cars).hasSize(1); + assertThat(cars).allMatch(car -> CAR_MODEL.equals(car.getModel())); + } + + @Test // DATAJDBC-430 + void customFindWithRowMapperBeanSupportingInjection() { + + carRepository.save(new Car(null, "Some model")); + List names = carRepository.findByNameWithRowMapperBean(); + + assertThat(names).hasSize(1); + assertThat(names).allMatch(name -> VALUE_PROCESSED_BY_SERVICE.equals(name)); + } + + @Test // DATAJDBC-430 + void 
customFindWithResultSetExtractorBeanSupportingInjection() { + + carRepository.save(new Car(null, "Some model")); + Iterable cars = carRepository.findByNameWithResultSetExtractor(); + + assertThat(cars).hasSize(1); + assertThat(cars).allMatch(car -> VALUE_PROCESSED_BY_SERVICE.equals(car.getModel())); + } + + @Test // DATAJDBC-620 + void defaultRowMapperGetsInjectedIntoCustomResultSetExtractor() { + + RowMapper rowMapper = carRepository.customFindAllWithRowMapper(); + + assertThat(rowMapper).isNotNull(); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/AbstractJdbcConfigurationIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/AbstractJdbcConfigurationIntegrationTests.java new file mode 100644 index 0000000000..9c8ee97388 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/AbstractJdbcConfigurationIntegrationTests.java @@ -0,0 +1,222 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.function.Consumer; + +import org.junit.jupiter.api.Test; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.jdbc.core.JdbcAggregateTemplate; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.JdbcCustomConversions; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.relational.RelationalManagedTypes; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.LimitClause; +import org.springframework.data.relational.core.dialect.LockClause; +import org.springframework.data.relational.core.sql.render.SelectRenderContext; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Integration tests for {@link AbstractJdbcConfiguration}. 
+ * + * @author Oliver Drotbohm + * @author Mark Paluch + */ +class AbstractJdbcConfigurationIntegrationTests { + + @Test // DATAJDBC-395 + void configuresInfrastructureComponents() { + + assertApplicationContext(context -> { + + List> expectedBeanTypes = asList(DataAccessStrategy.class, // + JdbcMappingContext.class, // + JdbcConverter.class, // + JdbcCustomConversions.class, // + JdbcAggregateTemplate.class); + + expectedBeanTypes.stream() // + .map(context::getBean) // + .forEach(it -> assertThat(it).isNotNull()); + + }, AbstractJdbcConfigurationUnderTest.class, Infrastructure.class); + } + + @Test // GH-975 + void registersSimpleTypesFromCustomConversions() { + + assertApplicationContext(context -> { + + JdbcMappingContext mappingContext = context.getBean(JdbcMappingContext.class); + assertThat( // + mappingContext.getPersistentEntity(AbstractJdbcConfigurationUnderTest.Blah.class) // + ).describedAs("Blah should not be an entity, since there is a WritingConversion configured for it") // + .isNull(); + + }, AbstractJdbcConfigurationUnderTest.class, Infrastructure.class); + } + + @Test // GH-908 + void userProvidedConversionsOverwriteDialectSpecificConversions() { + + assertApplicationContext(applicationContext -> { + + Optional> customWriteTarget = applicationContext.getBean(JdbcCustomConversions.class) + .getCustomWriteTarget(Boolean.class); + + assertThat(customWriteTarget).contains(String.class); + + }, AbstractJdbcConfigurationUnderTest.class, Infrastructure.class); + } + + @Test // GH-1269 + void detectsInitialEntities() { + + assertApplicationContext(context -> { + + JdbcMappingContext mappingContext = context.getBean(JdbcMappingContext.class); + RelationalManagedTypes managedTypes = (RelationalManagedTypes) ReflectionTestUtils.getField(mappingContext, + "managedTypes"); + + assertThat(managedTypes.toList()).contains(JdbcRepositoryConfigExtensionUnitTests.Sample.class, + TopLevelEntity.class); + + }, AbstractJdbcConfigurationUnderTest.class, 
Infrastructure.class); + } + + static void assertApplicationContext(Consumer verification, + Class... configurationClasses) { + + try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext()) { + + context.register(configurationClasses); + context.refresh(); + + verification.accept(context); + } + } + + @Configuration + static class Infrastructure { + + @Bean + public NamedParameterJdbcOperations jdbcOperations() { + + JdbcOperations jdbcOperations = mock(JdbcOperations.class); + return new NamedParameterJdbcTemplate(jdbcOperations); + } + } + + static class AbstractJdbcConfigurationUnderTest extends AbstractJdbcConfiguration { + + @Override + @Bean + public Dialect jdbcDialect(NamedParameterJdbcOperations operations) { + return new DummyDialect(); + } + + @Override + protected List userConverters() { + return asList(Blah2BlubbConverter.INSTANCE, BooleanToYnConverter.INSTANCE); + } + + @WritingConverter + enum Blah2BlubbConverter implements Converter { + INSTANCE; + + @Override + public Blubb convert(Blah blah) { + return new Blubb(); + } + } + + private static class Blah {} + + private static class Blubb {} + + private static class DummyDialect implements Dialect { + @Override + public LimitClause limit() { + return null; + } + + @Override + public LockClause lock() { + return null; + } + + @Override + public SelectRenderContext getSelectContext() { + return null; + } + + @Override + public Collection getConverters() { + return asList(BooleanToNumberConverter.INSTANCE, NumberToBooleanConverter.INSTANCE); + } + } + + @WritingConverter + enum BooleanToNumberConverter implements Converter { + INSTANCE; + + @Override + public Number convert(Boolean source) { + return source ? 
1 : 0; + } + } + + @ReadingConverter + enum NumberToBooleanConverter implements Converter { + INSTANCE; + + @Override + public Boolean convert(Number source) { + return source.intValue() == 0; + } + } + + @WritingConverter + enum BooleanToYnConverter implements Converter { + INSTANCE; + + @Override + public String convert(Boolean source) { + return source ? "Y" : "N"; + } + } + + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/ConfigurableRowMapperMapUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/ConfigurableRowMapperMapUnitTests.java new file mode 100644 index 0000000000..7f0246f438 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/ConfigurableRowMapperMapUnitTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.jdbc.core.RowMapper; + +/** + * Unit tests for {@link DefaultQueryMappingConfiguration}. 
+ * + * @author Jens Schauder + * @author Evgeni Dimitrov + */ +public class ConfigurableRowMapperMapUnitTests { + + @Test // DATAJDBC-166 + public void freshInstanceReturnsNull() { + + QueryMappingConfiguration map = new DefaultQueryMappingConfiguration(); + + assertThat(map.getRowMapper(Object.class)).isNull(); + } + + @Test // DATAJDBC-166 + public void returnsConfiguredInstanceForClass() { + + RowMapper rowMapper = mock(RowMapper.class); + + QueryMappingConfiguration map = new DefaultQueryMappingConfiguration().registerRowMapper(Object.class, rowMapper); + + assertThat(map.getRowMapper(Object.class)).isEqualTo(rowMapper); + } + + @Test // DATAJDBC-166 + public void returnsNullForClassNotConfigured() { + + RowMapper rowMapper = mock(RowMapper.class); + + QueryMappingConfiguration map = new DefaultQueryMappingConfiguration().registerRowMapper(Number.class, rowMapper); + + assertThat(map.getRowMapper(Integer.class)).isNull(); + assertThat(map.getRowMapper(String.class)).isNull(); + } + + @Test // DATAJDBC-166 + public void returnsInstanceRegisteredForSubClass() { + + RowMapper rowMapper = mock(RowMapper.class); + + QueryMappingConfiguration map = new DefaultQueryMappingConfiguration().registerRowMapper(String.class, rowMapper); + + assertThat(map.getRowMapper(Object.class)).isEqualTo(rowMapper); + } + + @Test // DATAJDBC-166 + public void prefersExactTypeMatchClass() { + + RowMapper rowMapper = mock(RowMapper.class); + + QueryMappingConfiguration map = new DefaultQueryMappingConfiguration() // + .registerRowMapper(Object.class, mock(RowMapper.class)) // + .registerRowMapper(Integer.class, rowMapper) // + .registerRowMapper(Number.class, mock(RowMapper.class)); + + assertThat(map.getRowMapper(Integer.class)).isEqualTo(rowMapper); + } + + @Test // DATAJDBC-166 + public void prefersLatestRegistrationForSuperTypeMatch() { + + RowMapper rowMapper = mock(RowMapper.class); + + QueryMappingConfiguration map = new DefaultQueryMappingConfiguration() // + 
.registerRowMapper(Integer.class, mock(RowMapper.class)) // + .registerRowMapper(Number.class, rowMapper); + + assertThat(map.getRowMapper(Object.class)).isEqualTo(rowMapper); + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcAuditingHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcAuditingHsqlIntegrationTests.java new file mode 100644 index 0000000000..04c7b52361 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcAuditingHsqlIntegrationTests.java @@ -0,0 +1,519 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.*; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import org.junit.jupiter.api.Test; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.context.annotation.Primary; +import org.springframework.data.annotation.CreatedBy; +import org.springframework.data.annotation.CreatedDate; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedBy; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.auditing.DateTimeProvider; +import org.springframework.data.domain.AuditorAware; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestClass; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.relational.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.repository.CrudRepository; +import org.springframework.stereotype.Component; + +/** + * Tests the {@link EnableJdbcAuditing} annotation. 
+ * + * @author Kazuki Shimizu + * @author Jens Schauder + * @author Salim Achouche + */ +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) +public class EnableJdbcAuditingHsqlIntegrationTests { + + @Test // DATAJDBC-204 + public void auditForAnnotatedEntity() { + + configureRepositoryWith( // + AuditingAnnotatedDummyEntityRepository.class, // + Config.class, // + AuditingConfiguration.class) // + .accept(repository -> { + + AuditingConfiguration.currentAuditor = "user01"; + LocalDateTime now = LocalDateTime.now(); + + AuditingAnnotatedDummyEntity entity = repository.save(new AuditingAnnotatedDummyEntity()); + + assertThat(entity.id).as("id not null").isNotNull(); + assertThat(entity.getCreatedBy()).as("created by set").isEqualTo("user01"); + assertThat(entity.getCreatedDate()).as("created date set").isAfter(now); + assertThat(entity.getLastModifiedBy()).as("modified by set").isEqualTo("user01"); + assertThat(entity.getLastModifiedDate()).as("modified date set") + .isAfterOrEqualTo(entity.getCreatedDate()); + assertThat(entity.getLastModifiedDate()).as("modified date after instance creation").isAfter(now); + + AuditingAnnotatedDummyEntity reloaded = repository.findById(entity.id).get(); + + assertThat(reloaded.getCreatedBy()).as("reload created by").isNotNull(); + assertThat(reloaded.getCreatedDate()).as("reload created date").isNotNull(); + assertThat(reloaded.getLastModifiedBy()).as("reload modified by").isNotNull(); + assertThat(reloaded.getLastModifiedDate()).as("reload modified date").isNotNull(); + + LocalDateTime beforeCreatedDate = entity.getCreatedDate(); + LocalDateTime beforeLastModifiedDate = entity.getLastModifiedDate(); + + sleepMillis(10); + + AuditingConfiguration.currentAuditor = "user02"; + + entity = repository.save(entity); + + assertThat(entity.getCreatedBy()).as("created by unchanged").isEqualTo("user01"); + assertThat(entity.getCreatedDate()).as("created date unchanged").isEqualTo(beforeCreatedDate); + 
assertThat(entity.getLastModifiedBy()).as("modified by updated").isEqualTo("user02"); + assertThat(entity.getLastModifiedDate()).as("modified date updated") + .isAfter(beforeLastModifiedDate); + + reloaded = repository.findById(entity.id).get(); + + assertThat(reloaded.getCreatedBy()).as("2. reload created by").isNotNull(); + assertThat(reloaded.getCreatedDate()).as("2. reload created date").isNotNull(); + assertThat(reloaded.getLastModifiedBy()).as("2. reload modified by").isNotNull(); + assertThat(reloaded.getLastModifiedDate()).as("2. reload modified date").isNotNull(); + }); + } + + @Test // DATAJDBC-204 + public void noAnnotatedEntity() { + + configureRepositoryWith( // + DummyEntityRepository.class, // + Config.class, // + AuditingConfiguration.class) // + .accept(repository -> { + + DummyEntity entity = repository.save(new DummyEntity()); + + assertThat(entity.id).isNotNull(); + assertThat(repository.findById(entity.id).get()).isEqualTo(entity); + + entity = repository.save(entity); + + assertThat(repository.findById(entity.id)).contains(entity); + }); + } + + @Test // DATAJDBC-204 + public void customDateTimeProvider() { + + configureRepositoryWith( // + AuditingAnnotatedDummyEntityRepository.class, // + Config.class, // + CustomizeAuditorAwareAndDateTimeProvider.class) // + .accept(repository -> { + + LocalDateTime currentDateTime = LocalDate.of(2018, 4, 14).atStartOfDay(); + CustomizeAuditorAwareAndDateTimeProvider.currentDateTime = currentDateTime; + + AuditingAnnotatedDummyEntity entity = repository.save(new AuditingAnnotatedDummyEntity()); + + assertThat(entity.id).isNotNull(); + assertThat(entity.getCreatedBy()).isEqualTo("custom user"); + assertThat(entity.getCreatedDate()).isEqualTo(currentDateTime); + assertThat(entity.getLastModifiedBy()).isNull(); + assertThat(entity.getLastModifiedDate()).isNull(); + }); + } + + @Test // DATAJDBC-204 + public void customAuditorAware() { + + configureRepositoryWith( // + 
AuditingAnnotatedDummyEntityRepository.class, // + Config.class, // + CustomizeAuditorAware.class) // + .accept(repository -> { + + AuditingAnnotatedDummyEntity entity = repository.save(new AuditingAnnotatedDummyEntity()); + + assertThat(entity.id).isNotNull(); + assertThat(entity.getCreatedBy()).isEqualTo("user"); + assertThat(entity.getCreatedDate()).isNull(); + assertThat(entity.getLastModifiedBy()).isEqualTo("user"); + assertThat(entity.getLastModifiedDate()).isNull(); + }); + } + + @Test // DATAJDBC-390 + public void auditingListenerTriggersBeforeDefaultListener() { + + configureRepositoryWith( // + AuditingAnnotatedDummyEntityRepository.class, // + Config.class, // + AuditingConfiguration.class, // + OrderAssertingEventListener.class, // + OrderAssertingCallback.class // + ) // + .accept(repository -> { + + AuditingAnnotatedDummyEntity entity = repository.save(new AuditingAnnotatedDummyEntity()); + + assertThat(entity.id).isNotNull(); + }); + } + + + @Test // DATAJDBC-1694 + public void auditEmbeddedRecord() { + + configureRepositoryWith( // + DummyEntityWithEmbeddedRecordRepository.class, // + Config.class, // + AuditingConfiguration.class) // + .accept(repository -> { + + AuditingConfiguration.currentAuditor = "user01"; + LocalDateTime now = LocalDateTime.now(); + + DummyEntityWithEmbeddedRecord entity = repository.save(new DummyEntityWithEmbeddedRecord(null, new EmbeddedAuditing(null, null, null, null))); + + assertThat(entity.id).as("id not null").isNotNull(); + assertThat(entity.auditing.createdBy).as("created by set").isEqualTo("user01"); + assertThat(entity.auditing.createdDate()).as("created date set").isAfter(now); + assertThat(entity.auditing.lastModifiedBy()).as("modified by set").isEqualTo("user01"); + assertThat(entity.auditing.lastModifiedDate()).as("modified date set") + .isAfterOrEqualTo(entity.auditing.createdDate()); + assertThat(entity.auditing.lastModifiedDate()).as("modified date after instance creation").isAfter(now); + + 
DummyEntityWithEmbeddedRecord reloaded = repository.findById(entity.id).get(); + + assertThat(reloaded.auditing.createdBy()).as("reload created by").isNotNull(); + assertThat(reloaded.auditing.createdDate()).as("reload created date").isNotNull(); + assertThat(reloaded.auditing.lastModifiedBy()).as("reload modified by").isNotNull(); + assertThat(reloaded.auditing.lastModifiedDate()).as("reload modified date").isNotNull(); + + LocalDateTime beforeCreatedDate = entity.auditing().createdDate; + LocalDateTime beforeLastModifiedDate = entity.auditing().lastModifiedDate; + + sleepMillis(10); + + AuditingConfiguration.currentAuditor = "user02"; + + entity = repository.save(entity); + + assertThat(entity.auditing.createdBy()).as("created by unchanged").isEqualTo("user01"); + assertThat(entity.auditing.createdDate()).as("created date unchanged").isEqualTo(beforeCreatedDate); + assertThat(entity.auditing.lastModifiedBy()).as("modified by updated").isEqualTo("user02"); + assertThat(entity.auditing.lastModifiedDate()).as("modified date updated") + .isAfter(beforeLastModifiedDate); + + reloaded = repository.findById(entity.id).get(); + + assertThat(reloaded.auditing.createdBy()).as("2. reload created by").isNotNull(); + assertThat(reloaded.auditing.createdDate()).as("2. reload created date").isNotNull(); + assertThat(reloaded.auditing.lastModifiedBy()).as("2. reload modified by").isNotNull(); + assertThat(reloaded.auditing.lastModifiedDate()).as("2. 
reload modified date").isNotNull(); + }); + } + @Test // DATAJDBC-1694 + public void auditEmbeddedNullRecordStaysNull() { + + configureRepositoryWith( // + DummyEntityWithEmbeddedRecordRepository.class, // + Config.class, // + AuditingConfiguration.class) // + .accept(repository -> { + + AuditingConfiguration.currentAuditor = "user01"; + + DummyEntityWithEmbeddedRecord entity = repository.save(new DummyEntityWithEmbeddedRecord(null, null)); + + assertThat(entity.id).as("id not null").isNotNull(); + assertThat(entity.auditing).isNull(); + + DummyEntityWithEmbeddedRecord reloaded = repository.findById(entity.id).get(); + + assertThat(reloaded.auditing).isNull(); + }); + } + + + /** + * Usage looks like this: + *

+ * {@code configure(MyRepository.class, MyConfiguration) .accept(repository -> { // perform tests on repository here + * }); } + * + * @param repositoryType the type of repository you want to perform tests on. + * @param configurationClasses the classes containing the configuration for the + * {@link org.springframework.context.ApplicationContext}. + * @param type of the entity managed by the repository. + * @param type of the repository. + * @return a Consumer for repositories of type {@code R}. + */ + private > Consumer> configureRepositoryWith(Class repositoryType, + Class... configurationClasses) { + + return (Consumer test) -> { + + try (ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(configurationClasses)) { + + test.accept(context.getBean(repositoryType)); + } + }; + } + + private void sleepMillis(int timeout) { + + try { + TimeUnit.MILLISECONDS.sleep(timeout); + } catch (InterruptedException e) { + + throw new RuntimeException("Failed to sleep", e); + } + } + + interface AuditingAnnotatedDummyEntityRepository extends CrudRepository { + } + + static class AuditingAnnotatedDummyEntity { + + @Id + long id; + @CreatedBy + String createdBy; + @CreatedDate + LocalDateTime createdDate; + @LastModifiedBy + String lastModifiedBy; + @LastModifiedDate + LocalDateTime lastModifiedDate; + + public long getId() { + return this.id; + } + + public String getCreatedBy() { + return this.createdBy; + } + + public LocalDateTime getCreatedDate() { + return this.createdDate; + } + + public String getLastModifiedBy() { + return this.lastModifiedBy; + } + + public LocalDateTime getLastModifiedDate() { + return this.lastModifiedDate; + } + + public void setId(long id) { + this.id = id; + } + + public void setCreatedBy(String createdBy) { + this.createdBy = createdBy; + } + + public void setCreatedDate(LocalDateTime createdDate) { + this.createdDate = createdDate; + } + + public void setLastModifiedBy(String lastModifiedBy) { + this.lastModifiedBy = 
lastModifiedBy; + } + + public void setLastModifiedDate(LocalDateTime lastModifiedDate) { + this.lastModifiedDate = lastModifiedDate; + } + } + + interface DummyEntityRepository extends CrudRepository { + } + + static class DummyEntity { + + @Id + private Long id; + // not actually used, exists just to avoid empty value list during insert. + String name; + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(Long id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + DummyEntity that = (DummyEntity) o; + return Objects.equals(id, that.id) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + } + + record DummyEntityWithEmbeddedRecord( + @Id Long id, + @Embedded.Nullable EmbeddedAuditing auditing + ) { + } + + record EmbeddedAuditing( + @CreatedBy String createdBy, + @CreatedDate LocalDateTime createdDate, + @LastModifiedBy String lastModifiedBy, + @LastModifiedDate LocalDateTime lastModifiedDate + ) { + } + interface DummyEntityWithEmbeddedRecordRepository extends CrudRepository { + } + + @Configuration + @EnableJdbcRepositories(considerNestedRepositories = true) + @Import(TestConfiguration.class) + static class Config { + + @Bean + TestClass testClass() { + return TestClass.of(EnableJdbcAuditingHsqlIntegrationTests.class); + } + + @Bean + NamingStrategy namingStrategy() { + + return new NamingStrategy() { + + @Override + public String getTableName(Class type) { + return "DummyEntity"; + } + }; + } + } + + @EnableJdbcAuditing + static class AuditingConfiguration { + static String currentAuditor; + + @Bean + AuditorAware auditorAware() { + return () -> Optional.ofNullable(currentAuditor); + } + } + + @EnableJdbcAuditing(auditorAwareRef = 
"customAuditorAware", dateTimeProviderRef = "customDateTimeProvider", + modifyOnCreate = false) + static class CustomizeAuditorAwareAndDateTimeProvider { + static LocalDateTime currentDateTime; + + @Bean + @Primary + AuditorAware auditorAware() { + return () -> Optional.of("default user"); + } + + @Bean + AuditorAware customAuditorAware() { + return () -> Optional.of("custom user"); + } + + @Bean + DateTimeProvider customDateTimeProvider() { + return () -> Optional.ofNullable(currentDateTime); + } + } + + @EnableJdbcAuditing(setDates = false) + static class CustomizeAuditorAware { + + @Bean + AuditorAware auditorAware() { + return () -> Optional.of("user"); + } + } + + /** + * An event listener asserting that it is running after {@link AuditingConfiguration#auditorAware()} was invoked and + * set the auditing data. + */ + @Component + static class OrderAssertingEventListener implements ApplicationListener { + + @Override + public void onApplicationEvent(BeforeSaveEvent event) { + + Object entity = event.getEntity(); + assertThat(entity).isInstanceOf(AuditingAnnotatedDummyEntity.class); + assertThat(((AuditingAnnotatedDummyEntity) entity).createdDate).isNotNull(); + } + } + + /** + * An event listener asserting that it is running after {@link AuditingConfiguration#auditorAware()} was invoked and + * set the auditing data. 
+ */ + @Component + static class OrderAssertingCallback implements BeforeConvertCallback { + + @Override + public Object onBeforeConvert(Object entity) { + + assertThat(entity).isInstanceOf(AuditingAnnotatedDummyEntity.class); + assertThat(((AuditingAnnotatedDummyEntity) entity).createdDate).isNotNull(); + + return entity; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests.java new file mode 100644 index 0000000000..db06720206 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests.java @@ -0,0 +1,71 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.repository.CrudRepository; + +/** + * Tests the {@link EnableJdbcRepositories} annotation. + * + * @author Jens Schauder + */ +@IntegrationTest +public class EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests { + + @Autowired DummyRepository repository; + + @Test // DATAJDBC-622 + public void missingTransactionManagerCausesException() { + assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() -> repository.findAll()); + } + + interface DummyRepository extends CrudRepository { + + } + + static class DummyEntity { + @Id private Long id; + + public Long getId() { + return this.id; + } + + public void setId(Long id) { + this.id = id; + } + } + + @Configuration + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = DummyRepository.class), + transactionManagerRef = "no-such-transaction-manager") + @Import(TestConfiguration.class) + static class Config { + + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesIntegrationTests.java new 
file mode 100644 index 0000000000..6cb7524d2c --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesIntegrationTests.java @@ -0,0 +1,247 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Field; +import java.util.Optional; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.Import; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.JdbcAggregateTemplate; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.DataAccessStrategyFactory; +import org.springframework.data.jdbc.core.convert.InsertStrategyFactory; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import 
org.springframework.data.jdbc.core.convert.SqlGeneratorSource; +import org.springframework.data.jdbc.core.convert.SqlParametersFactory; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactoryBean; +import org.springframework.data.jdbc.testing.IntegrationTest; +import org.springframework.data.jdbc.testing.TestConfiguration; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.util.ReflectionUtils; + +/** + * Tests the {@link EnableJdbcRepositories} annotation. 
+ * + * @author Jens Schauder + * @author Greg Turnquist + * @author Evgeni Dimitrov + * @author Fei Dong + * @author Chirag Tailor + * @author Diego Krupitza + */ +@IntegrationTest +public class EnableJdbcRepositoriesIntegrationTests { + + static final Field MAPPER_MAP = ReflectionUtils.findField(JdbcRepositoryFactoryBean.class, + "queryMappingConfiguration"); + static final Field OPERATIONS = ReflectionUtils.findField(JdbcRepositoryFactoryBean.class, "operations"); + static final Field DATA_ACCESS_STRATEGY = ReflectionUtils.findField(JdbcRepositoryFactoryBean.class, + "dataAccessStrategy"); + public static final RowMapper DUMMY_ENTITY_ROW_MAPPER = mock(RowMapper.class); + public static final RowMapper STRING_ROW_MAPPER = mock(RowMapper.class); + + @Autowired JdbcRepositoryFactoryBean factoryBean; + @Autowired DummyRepository repository; + @Autowired + @Qualifier("namedParameterJdbcTemplate") NamedParameterJdbcOperations defaultOperations; + @Autowired + @Qualifier("defaultDataAccessStrategy") DataAccessStrategy defaultDataAccessStrategy; + @Autowired + @Qualifier("qualifierJdbcOperations") NamedParameterJdbcOperations qualifierJdbcOperations; + @Autowired + @Qualifier("qualifierDataAccessStrategy") DataAccessStrategy qualifierDataAccessStrategy; + + @BeforeAll + public static void setup() { + + MAPPER_MAP.setAccessible(true); + OPERATIONS.setAccessible(true); + DATA_ACCESS_STRATEGY.setAccessible(true); + } + + @Test // DATAJDBC-100 + public void repositoryGetsPickedUp() { + + assertThat(repository).isNotNull(); + + long count = repository.count(); + + // the custom base class has a result of 23 hard wired. 
+ assertThat(count).isEqualTo(23L); + } + + @Test // DATAJDBC-166 + public void customRowMapperConfigurationGetsPickedUp() { + + QueryMappingConfiguration mapping = (QueryMappingConfiguration) ReflectionUtils.getField(MAPPER_MAP, factoryBean); + + assertThat(mapping.getRowMapper(String.class)).isEqualTo(STRING_ROW_MAPPER); + assertThat(mapping.getRowMapper(DummyEntity.class)).isEqualTo(DUMMY_ENTITY_ROW_MAPPER); + } + + @Test // DATAJDBC-293 + public void jdbcOperationsRef() { + + NamedParameterJdbcOperations operations = (NamedParameterJdbcOperations) ReflectionUtils.getField(OPERATIONS, + factoryBean); + assertThat(operations).isNotSameAs(defaultOperations).isSameAs(qualifierJdbcOperations); + + DataAccessStrategy dataAccessStrategy = (DataAccessStrategy) ReflectionUtils.getField(DATA_ACCESS_STRATEGY, + factoryBean); + assertThat(dataAccessStrategy).isNotSameAs(defaultDataAccessStrategy).isSameAs(qualifierDataAccessStrategy); + } + + interface DummyRepository extends CrudRepository { + + } + + static class DummyEntity { + @Id private Long id; + + public Long getId() { + return this.id; + } + + public void setId(Long id) { + this.id = id; + } + } + + @Configuration + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = DummyRepository.class), + jdbcOperationsRef = "qualifierJdbcOperations", dataAccessStrategyRef = "qualifierDataAccessStrategy", + repositoryBaseClass = DummyRepositoryBaseClass.class) + @Import(TestConfiguration.class) + static class Config { + + @Bean + QueryMappingConfiguration rowMappers() { + + return new DefaultQueryMappingConfiguration() // + .registerRowMapper(DummyEntity.class, DUMMY_ENTITY_ROW_MAPPER) // + .registerRowMapper(String.class, STRING_ROW_MAPPER); + } + + @Bean("qualifierJdbcOperations") + NamedParameterJdbcOperations qualifierJdbcOperations(DataSource dataSource) { + return new NamedParameterJdbcTemplate(dataSource); + } + + 
@Bean("qualifierDataAccessStrategy") + DataAccessStrategy defaultDataAccessStrategy( + @Qualifier("namedParameterJdbcTemplate") NamedParameterJdbcOperations template, + RelationalMappingContext context, JdbcConverter converter, Dialect dialect) { + return new DataAccessStrategyFactory(new SqlGeneratorSource(context, converter, dialect), converter, template, + new SqlParametersFactory(context, converter), + new InsertStrategyFactory(template, dialect)).create(); + } + + @Bean + Dialect jdbcDialect(@Qualifier("qualifierJdbcOperations") NamedParameterJdbcOperations operations) { + return DialectResolver.getDialect(operations.getJdbcOperations()); + } + } + + private static class DummyRepositoryBaseClass implements CrudRepository { + + DummyRepositoryBaseClass(JdbcAggregateTemplate template, PersistentEntity persistentEntity, + JdbcConverter converter) { + + } + + @Override + public S save(S s) { + return null; + } + + @Override + public Iterable saveAll(Iterable iterable) { + return null; + } + + @Override + public Optional findById(ID id) { + return Optional.empty(); + } + + @Override + public boolean existsById(ID id) { + return false; + } + + @Override + public Iterable findAll() { + return null; + } + + @Override + public Iterable findAllById(Iterable iterable) { + return null; + } + + @Override + public long count() { + return 23; + } + + @Override + public void deleteById(ID id) { + + } + + @Override + public void delete(T t) { + + } + + @Override + public void deleteAll(Iterable iterable) { + + } + + @Override + public void deleteAll() { + + } + + @Override + public void deleteAllById(Iterable ids) { + + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrarUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrarUnitTests.java new file mode 100644 index 0000000000..260e5fdc92 --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/JdbcRepositoriesRegistrarUnitTests.java @@ -0,0 +1,91 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.context.annotation.AnnotationBeanNameGenerator; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.data.repository.CrudRepository; + +/** + * @author Christoph Strobl + */ +public class JdbcRepositoriesRegistrarUnitTests { + + private BeanDefinitionRegistry registry; + + @BeforeEach + void setUp() { + registry = new DefaultListableBeanFactory(); + } + + @ParameterizedTest // GH-1853 + @MethodSource(value = { "args" }) + void 
configuresRepositoriesCorrectly(AnnotationMetadata metadata, String[] beanNames) { + + JdbcRepositoriesRegistrar registrar = new JdbcRepositoriesRegistrar(); + registrar.setResourceLoader(new DefaultResourceLoader()); + registrar.setEnvironment(new StandardEnvironment()); + registrar.registerBeanDefinitions(metadata, registry); + + Iterable names = Arrays.asList(registry.getBeanDefinitionNames()); + assertThat(names).contains(beanNames); + } + + static Stream args() { + return Stream.of( + Arguments.of(AnnotationMetadata.introspect(Config.class), + new String[] { "jdbcRepositoriesRegistrarUnitTests.PersonRepository" }), + Arguments.of(AnnotationMetadata.introspect(ConfigWithBeanNameGenerator.class), + new String[] { "jdbcRepositoriesRegistrarUnitTests.PersonREPO" })); + } + + @EnableJdbcRepositories(basePackageClasses = PersonRepository.class, considerNestedRepositories = true) + private class Config { + + } + + @EnableJdbcRepositories(basePackageClasses = PersonRepository.class, nameGenerator = MyBeanNameGenerator.class, + considerNestedRepositories = true) + private class ConfigWithBeanNameGenerator { + + } + + static class MyBeanNameGenerator extends AnnotationBeanNameGenerator { + + @Override + public String generateBeanName(BeanDefinition definition, BeanDefinitionRegistry registry) { + return super.generateBeanName(definition, registry).replaceAll("Repository", "REPO"); + } + } + + static class Person {} + + interface PersonRepository extends CrudRepository {} +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtensionUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtensionUnitTests.java new file mode 100644 index 0000000000..b0a5a9acba --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtensionUnitTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019-2025 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Collection; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.ResourceLoader; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; +import org.springframework.data.repository.config.RepositoryConfiguration; +import org.springframework.data.repository.config.RepositoryConfigurationSource; + +/** + * Unit tests for {@link JdbcRepositoryConfigExtension}. 
+ * + * @author Jens Schauder + */ +public class JdbcRepositoryConfigExtensionUnitTests { + + AnnotationMetadata metadata = AnnotationMetadata.introspect(Config.class); + ResourceLoader loader = new PathMatchingResourcePatternResolver(); + Environment environment = new StandardEnvironment(); + BeanDefinitionRegistry registry = new DefaultListableBeanFactory(); + + RepositoryConfigurationSource configurationSource = new AnnotationRepositoryConfigurationSource(metadata, + EnableJdbcRepositories.class, loader, environment, registry, null); + + @Test // DATAJPA-437 + public void isStrictMatchOnlyIfDomainTypeIsAnnotatedWithDocument() { + + JdbcRepositoryConfigExtension extension = new JdbcRepositoryConfigExtension(); + + Collection> configs = extension + .getRepositoryConfigurations(configurationSource, loader, true); + + assertThat(configs).extracting(config -> config.getRepositoryInterface()) + .containsExactly(SampleRepository.class.getName()); + } + + @EnableJdbcRepositories(considerNestedRepositories = true) + static class Config { + + } + + @Table + static class Sample {} + + interface SampleRepository extends Repository {} + + static class Unannotated {} + + interface UnannotatedRepository extends Repository {} +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/MyBatisJdbcConfigurationIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/MyBatisJdbcConfigurationIntegrationTests.java new file mode 100644 index 0000000000..b0ad7a4b1a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/MyBatisJdbcConfigurationIntegrationTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.List; + +import org.apache.ibatis.session.SqlSession; +import org.junit.jupiter.api.Test; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.jdbc.core.convert.CascadingDataAccessStrategy; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.dialect.JdbcHsqlDbDialect; +import org.springframework.data.jdbc.mybatis.MyBatisDataAccessStrategy; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.HsqlDbDialect; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Integration tests for {@link MyBatisJdbcConfiguration}. 
+ * + * @author Oliver Drotbohm + */ +public class MyBatisJdbcConfigurationIntegrationTests extends AbstractJdbcConfigurationIntegrationTests { + + @Test // DATAJDBC-395 + public void bootstrapsMyBatisDataAccessStrategy() { + + assertApplicationContext(context -> { + + assertThat(context.getBean(DataAccessStrategy.class)) // + .isInstanceOfSatisfying(CascadingDataAccessStrategy.class, it -> { + + List strategies = (List) ReflectionTestUtils.getField(it, "strategies"); + + assertThat(strategies).hasSize(2); + assertThat(strategies.get(0)).isInstanceOf(MyBatisDataAccessStrategy.class); + }); + + }, MyBatisJdbcConfigurationUnderTest.class, MyBatisInfrastructure.class); + } + + @Configuration + static class MyBatisInfrastructure extends Infrastructure { + + @Bean + public SqlSession session() { + return mock(SqlSession.class); + } + } + + public static class MyBatisJdbcConfigurationUnderTest extends MyBatisJdbcConfiguration { + + @Override + @Bean + public Dialect jdbcDialect(NamedParameterJdbcOperations operations) { + return JdbcHsqlDbDialect.INSTANCE; + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/TopLevelEntity.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/TopLevelEntity.java new file mode 100644 index 0000000000..df26963f6c --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/config/TopLevelEntity.java @@ -0,0 +1,26 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.config; + +import org.springframework.data.relational.core.mapping.Table; + +/** + * Empty test entity annotated with {@code @Table}. + * + * @author Mark Paluch + */ +@Table +class TopLevelEntity {} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/EscapingParameterSourceUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/EscapingParameterSourceUnitTests.java new file mode 100644 index 0000000000..c6367cdb83 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/EscapingParameterSourceUnitTests.java @@ -0,0 +1,112 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.repository.query; + +import static org.assertj.core.api.Assertions.*; + +import java.sql.Types; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.data.relational.core.query.ValueFunction; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; + +/** + * Tests for the {@link EscapingParameterSource}. 
+ * + * @author Jens Schauder + */ +class EscapingParameterSourceUnitTests { + + MapSqlParameterSource delegate = new MapSqlParameterSource(); + Escaper escaper = Escaper.of('x'); + EscapingParameterSource escapingParameterSource = new EscapingParameterSource(delegate, escaper); + + @Nested + class EmptyParameterSource { + + @Test + void getParameterNames() { + assertThat(escapingParameterSource.getParameterNames()).isEmpty(); + } + + @Test + void hasValue() { + assertThat(escapingParameterSource.hasValue("one")).isFalse(); + } + + @Test + void getNonExistingValue() { + assertThatIllegalArgumentException().isThrownBy(() -> escapingParameterSource.getValue("two")); + } + + } + + @Nested + class NonEmptyParameterSource { + + @BeforeEach + void before() { + delegate.addValue("one", 1, Types.INTEGER); + delegate.registerTypeName("one", "integer"); + delegate.addValue("needsEscaping", (ValueFunction) escaper -> escaper.escape("a%a") + "%", Types.VARCHAR); + delegate.registerTypeName("needsEscaping", "varchar"); + } + + @Test + void getParameterNames() { + assertThat(escapingParameterSource.getParameterNames()).containsExactlyInAnyOrder("one", "needsEscaping"); + } + + @Test + void hasValue() { + assertThat(escapingParameterSource.hasValue("one")).isTrue(); + assertThat(escapingParameterSource.hasValue("two")).isFalse(); + } + + @Test + void getNonExistingValue() { + assertThatIllegalArgumentException().isThrownBy(() -> escapingParameterSource.getValue("two")); + } + + @Test + void getValue() { + assertThat(escapingParameterSource.getValue("one")).isEqualTo(1); + } + + @Test + void getEscapedValue() { + assertThat(escapingParameterSource.getValue("needsEscaping")).isEqualTo("ax%a%"); + } + + @Test + void getSqlType() { + + assertThat(escapingParameterSource.getSqlType("one")).isEqualTo(Types.INTEGER); + assertThat(escapingParameterSource.getSqlType("needsEscaping")).isEqualTo(Types.VARCHAR); + } + + @Test + void getTypeName() { + + 
assertThat(escapingParameterSource.getTypeName("one")).isEqualTo("integer"); + assertThat(escapingParameterSource.getTypeName("needsEscaping")).isEqualTo("varchar"); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/JdbcQueryMethodUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/JdbcQueryMethodUnitTests.java new file mode 100644 index 0000000000..ef3b390d52 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/JdbcQueryMethodUnitTests.java @@ -0,0 +1,179 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.sql.ResultSet; +import java.util.Properties; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.repository.Lock; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.PropertiesBasedNamedQueries; +import org.springframework.data.util.TypeInformation; +import org.springframework.jdbc.core.RowMapper; + +/** + * Unit tests for {@link JdbcQueryMethod}. + * + * @author Jens Schauder + * @author Oliver Gierke + * @author Moises Cisneros + * @author Mark Paluch + * @author Diego Krupitza + */ +public class JdbcQueryMethodUnitTests { + + public static final String QUERY_NAME = "DUMMY.SELECT"; + public static final String QUERY = "SELECT something"; + public static final String METHOD_WITHOUT_QUERY_ANNOTATION = "methodWithImplicitlyNamedQuery"; + public static final String QUERY2 = "SELECT something NAME AND VALUE"; + + JdbcMappingContext mappingContext = new JdbcMappingContext(); + NamedQueries namedQueries; + RepositoryMetadata metadata; + + @BeforeEach + public void before() { + + Properties properties = new Properties(); + properties.setProperty(QUERY_NAME, QUERY); + // String is used as domain class because the methods used for testing aren't part of a repository and therefore the + // return type is used as the domain type. + properties.setProperty("String." 
+ METHOD_WITHOUT_QUERY_ANNOTATION, QUERY2); + namedQueries = new PropertiesBasedNamedQueries(properties); + + metadata = mock(RepositoryMetadata.class); + doReturn(String.class).when(metadata).getReturnedDomainClass(any(Method.class)); + doReturn(TypeInformation.of(String.class)).when(metadata).getReturnType(any(Method.class)); + } + + @Test // DATAJDBC-165 + public void returnsSqlStatement() throws NoSuchMethodException { + + JdbcQueryMethod queryMethod = createJdbcQueryMethod("queryMethod"); + + assertThat(queryMethod.getDeclaredQuery()).isEqualTo(QUERY); + } + + @Test // DATAJDBC-165 + public void returnsSpecifiedRowMapperClass() throws NoSuchMethodException { + + JdbcQueryMethod queryMethod = createJdbcQueryMethod("queryMethod"); + + assertThat(queryMethod.getRowMapperClass()).isEqualTo(CustomRowMapper.class); + } + + @Test // DATAJDBC-234 + public void returnsSqlStatementName() throws NoSuchMethodException { + + JdbcQueryMethod queryMethod = createJdbcQueryMethod("queryMethodName"); + assertThat(queryMethod.getDeclaredQuery()).isEqualTo(QUERY); + + } + + @Test // DATAJDBC-234 + public void returnsSpecifiedSqlStatementIfNameAndValueAreGiven() throws NoSuchMethodException { + + JdbcQueryMethod queryMethod = createJdbcQueryMethod("queryMethodWithNameAndValue"); + assertThat(queryMethod.getDeclaredQuery()).isEqualTo(QUERY2); + + } + + private JdbcQueryMethod createJdbcQueryMethod(String methodName) throws NoSuchMethodException { + + Method method = JdbcQueryMethodUnitTests.class.getDeclaredMethod(methodName); + return new JdbcQueryMethod(method, metadata, mock(ProjectionFactory.class), namedQueries, mappingContext); + } + + @Test // DATAJDBC-234 + public void returnsImplicitlyNamedQuery() throws NoSuchMethodException { + + JdbcQueryMethod queryMethod = createJdbcQueryMethod("methodWithImplicitlyNamedQuery"); + assertThat(queryMethod.getDeclaredQuery()).isEqualTo(QUERY2); + } + + @Test // DATAJDBC-234 + public void returnsNullIfNoQueryIsFound() throws 
NoSuchMethodException { + + JdbcQueryMethod queryMethod = createJdbcQueryMethod("methodWithoutAnyQuery"); + assertThat(queryMethod.getDeclaredQuery()).isEqualTo(null); + } + + @Test // GH-1041 + void returnsQueryMethodWithCorrectLockTypeWriteLock() throws NoSuchMethodException { + + JdbcQueryMethod queryMethodWithWriteLock = createJdbcQueryMethod("queryMethodWithWriteLock"); + + assertThat(queryMethodWithWriteLock.lookupLockAnnotation()).isPresent(); + assertThat(queryMethodWithWriteLock.lookupLockAnnotation().get().value()).isEqualTo(LockMode.PESSIMISTIC_WRITE); + } + + @Test // GH-1041 + void returnsQueryMethodWithCorrectLockTypeReadLock() throws NoSuchMethodException { + + JdbcQueryMethod queryMethodWithReadLock = createJdbcQueryMethod("queryMethodWithReadLock"); + + assertThat(queryMethodWithReadLock.lookupLockAnnotation()).isPresent(); + assertThat(queryMethodWithReadLock.lookupLockAnnotation().get().value()).isEqualTo(LockMode.PESSIMISTIC_READ); + } + + @Test // GH-1041 + void returnsQueryMethodWithCorrectLockTypeNoLock() throws NoSuchMethodException { + + JdbcQueryMethod queryMethodWithWriteLock = createJdbcQueryMethod("queryMethodName"); + + assertThat(queryMethodWithWriteLock.lookupLockAnnotation()).isEmpty(); + } + + @Lock(LockMode.PESSIMISTIC_WRITE) + @Query + private void queryMethodWithWriteLock() {} + + @Lock(LockMode.PESSIMISTIC_READ) + @Query + private void queryMethodWithReadLock() {} + + @Query(value = QUERY, rowMapperClass = CustomRowMapper.class) + private void queryMethod() {} + + @Query(name = QUERY_NAME) + private void queryMethodName() {} + + @Query(value = QUERY2, name = QUERY_NAME) + private void queryMethodWithNameAndValue() {} + + private void methodWithImplicitlyNamedQuery() {} + + private void methodWithoutAnyQuery() {} + + private class CustomRowMapper implements RowMapper { + + @Override + public Object mapRow(ResultSet rs, int rowNum) { + return null; + } + } +} diff --git 
a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/PartTreeJdbcQueryUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/PartTreeJdbcQueryUnitTests.java new file mode 100644 index 0000000000..a941d1830c --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/PartTreeJdbcQueryUnitTests.java @@ -0,0 +1,807 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Properties; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.jdbc.core.convert.RelationResolver; +import org.springframework.data.jdbc.core.dialect.JdbcH2Dialect; +import org.springframework.data.jdbc.core.mapping.AggregateReference; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.data.relational.core.dialect.H2Dialect; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.MappedCollection; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.repository.Lock; +import org.springframework.data.relational.repository.query.RelationalParametersParameterAccessor; +import org.springframework.data.repository.NoRepositoryBean; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.core.support.PropertiesBasedNamedQueries; +import org.springframework.data.repository.query.ReturnedType; +import 
org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; + +/** + * Unit tests for {@link PartTreeJdbcQuery}. + * + * @author Roman Chigvintsev + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @author Diego Krupitza + */ +@ExtendWith(MockitoExtension.class) +public class PartTreeJdbcQueryUnitTests { + + private static final String TABLE = "\"users\""; + private static final String ALL_FIELDS = "\"users\".\"ID\" AS \"ID\", \"users\".\"AGE\" AS \"AGE\", \"users\".\"ACTIVE\" AS \"ACTIVE\", \"users\".\"LAST_NAME\" AS \"LAST_NAME\", \"users\".\"FIRST_NAME\" AS \"FIRST_NAME\", \"users\".\"DATE_OF_BIRTH\" AS \"DATE_OF_BIRTH\", \"users\".\"HOBBY_REFERENCE\" AS \"HOBBY_REFERENCE\", \"hated\".\"NAME\" AS \"HATED_NAME\", \"users\".\"USER_CITY\" AS \"USER_CITY\", \"users\".\"USER_STREET\" AS \"USER_STREET\""; + private static final String JOIN_CLAUSE = "FROM \"users\" LEFT OUTER JOIN \"HOBBY\" \"hated\" ON \"hated\".\"USERS\" = \"users\".\"ID\""; + private static final String BASE_SELECT = "SELECT " + ALL_FIELDS + " " + JOIN_CLAUSE; + + JdbcMappingContext mappingContext = new JdbcMappingContext(); + JdbcConverter converter = new MappingJdbcConverter(mappingContext, mock(RelationResolver.class)); + ReturnedType returnedType = mock(ReturnedType.class); + + @Test // DATAJDBC-318 + public void shouldFailForQueryByReference() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByHated", Hobby.class); + assertThatIllegalArgumentException().isThrownBy(() -> createQuery(queryMethod)); + } + + @Test // GH-922 + public void createQueryByAggregateReference() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByHobbyReference", Hobby.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + Hobby hobby = new Hobby(); + hobby.name = "twentythree"; + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { 
hobby }), returnedType); + + assertSoftly(softly -> { + + softly.assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"HOBBY_REFERENCE\" = :hobby_reference"); + + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("hobby_reference")).isEqualTo("twentythree"); + }); + } + + @Test // GH-922 + void createQueryWithPessimisticWriteLock() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameAndLastName", String.class, String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + + String firstname = "Diego"; + String lastname = "Krupitza"; + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { firstname, lastname }), + returnedType); + + assertSoftly(softly -> { + + softly.assertThat(query.getQuery().toUpperCase()).endsWith("FOR UPDATE"); + + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("first_name")).isEqualTo(firstname); + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("last_name")).isEqualTo(lastname); + }); + } + + @Test // GH-922 + void createQueryWithPessimisticReadLock() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameAndAge", String.class, Integer.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + + String firstname = "Diego"; + Integer age = 22; + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { firstname, age }), + returnedType); + + assertSoftly(softly -> { + + // this is also for update since h2 dialect does not distinguish between lockmodes + softly.assertThat(query.getQuery().toUpperCase()).endsWith("FOR UPDATE"); + + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("first_name")).isEqualTo(firstname); + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("age")).isEqualTo(age); + }); + } + + @Test // DATAJDBC-318 + public void shouldFailForQueryByList() 
throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByHobbies", Object.class); + assertThatIllegalArgumentException().isThrownBy(() -> createQuery(queryMethod)); + } + + @Test // DATAJDBC-318 + public void shouldFailForQueryByEmbeddedList() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findByAnotherEmbeddedList", Object.class); + assertThatIllegalArgumentException().isThrownBy(() -> createQuery(queryMethod)); + } + + @Test // GH-922 + public void createQueryForQueryByAggregateReference() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findViaReferenceByHobbyReference", AggregateReference.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + AggregateReference hobby = AggregateReference.to("twentythree"); + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { hobby }), returnedType); + + assertSoftly(softly -> { + + softly.assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"HOBBY_REFERENCE\" = :hobby_reference"); + + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("hobby_reference")).isEqualTo("twentythree"); + }); + } + + @Test // GH-922 + public void createQueryForQueryByAggregateReferenceId() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findViaIdByHobbyReference", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + String hobby = "twentythree"; + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { hobby }), returnedType); + + assertSoftly(softly -> { + + softly.assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"HOBBY_REFERENCE\" = :hobby_reference"); + + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("hobby_reference")).isEqualTo("twentythree"); + }); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttribute() throws Exception { + + 
JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { "John" }), returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" = :first_name"); + } + + @Test // GH-971 + public void createsQueryToFindAllEntitiesByProjectionAttribute() throws Exception { + + when(returnedType.needsCustomConstruction()).thenReturn(true); + when(returnedType.getInputProperties()).thenReturn(Collections.singletonList("firstName")); + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { "John" }), returnedType); + + assertThat(query.getQuery()).isEqualTo("SELECT " + TABLE + ".\"FIRST_NAME\" AS \"FIRST_NAME\" FROM \"users\"" + + " WHERE " + TABLE + ".\"FIRST_NAME\" = :first_name"); + } + + @Test // DATAJDBC-318 + public void createsQueryWithIsNullCondition() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery((getAccessor(queryMethod, new Object[] { null })), returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" IS NULL"); + } + + @Test // DATAJDBC-318 + public void createsQueryWithLimitForExistsProjection() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("existsByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery((getAccessor(queryMethod, new Object[] { "John" })), returnedType); + + assertThat(query.getQuery()).isEqualTo( + "SELECT " + TABLE + ".\"ID\" FROM " + TABLE + " WHERE 
" + TABLE + ".\"FIRST_NAME\" = :first_name LIMIT 1"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByTwoStringAttributes() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByLastNameAndFirstName", String.class, String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { "Doe", "John" }), + returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"LAST_NAME\" = :last_name AND (" + TABLE + + ".\"FIRST_NAME\" = :first_name)"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByOneOfTwoStringAttributes() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByLastNameOrFirstName", String.class, String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { "Doe", "John" }), + returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"LAST_NAME\" = :last_name OR (" + TABLE + + ".\"FIRST_NAME\" = :first_name)"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByDateAttributeBetween() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByDateOfBirthBetween", Date.class, Date.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + Date from = new Date(); + Date to = new Date(); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { from, to }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertSoftly(softly -> { + + softly.assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"DATE_OF_BIRTH\" BETWEEN :date_of_birth AND :date_of_birth1"); + + softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("date_of_birth")).isEqualTo(from); + 
softly.assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("date_of_birth1")).isEqualTo(to); + }); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeLessThan() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeLessThan", Integer.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { 30 }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" < :age"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeLessThanEqual() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeLessThanEqual", Integer.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { 30 }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" <= :age"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeGreaterThan() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeGreaterThan", Integer.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { 30 }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" > :age"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeGreaterThanEqual() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeGreaterThanEqual", Integer.class); + PartTreeJdbcQuery jdbcQuery = 
createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { 30 }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" >= :age"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByDateAttributeAfter() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByDateOfBirthAfter", Date.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { new Date() }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"DATE_OF_BIRTH\" > :date_of_birth"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByDateAttributeBefore() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByDateOfBirthBefore", Date.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { new Date() }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"DATE_OF_BIRTH\" < :date_of_birth"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeIsNull() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeIsNull"); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[0]); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" IS NULL"); + } + + @Test // DATAJDBC-318 + public void 
createsQueryToFindAllEntitiesByIntegerAttributeIsNotNull() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeIsNotNull"); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[0]); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" IS NOT NULL"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeLike() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameLike", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "%John%" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeNotLike() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameNotLike", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "%John%" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" NOT LIKE :first_name"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeStartingWith() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameStartingWith", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "Jo" }); + 
ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + } + + @Test // DATAJDBC-318 + public void appendsLikeOperatorParameterWithPercentSymbolForStartingWithQuery() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameStartingWith", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "Jo" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("first_name")).isEqualTo("Jo%"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeEndingWith() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameEndingWith", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "hn" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + } + + @Test // DATAJDBC-318 + public void prependsLikeOperatorParameterWithPercentSymbolForEndingWithQuery() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameEndingWith", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "hn" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + 
assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("first_name")).isEqualTo("%hn"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeContaining() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameContaining", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "oh" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + } + + @Test // DATAJDBC-318 + public void wrapsLikeOperatorParameterWithPercentSymbolsForContainingQuery() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameContaining", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "oh" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" LIKE :first_name"); + assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("first_name")).isEqualTo("%oh%"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeNotContaining() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameNotContaining", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "oh" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" NOT LIKE :first_name"); + } + + @Test // DATAJDBC-318 + public void 
wrapsLikeOperatorParameterWithPercentSymbolsForNotContainingQuery() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameNotContaining", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "oh" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" NOT LIKE :first_name"); + assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("first_name")).isEqualTo("%oh%"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeWithDescendingOrderingByStringAttribute() + throws Exception { + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeOrderByLastNameDesc", Integer.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { 123 }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" = :age ORDER BY \"users\".\"LAST_NAME\" DESC"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeWithAscendingOrderingByStringAttribute() throws Exception { + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeOrderByLastNameAsc", Integer.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { 123 }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" = :age ORDER BY \"users\".\"LAST_NAME\" ASC"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeNot() throws Exception { + 
JdbcQueryMethod queryMethod = getQueryMethod("findAllByLastNameNot", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "Doe" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"LAST_NAME\" != :last_name"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeIn() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeIn", Collection.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, + new Object[] { Collections.singleton(25) }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" IN (:age)"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByIntegerAttributeNotIn() throws Exception { + JdbcQueryMethod queryMethod = getQueryMethod("findAllByAgeNotIn", Collection.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, + new Object[] { Collections.singleton(25) }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"AGE\" NOT IN (:age)"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByBooleanAttributeTrue() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByActiveTrue"); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[0]); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + 
assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"ACTIVE\" = :active"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByBooleanAttributeFalse() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByActiveFalse"); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[0]); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()).isEqualTo(BASE_SELECT + " WHERE " + TABLE + ".\"ACTIVE\" = :active"); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindAllEntitiesByStringAttributeIgnoringCase() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstNameIgnoreCase", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "John" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + assertThat(query.getQuery()) + .isEqualTo(BASE_SELECT + " WHERE UPPER(" + TABLE + ".\"FIRST_NAME\") = UPPER(:first_name)"); + } + + @Test // DATAJDBC-318 + public void throwsExceptionWhenIgnoringCaseIsImpossible() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findByIdIgnoringCase", Long.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + + assertThatIllegalStateException() + .isThrownBy(() -> jdbcQuery.createQuery(getAccessor(queryMethod, new Object[] { 1L }), returnedType)); + } + + @Test // DATAJDBC-318 + public void throwsExceptionWhenConditionKeywordIsUnsupported() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByIdIsEmpty"); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + + assertThatIllegalArgumentException() + .isThrownBy(() -> jdbcQuery.createQuery(getAccessor(queryMethod, new Object[0]), returnedType)); + } + + @Test // 
DATAJDBC-318 + public void throwsExceptionWhenInvalidNumberOfParameterIsGiven() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findAllByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + + assertThatIllegalArgumentException() + .isThrownBy(() -> jdbcQuery.createQuery(getAccessor(queryMethod, new Object[0]), returnedType)); + } + + @Test // DATAJDBC-318 + public void createsQueryWithLimitToFindEntitiesByStringAttribute() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findTop3ByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "John" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + String expectedSql = BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" = :first_name LIMIT 3"; + assertThat(query.getQuery()).isEqualTo(expectedSql); + } + + @Test // DATAJDBC-318 + public void createsQueryToFindFirstEntityByStringAttribute() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findFirstByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "John" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + String expectedSql = BASE_SELECT + " WHERE " + TABLE + ".\"FIRST_NAME\" = :first_name LIMIT 1"; + assertThat(query.getQuery()).isEqualTo(expectedSql); + } + + @Test // DATAJDBC-318 + public void createsQueryByEmbeddedObject() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findByAddress", Address.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, + new Object[] { new Address("Hello", "World") }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, 
returnedType); + + String actualSql = query.getQuery(); + + assertThat(actualSql) // + .startsWith(BASE_SELECT + " WHERE (" + TABLE + ".\"USER_") // + .endsWith(")") // + .contains(TABLE + ".\"USER_STREET\" = :user_street", // + " AND ", // + TABLE + ".\"USER_CITY\" = :user_city"); + assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("user_street")).isEqualTo("Hello"); + assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("user_city")).isEqualTo("World"); + } + + @Test // DATAJDBC-318 + public void createsQueryByEmbeddedProperty() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("findByAddressStreet", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, new Object[] { "Hello" }); + ParametrizedQuery query = jdbcQuery.createQuery(accessor, returnedType); + + String expectedSql = BASE_SELECT + " WHERE " + TABLE + ".\"USER_STREET\" = :user_street"; + + assertThat(query.getQuery()).isEqualTo(expectedSql); + assertThat(query.getParameterSource(Escaper.DEFAULT).getValue("user_street")).isEqualTo("Hello"); + } + + @Test // DATAJDBC-534 + public void createsQueryForCountProjection() throws Exception { + + JdbcQueryMethod queryMethod = getQueryMethod("countByFirstName", String.class); + PartTreeJdbcQuery jdbcQuery = createQuery(queryMethod); + ParametrizedQuery query = jdbcQuery.createQuery((getAccessor(queryMethod, new Object[] { "John" })), returnedType); + + assertThat(query.getQuery()) + .isEqualTo("SELECT COUNT(*) FROM " + TABLE + " WHERE " + TABLE + ".\"FIRST_NAME\" = :first_name"); + } + + private PartTreeJdbcQuery createQuery(JdbcQueryMethod queryMethod) { + return new PartTreeJdbcQuery(mappingContext, queryMethod, JdbcH2Dialect.INSTANCE, converter, + mock(NamedParameterJdbcOperations.class), mock(RowMapper.class)); + } + + private JdbcQueryMethod getQueryMethod(String methodName, Class... 
parameterTypes) throws Exception { + Method method = UserRepository.class.getMethod(methodName, parameterTypes); + return new JdbcQueryMethod(method, new DefaultRepositoryMetadata(UserRepository.class), + new SpelAwareProxyProjectionFactory(), new PropertiesBasedNamedQueries(new Properties()), mappingContext); + } + + private RelationalParametersParameterAccessor getAccessor(JdbcQueryMethod queryMethod, Object[] values) { + return new RelationalParametersParameterAccessor(queryMethod, values); + } + + @NoRepositoryBean + interface UserRepository extends Repository { + + @Lock(LockMode.PESSIMISTIC_WRITE) + List findAllByFirstNameAndLastName(String firstName, String lastName); + + @Lock(LockMode.PESSIMISTIC_READ) + List findAllByFirstNameAndAge(String firstName, Integer age); + + List findAllByFirstName(String firstName); + + List findAllByHated(Hobby hobby); + + List findAllByHatedName(String name); + + List findAllByHobbies(Object hobbies); + + List findAllByHobbyReference(Hobby hobby); + + List findViaReferenceByHobbyReference(AggregateReference hobby); + + List findViaIdByHobbyReference(String hobby); + + List findAllByLastNameAndFirstName(String lastName, String firstName); + + List findAllByLastNameOrFirstName(String lastName, String firstName); + + Boolean existsByFirstName(String firstName); + + List findAllByDateOfBirthBetween(Date from, Date to); + + List findAllByAgeLessThan(Integer age); + + List findAllByAgeLessThanEqual(Integer age); + + List findAllByAgeGreaterThan(Integer age); + + List findAllByAgeGreaterThanEqual(Integer age); + + List findAllByDateOfBirthAfter(Date date); + + List findAllByDateOfBirthBefore(Date date); + + List findAllByAgeIsNull(); + + List findAllByAgeIsNotNull(); + + List findAllByFirstNameLike(String like); + + List findAllByFirstNameNotLike(String like); + + List findAllByFirstNameStartingWith(String starting); + + List findAllByFirstNameEndingWith(String ending); + + List findAllByFirstNameContaining(String containing); + + 
List findAllByFirstNameNotContaining(String notContaining); + + List findAllByAgeOrderByLastNameAsc(Integer age); + + List findAllByAgeOrderByLastNameDesc(Integer age); + + List findAllByLastNameNot(String lastName); + + List findAllByAgeIn(Collection ages); + + List findAllByAgeNotIn(Collection ages); + + List findAllByActiveTrue(); + + List findAllByActiveFalse(); + + List findAllByFirstNameIgnoreCase(String firstName); + + User findByIdIgnoringCase(Long id); + + List findAllByIdIsEmpty(); + + List findTop3ByFirstName(String firstName); + + User findFirstByFirstName(String firstName); + + User findByAddress(Address address); + + User findByAddressStreet(String street); + + User findByAnotherEmbeddedList(Object list); + + long countByFirstName(String name); + } + + @Table("users") + static class User { + + @Id Long id; + String firstName; + String lastName; + Date dateOfBirth; + Integer age; + Boolean active; + + @Embedded(prefix = "user_", onEmpty = Embedded.OnEmpty.USE_NULL) Address address; + @Embedded.Nullable AnotherEmbedded anotherEmbedded; + + List hobbies; + Hobby hated; + + AggregateReference hobbyReference; + } + + record Address(String street, String city) { + } + + record AnotherEmbedded(@MappedCollection(idColumn = "ID", keyColumn = "ORDER_KEY") List list) { + } + + static class Hobby { + @Id String name; + } +} diff --git a/src/test/java/org/springframework/data/jdbc/repository/query/QueryAnnotationHsqlIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/QueryAnnotationHsqlIntegrationTests.java similarity index 70% rename from src/test/java/org/springframework/data/jdbc/repository/query/QueryAnnotationHsqlIntegrationTests.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/QueryAnnotationHsqlIntegrationTests.java index b227f7ab27..bd322ee8aa 100644 --- a/src/test/java/org/springframework/data/jdbc/repository/query/QueryAnnotationHsqlIntegrationTests.java +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/query/QueryAnnotationHsqlIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ package org.springframework.data.jdbc.repository.query; import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; import java.time.LocalDateTime; import java.util.Date; @@ -23,38 +24,44 @@ import java.util.Optional; import java.util.stream.Stream; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.Import; import org.springframework.dao.DataAccessException; import org.springframework.data.annotation.Id; import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; +import org.springframework.data.jdbc.testing.DatabaseType; +import org.springframework.data.jdbc.testing.EnabledOnDatabase; +import org.springframework.data.jdbc.testing.IntegrationTest; import org.springframework.data.jdbc.testing.TestConfiguration; import org.springframework.data.repository.CrudRepository; import org.springframework.data.repository.query.Param; -import 
org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.transaction.annotation.Transactional; +import org.springframework.lang.Nullable; /** * Tests the execution of queries from {@link Query} annotations on repository methods. * * @author Jens Schauder * @author Kazuki Shimizu + * @author Mark Paluch + * @author Dennis Effing */ -@ContextConfiguration -@Transactional +@IntegrationTest +@EnabledOnDatabase(DatabaseType.HSQL) public class QueryAnnotationHsqlIntegrationTests { - @Autowired DummyEntityRepository repository; + @Configuration + @Import(TestConfiguration.class) + @EnableJdbcRepositories(considerNestedRepositories = true, + includeFilters = @ComponentScan.Filter(value = DummyEntityRepository.class, type = FilterType.ASSIGNABLE_TYPE)) + static class Config { - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); + } + + @Autowired DummyEntityRepository repository; @Test // DATAJDBC-164 public void executeCustomQueryWithoutParameter() { @@ -68,7 +75,6 @@ public void executeCustomQueryWithoutParameter() { assertThat(entities) // .extracting(e -> e.name) // .containsExactlyInAnyOrder("Example", "EXAMPLE"); - } @Test // DATAJDBC-164 @@ -83,7 +89,6 @@ public void executeCustomQueryWithNamedParameters() { assertThat(entities) // .extracting(e -> e.name) // .containsExactlyInAnyOrder("b"); - } @Test // DATAJDBC-172 @@ -94,7 +99,6 @@ public void executeCustomQueryWithReturnTypeIsOptional() { Optional entity = repository.findByNameAsOptional("a"); assertThat(entity).map(e -> e.name).contains("a"); - } @Test // DATAJDBC-172 @@ -105,7 +109,6 @@ public void executeCustomQueryWithReturnTypeIsOptionalWhenEntityNotFound() { Optional entity = repository.findByNameAsOptional("x"); 
assertThat(entity).isNotPresent(); - } @Test // DATAJDBC-172 @@ -117,7 +120,6 @@ public void executeCustomQueryWithReturnTypeIsEntity() { assertThat(entity).isNotNull(); assertThat(entity.name).isEqualTo("a"); - } @Test // DATAJDBC-172 @@ -128,7 +130,6 @@ public void executeCustomQueryWithReturnTypeIsEntityWhenEntityNotFound() { DummyEntity entity = repository.findByNameAsEntity("x"); assertThat(entity).isNull(); - } @Test // DATAJDBC-172 @@ -162,11 +163,25 @@ public void executeCustomQueryWithReturnTypeIsStream() { assertThat(entities) // .extracting(e -> e.name) // .containsExactlyInAnyOrder("a", "b"); + } + + @Test // GH-578 + public void executeCustomQueryWithNamedParameterAndReturnTypeIsStream() { + + repository.save(dummyEntity("a")); + repository.save(dummyEntity("b")); + repository.save(dummyEntity("c")); + + Stream entities = repository.findByNamedRangeWithNamedParameterAndReturnTypeIsStream("a", "c"); + + assertThat(entities) // + .extracting(e -> e.name) // + .containsExactlyInAnyOrder("b"); } @Test // DATAJDBC-175 - public void executeCustomQueryWithReturnTypeIsNubmer() { + public void executeCustomQueryWithReturnTypeIsNumber() { repository.save(dummyEntity("aaa")); repository.save(dummyEntity("bbb")); @@ -175,7 +190,6 @@ public void executeCustomQueryWithReturnTypeIsNubmer() { int count = repository.countByNameContaining("a"); assertThat(count).isEqualTo(2); - } @Test // DATAJDBC-175 @@ -185,26 +199,25 @@ public void executeCustomQueryWithReturnTypeIsBoolean() { repository.save(dummyEntity("bbb")); repository.save(dummyEntity("cac")); - assertThat(repository.existsByNameContaining("a")).isTrue(); - assertThat(repository.existsByNameContaining("d")).isFalse(); + assertSoftly(softly -> { + softly.assertThat(repository.existsByNameContaining("a")).describedAs("entities with A in the name").isTrue(); + softly.assertThat(repository.existsByNameContaining("d")).describedAs("entities with D in the name").isFalse(); + }); } @Test // DATAJDBC-175 public void 
executeCustomQueryWithReturnTypeIsDate() { - Date now = new Date(); - assertThat(repository.nowWithDate()).isAfterOrEqualsTo(now); - + assertThat(repository.nowWithDate()).isInstanceOf(Date.class); } @Test // DATAJDBC-175 public void executeCustomQueryWithReturnTypeIsLocalDateTimeList() { - LocalDateTime now = LocalDateTime.now(); - repository.nowWithLocalDateTimeList() // - .forEach(d -> assertThat(d).isAfterOrEqualTo(now)); - + assertThat(repository.nowWithLocalDateTimeList()) // + .hasSize(2) // + .allSatisfy(d -> assertThat(d).isInstanceOf(LocalDateTime.class)); } @Test // DATAJDBC-182 @@ -244,6 +257,12 @@ public void executeCustomModifyingQueryWithReturnTypeVoid() { assertThat(repository.findByNameAsEntity("Spring Data JDBC")).isNotNull(); } + @Test // DATAJDBC-175 + public void executeCustomQueryWithImmutableResultType() { + + assertThat(repository.immutableTuple()).isEqualTo(new DummyEntityRepository.ImmutableTuple("one", "two", 3)); + } + private DummyEntity dummyEntity(String name) { DummyEntity entity = new DummyEntity(); @@ -251,17 +270,6 @@ private DummyEntity dummyEntity(String name) { return entity; } - @Configuration - @Import(TestConfiguration.class) - @EnableJdbcRepositories(considerNestedRepositories = true) - static class Config { - - @Bean - Class testClass() { - return QueryAnnotationHsqlIntegrationTests.class; - } - } - private static class DummyEntity { @Id Long id; @@ -272,30 +280,35 @@ private static class DummyEntity { private interface DummyEntityRepository extends CrudRepository { // DATAJDBC-164 - @Query("SELECT * FROM DUMMYENTITY WHERE lower(name) <> name") + @Query("SELECT * FROM DUMMY_ENTITY WHERE lower(name) <> name") List findByNameContainingCapitalLetter(); // DATAJDBC-164 - @Query("SELECT * FROM DUMMYENTITY WHERE name < :upper and name > :lower") + @Query("SELECT * FROM DUMMY_ENTITY WHERE name < :upper and name > :lower") List findByNamedRangeWithNamedParameter(@Param("lower") String lower, @Param("upper") String upper); - 
@Query("SELECT * FROM DUMMYENTITY WHERE name = :name") + @Query("SELECT * FROM DUMMY_ENTITY WHERE name = :name") Optional findByNameAsOptional(@Param("name") String name); // DATAJDBC-172 - @Query("SELECT * FROM DUMMYENTITY WHERE name = :name") + @Nullable + @Query("SELECT * FROM DUMMY_ENTITY WHERE name = :name") DummyEntity findByNameAsEntity(@Param("name") String name); // DATAJDBC-172 - @Query("SELECT * FROM DUMMYENTITY") + @Query("SELECT * FROM DUMMY_ENTITY") Stream findAllWithReturnTypeIsStream(); + @Query("SELECT * FROM DUMMY_ENTITY WHERE name < :upper and name > :lower") + Stream findByNamedRangeWithNamedParameterAndReturnTypeIsStream(@Param("lower") String lower, + @Param("upper") String upper); + // DATAJDBC-175 - @Query("SELECT count(*) FROM DUMMYENTITY WHERE name like '%' || :name || '%'") + @Query("SELECT count(*) FROM DUMMY_ENTITY WHERE name like concat('%', :name, '%')") int countByNameContaining(@Param("name") String name); // DATAJDBC-175 - @Query("SELECT count(*) FROM DUMMYENTITY WHERE name like '%' || :name || '%'") + @Query("SELECT case when count(*) > 0 THEN 'true' ELSE 'false' END FROM DUMMY_ENTITY WHERE name like '%' || :name || '%'") boolean existsByNameContaining(@Param("name") String name); // DATAJDBC-175 @@ -308,18 +321,24 @@ private interface DummyEntityRepository extends CrudRepository defaultRowMapper; + NamedParameterJdbcOperations operations; + RelationalMappingContext context; + JdbcConverter converter; + ValueExpressionDelegate delegate; + + @BeforeEach + void setup() { + + this.defaultRowMapper = mock(RowMapper.class); + this.operations = mock(NamedParameterJdbcOperations.class); + this.context = mock(RelationalMappingContext.class, RETURNS_DEEP_STUBS); + this.converter = new MappingJdbcConverter(context, mock(RelationResolver.class)); + this.delegate = ValueExpressionDelegate.create(); + } + + @Test // DATAJDBC-165 + void emptyQueryThrowsException() { + + JdbcQueryMethod queryMethod = createMethod("noAnnotation"); + + 
Assertions.assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> createQuery(queryMethod).execute(new Object[] {})); + } + + @Test // DATAJDBC-165 + void defaultRowMapperIsUsedByDefault() { + + JdbcQueryMethod queryMethod = createMethod("findAll"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + assertThat(query.determineRowMapper(queryMethod.getResultProcessor(), false)).isEqualTo(defaultRowMapper); + } + + @Test // DATAJDBC-165, DATAJDBC-318 + void customRowMapperIsUsedWhenSpecified() { + + JdbcQueryMethod queryMethod = createMethod("findAllWithCustomRowMapper"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + assertThat(query.determineRowMapper(queryMethod.getResultProcessor(), false)).isInstanceOf(CustomRowMapper.class); + } + + @Test // DATAJDBC-290 + void customResultSetExtractorIsUsedWhenSpecified() { + + JdbcQueryMethod queryMethod = createMethod("findAllWithCustomResultSetExtractor"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + ResultSetExtractor resultSetExtractor1 = query.determineResultSetExtractor(() -> defaultRowMapper); + ResultSetExtractor resultSetExtractor2 = query.determineResultSetExtractor(() -> defaultRowMapper); + + assertThat(resultSetExtractor1) // + .isInstanceOf(CustomResultSetExtractor.class) // + .matches(crse -> ((CustomResultSetExtractor) crse).rowMapper == defaultRowMapper, + "RowMapper is expected to be default."); + + assertThat(resultSetExtractor1).isNotSameAs(resultSetExtractor2); + } + + @Test // GH-1721 + void cachesCustomMapperAndExtractorInstances() { + + JdbcQueryMethod queryMethod = createMethod("findAllCustomRowMapperResultSetExtractor"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + ResultSetExtractor resultSetExtractor1 = query.determineResultSetExtractor(() -> { + throw new UnsupportedOperationException(); + }); + + ResultSetExtractor resultSetExtractor2 = query.determineResultSetExtractor(() -> { + throw new 
UnsupportedOperationException(); + }); + + assertThat(resultSetExtractor1).isSameAs(resultSetExtractor2); + assertThat(resultSetExtractor1).extracting("rowMapper").isInstanceOf(CustomRowMapper.class); + + assertThat(ReflectionTestUtils.getField(resultSetExtractor1, "rowMapper")) + .isSameAs(ReflectionTestUtils.getField(resultSetExtractor2, "rowMapper")); + } + + @Test // GH-1721 + void obtainsCustomRowMapperRef() { + + CustomRowMapper customRowMapper = new CustomRowMapper(); + JdbcQueryMethod queryMethod = createMethod("findAllCustomRowMapperRef"); + StringBasedJdbcQuery query = createQuery(queryMethod, "CustomRowMapper", customRowMapper); + + RowMapper rowMapper = query.determineRowMapper(queryMethod.getResultProcessor(), false); + ResultSetExtractor resultSetExtractor = query.determineResultSetExtractor(() -> { + throw new UnsupportedOperationException(); + }); + + assertThat(rowMapper).isSameAs(customRowMapper); + assertThat(resultSetExtractor).isNull(); + } + + @Test // GH-1721 + void obtainsCustomResultSetExtractorRef() { + + CustomResultSetExtractor cre = new CustomResultSetExtractor(); + JdbcQueryMethod queryMethod = createMethod("findAllCustomResultSetExtractorRef"); + StringBasedJdbcQuery query = createQuery(queryMethod, "CustomResultSetExtractor", cre); + + RowMapper rowMapper = query.determineRowMapper(queryMethod.getResultProcessor(), false); + ResultSetExtractor resultSetExtractor = query.determineResultSetExtractor(() -> { + throw new UnsupportedOperationException(); + }); + + assertThat(rowMapper).isSameAs(defaultRowMapper); + assertThat(resultSetExtractor).isSameAs(cre); + } + + @Test // GH-1721 + void failsOnRowMapperRefAndClassDeclaration() { + assertThatIllegalArgumentException().isThrownBy(() -> createQuery(createMethod("invalidMapperRefAndClass"))) + .withMessageContaining("Invalid RowMapper configuration"); + } + + @Test // GH-1721 + void failsOnResultSetExtractorRefAndClassDeclaration() { + assertThatIllegalArgumentException().isThrownBy(() 
-> createQuery(createMethod("invalidExtractorRefAndClass"))) + .withMessageContaining("Invalid ResultSetExtractor configuration"); + } + + @Test // DATAJDBC-290 + void customResultSetExtractorAndRowMapperGetCombined() { + + JdbcQueryMethod queryMethod = createMethod("findAllWithCustomRowMapperAndResultSetExtractor"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + ResultSetExtractor resultSetExtractor = query + .determineResultSetExtractor(() -> query.determineRowMapper(queryMethod.getResultProcessor(), false)); + + assertThat(resultSetExtractor) // + .isInstanceOf(CustomResultSetExtractor.class) // + .matches(crse -> ((CustomResultSetExtractor) crse).rowMapper instanceof CustomRowMapper, + "RowMapper is not expected to be custom"); + } + + @Test // GH-578 + void streamQueryCallsQueryForStreamOnOperations() { + + JdbcQueryMethod queryMethod = createMethod("findAllWithStreamReturnType"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + query.execute(new Object[] {}); + + verify(operations).queryForStream(eq("some sql statement"), any(SqlParameterSource.class), any(RowMapper.class)); + } + + @Test // GH-578 + void streamQueryFallsBackToCollectionQueryWhenCustomResultSetExtractorIsSpecified() { + + JdbcQueryMethod queryMethod = createMethod("findAllWithStreamReturnTypeAndResultSetExtractor"); + StringBasedJdbcQuery query = createQuery(queryMethod); + + query.execute(new Object[] {}); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(ResultSetExtractor.class); + verify(operations).query(eq("some sql statement"), any(SqlParameterSource.class), captor.capture()); + assertThat(captor.getValue()).isInstanceOf(CustomResultSetExtractor.class); + } + + @Test // GH-774 + void sliceQueryNotSupported() { + + JdbcQueryMethod queryMethod = createMethod("sliceAll", Pageable.class); + + assertThatThrownBy( + () -> new StringBasedJdbcQuery(queryMethod, operations, result -> defaultRowMapper, converter, delegate)) + 
.isInstanceOf(UnsupportedOperationException.class) + .hasMessageContaining("Slice queries are not supported using string-based queries"); + } + + @Test // GH-774 + void pageQueryNotSupported() { + + JdbcQueryMethod queryMethod = createMethod("pageAll", Pageable.class); + + assertThatThrownBy( + () -> new StringBasedJdbcQuery(queryMethod, operations, result -> defaultRowMapper, converter, delegate)) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessageContaining("Page queries are not supported using string-based queries"); + } + + @Test // GH-1654 + void limitNotSupported() { + + JdbcQueryMethod queryMethod = createMethod("unsupportedLimitQuery", String.class, Limit.class); + + assertThatThrownBy( + () -> new StringBasedJdbcQuery(queryMethod, operations, result -> defaultRowMapper, converter, delegate)) + .isInstanceOf(UnsupportedOperationException.class); + } + + @Test // GH-1212 + void convertsEnumCollectionParameterIntoStringCollectionParameter() { + + SqlParameterSource sqlParameterSource = forMethod("findByEnumTypeIn", Set.class) + .withArguments(Set.of(Direction.LEFT, Direction.RIGHT)).extractParameterSource(); + + assertThat(sqlParameterSource.getValue("directions")).asList().containsExactlyInAnyOrder("LEFT", "RIGHT"); + } + + @Test // GH-1212 + void convertsEnumCollectionParameterUsingCustomConverterWhenRegisteredForType() { + + SqlParameterSource sqlParameterSource = forMethod("findByEnumTypeIn", Set.class) // + .withCustomConverters(DirectionToIntegerConverter.INSTANCE, IntegerToDirectionConverter.INSTANCE) + .withArguments(Set.of(Direction.LEFT, Direction.RIGHT)) // + .extractParameterSource(); + + assertThat(sqlParameterSource.getValue("directions")).asList().containsExactlyInAnyOrder(-1, 1); + } + + @Test // GH-1212 + void doesNotConvertNonCollectionParameter() { + + SqlParameterSource sqlParameterSource = forMethod("findBySimpleValue", Integer.class) // + .withArguments(1) // + .extractParameterSource(); + + 
assertThat(sqlParameterSource.getValue("value")).isEqualTo(1); + } + + @Test // GH-1343 + void appliesConverterToIterable() { + + SqlParameterSource sqlParameterSource = forMethod("findByListContainer", ListContainer.class) // + .withCustomConverters(ListContainerToStringConverter.INSTANCE) + .withArguments(new ListContainer("one", "two", "three")) // + .extractParameterSource(); + + assertThat(sqlParameterSource.getValue("value")).isEqualTo("one"); + } + + @Test // GH-1323 + void queryByListOfTuples() { + + String[][] tuples = { new String[] { "Albert", "Einstein" }, new String[] { "Richard", "Feynman" } }; + + SqlParameterSource parameterSource = forMethod("findByListOfTuples", List.class) // + .withArguments(Arrays.asList(tuples))// + .extractParameterSource(); + + assertThat(parameterSource.getValue("tuples")).asInstanceOf(LIST)// + .containsExactly(tuples); + + assertThat(parameterSource.getSqlType("tuples")).isEqualTo(JdbcUtil.TYPE_UNKNOWN.getVendorTypeNumber()); + } + + @Test // GH-1323 + void queryByListOfConvertableTuples() { + + SqlParameterSource parameterSource = forMethod("findByListOfTuples", List.class) // + .withCustomConverters(DirectionToIntegerConverter.INSTANCE) // + .withArguments( + Arrays.asList(new Object[] { Direction.LEFT, "Einstein" }, new Object[] { Direction.RIGHT, "Feynman" })) + .extractParameterSource(); + + assertThat(parameterSource.getValue("tuples")).asInstanceOf(LIST) // + .containsExactly(new Object[][] { new Object[] { -1, "Einstein" }, new Object[] { 1, "Feynman" } }); + } + + @Test // GH-619 + void spelCanBeUsedInsideQueries() { + + JdbcQueryMethod queryMethod = createMethod("findBySpelExpression", Object.class); + + List list = new ArrayList<>(); + list.add(new MyEvaluationContextProvider()); + + QueryMethodValueEvaluationContextAccessor accessor = new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), list); + this.delegate = new ValueExpressionDelegate(accessor, ValueExpressionParser.create()); + + 
StringBasedJdbcQuery sut = new StringBasedJdbcQuery(queryMethod, operations, result -> defaultRowMapper, converter, delegate); + + ArgumentCaptor paramSource = ArgumentCaptor.forClass(SqlParameterSource.class); + ArgumentCaptor query = ArgumentCaptor.forClass(String.class); + + sut.execute(new Object[] { "myValue" }); + + verify(this.operations).queryForObject(query.capture(), paramSource.capture(), any(RowMapper.class)); + + assertThat(query.getValue()) + .isEqualTo("SELECT * FROM table WHERE c = :__$synthetic$__1 AND c2 = :__$synthetic$__2"); + assertThat(paramSource.getValue().getValue("__$synthetic$__1")).isEqualTo("test-value1"); + assertThat(paramSource.getValue().getValue("__$synthetic$__2")).isEqualTo("test-value2"); + } + + QueryFixture forMethod(String name, Class... paramTypes) { + return new QueryFixture(createMethod(name, paramTypes)); + } + + private class QueryFixture { + + private final JdbcQueryMethod method; + private Object[] arguments; + private MappingJdbcConverter converter; + + public QueryFixture(JdbcQueryMethod method) { + this.method = method; + } + + public QueryFixture withArguments(Object... arguments) { + + this.arguments = arguments; + + return this; + } + + public SqlParameterSource extractParameterSource() { + + MappingJdbcConverter converter = this.converter == null // + ? 
new MappingJdbcConverter(mock(RelationalMappingContext.class), // + mock(RelationResolver.class)) + : this.converter; + + StringBasedJdbcQuery query = new StringBasedJdbcQuery(method.getDeclaredQuery(), method, operations, result -> mock(RowMapper.class), + converter, delegate); + + query.execute(arguments); + + ArgumentCaptor captor = ArgumentCaptor.forClass(SqlParameterSource.class); + verify(operations).queryForObject(anyString(), captor.capture(), any(RowMapper.class)); + + return captor.getValue(); + } + + public QueryFixture withConverter(MappingJdbcConverter converter) { + + this.converter = converter; + + return this; + } + + public QueryFixture withCustomConverters(Object... converters) { + + return withConverter(new MappingJdbcConverter(mock(RelationalMappingContext.class), mock(RelationResolver.class), + new JdbcCustomConversions(List.of(converters)), JdbcTypeFactory.unsupported())); + } + } + + private JdbcQueryMethod createMethod(String methodName, Class... paramTypes) { + + Method method = ReflectionUtils.findMethod(MyRepository.class, methodName, paramTypes); + return new JdbcQueryMethod(method, new DefaultRepositoryMetadata(MyRepository.class), + new SpelAwareProxyProjectionFactory(), new PropertiesBasedNamedQueries(new Properties()), this.context); + } + + private StringBasedJdbcQuery createQuery(JdbcQueryMethod queryMethod) { + return createQuery(queryMethod, null, null); + } + + private StringBasedJdbcQuery createQuery(JdbcQueryMethod queryMethod, String preparedReference, Object value) { + return new StringBasedJdbcQuery(queryMethod, operations, new StubRowMapperFactory(preparedReference, value), converter, delegate); + } + + interface MyRepository extends Repository { + + @Query(value = "some sql statement") + List findAll(); + + @Query(value = "some sql statement", rowMapperClass = CustomRowMapper.class) + List findAllWithCustomRowMapper(); + + @Query(value = "some sql statement", resultSetExtractorClass = CustomResultSetExtractor.class) + 
List findAllWithCustomResultSetExtractor(); + + @Query(value = "some sql statement", rowMapperClass = CustomRowMapper.class, + resultSetExtractorClass = CustomResultSetExtractor.class) + List findAllWithCustomRowMapperAndResultSetExtractor(); + + @Query(value = "some sql statement") + Stream findAllWithStreamReturnType(); + + @Query(value = "some sql statement", resultSetExtractorClass = CustomResultSetExtractor.class) + Stream findAllWithStreamReturnTypeAndResultSetExtractor(); + + @Query(value = "some sql statement", rowMapperClass = CustomRowMapper.class, + resultSetExtractorClass = CustomResultSetExtractor.class) + Stream findAllCustomRowMapperResultSetExtractor(); + + @Query(value = "some sql statement", rowMapperRef = "CustomRowMapper") + Stream findAllCustomRowMapperRef(); + + @Query(value = "some sql statement", resultSetExtractorRef = "CustomResultSetExtractor") + Stream findAllCustomResultSetExtractorRef(); + + @Query(value = "some sql statement", rowMapperRef = "CustomResultSetExtractor", + rowMapperClass = CustomRowMapper.class) + Stream invalidMapperRefAndClass(); + + @Query(value = "some sql statement", resultSetExtractorRef = "CustomResultSetExtractor", + resultSetExtractorClass = CustomResultSetExtractor.class) + Stream invalidExtractorRefAndClass(); + + List noAnnotation(); + + @Query(value = "some sql statement") + Page pageAll(Pageable pageable); + + @Query(value = "some sql statement") + Slice sliceAll(Pageable pageable); + + @Query(value = "some sql statement") + List findByEnumTypeIn(Set directions); + + @Query(value = "some sql statement") + List findBySimpleValue(Integer value); + + @Query(value = "some sql statement") + List findByListContainer(ListContainer value); + + @Query("SELECT * FROM table WHERE c = :#{myext.testValue} AND c2 = :#{myext.doSomething()}") + Object findBySpelExpression(Object object); + + @Query("SELECT * FROM person WHERE lastname = $1") + Object unsupportedLimitQuery(@Param("lastname") String lastname, Limit limit); 
+ + @Query("select count(1) from person where (firstname, lastname) in (:tuples)") + Object findByListOfTuples(@Param("tuples") List tuples); + } + + private static class CustomRowMapper implements RowMapper { + + @Override + public Object mapRow(ResultSet rs, int rowNum) { + return null; + } + } + + private static class CustomResultSetExtractor implements ResultSetExtractor { + + private final RowMapper rowMapper; + + CustomResultSetExtractor() { + rowMapper = null; + } + + public CustomResultSetExtractor(RowMapper rowMapper) { + + this.rowMapper = rowMapper; + } + + @Override + public Object extractData(ResultSet rs) throws DataAccessException { + return null; + } + } + + private enum Direction { + LEFT, CENTER, RIGHT + } + + @WritingConverter + enum DirectionToIntegerConverter implements Converter { + + INSTANCE; + + @Override + public JdbcValue convert(Direction source) { + + int integer = switch (source) { + case LEFT -> -1; + case CENTER -> 0; + case RIGHT -> 1; + }; + return JdbcValue.of(integer, JDBCType.INTEGER); + } + } + + @ReadingConverter + enum IntegerToDirectionConverter implements Converter { + + INSTANCE; + + @Override + public Direction convert(Integer source) { + + if (source == 0) { + return Direction.CENTER; + } else if (source < 0) { + return Direction.LEFT; + } else { + return Direction.RIGHT; + } + } + } + + static class ListContainer implements Iterable { + + private final List values; + + ListContainer(String... 
values) { + this.values = List.of(values); + } + + @Override + public Iterator iterator() { + return values.iterator(); + } + } + + @WritingConverter + enum ListContainerToStringConverter implements Converter { + + INSTANCE; + + @Override + public String convert(ListContainer source) { + return source.values.get(0); + } + } + + private static class DummyEntity { + private final Long id; + + public DummyEntity(Long id) { + this.id = id; + } + + Long getId() { + return id; + } + } + + // DATAJDBC-397 + static class MyEvaluationContextProvider implements EvaluationContextExtension { + @Override + public String getExtensionId() { + return "myext"; + } + + public static class ExtensionRoot { + public String getTestValue() { + return "test-value1"; + } + + public String doSomething() { + return "test-value2"; + } + } + + @Override + public Object getRootObject() { + return new ExtensionRoot(); + } + } + + private class StubRowMapperFactory implements AbstractJdbcQuery.RowMapperFactory { + + private final String preparedReference; + private final Object value; + + StubRowMapperFactory(String preparedReference, Object value) { + this.preparedReference = preparedReference; + this.value = value; + } + + @Override + public RowMapper create(Class result) { + return defaultRowMapper; + } + + @Override + public RowMapper getRowMapper(String reference) { + + if (preparedReference.equals(reference)) { + return (RowMapper) value; + } + return AbstractJdbcQuery.RowMapperFactory.super.getRowMapper(reference); + } + + @Override + public ResultSetExtractor getResultSetExtractor(String reference) { + + if (preparedReference.equals(reference)) { + return (ResultSetExtractor) value; + } + return AbstractJdbcQuery.RowMapperFactory.super.getResultSetExtractor(reference); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategyUnitTests.java 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategyUnitTests.java new file mode 100644 index 0000000000..80ec8594b4 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategyUnitTests.java @@ -0,0 +1,179 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.text.NumberFormat; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.jdbc.core.dialect.JdbcH2Dialect; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.data.jdbc.repository.config.DefaultQueryMappingConfiguration; +import org.springframework.data.jdbc.repository.query.Query; +import org.springframework.data.mapping.callback.EntityCallbacks; +import 
org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.relational.core.dialect.H2Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.util.TypeInformation; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.util.ReflectionUtils; + +/** + * Unit tests for {@link JdbcQueryLookupStrategy}. + * + * @author Jens Schauder + * @author Oliver Gierke + * @author Mark Paluch + * @author Maciej Walkowiak + * @author Evgeni Dimitrov + * @author Mark Paluch + * @author Hebert Coelho + * @author Diego Krupitza + * @author Christopher Klein + */ +class JdbcQueryLookupStrategyUnitTests { + + private ApplicationEventPublisher publisher = mock(ApplicationEventPublisher.class); + private EntityCallbacks callbacks = mock(EntityCallbacks.class); + private RelationalMappingContext mappingContext = mock(RelationalMappingContext.class, RETURNS_DEEP_STUBS); + private JdbcConverter converter = mock(JdbcConverter.class); + private ProjectionFactory projectionFactory = mock(ProjectionFactory.class); + private RepositoryMetadata metadata; + private NamedQueries namedQueries = mock(NamedQueries.class); + private NamedParameterJdbcOperations operations = mock(NamedParameterJdbcOperations.class); + QueryMethodEvaluationContextProvider evaluationContextProvider = 
mock(QueryMethodEvaluationContextProvider.class); + + @BeforeEach + void setup() { + + this.metadata = mock(RepositoryMetadata.class); + + doReturn(NumberFormat.class).when(metadata).getReturnedDomainClass(any(Method.class)); + doReturn(TypeInformation.of(NumberFormat.class)).when(metadata).getReturnType(any(Method.class)); + doReturn(TypeInformation.of(NumberFormat.class)).when(metadata).getDomainTypeInformation(); + } + + @Test // DATAJDBC-166 + @SuppressWarnings("unchecked") + void typeBasedRowMapperGetsUsedForQuery() { + + RowMapper numberFormatMapper = mock(RowMapper.class); + QueryMappingConfiguration mappingConfiguration = new DefaultQueryMappingConfiguration() + .registerRowMapper(NumberFormat.class, numberFormatMapper); + + RepositoryQuery repositoryQuery = getRepositoryQuery(QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND, + "returningNumberFormat", mappingConfiguration); + + repositoryQuery.execute(new Object[] {}); + + verify(operations).queryForObject(anyString(), any(SqlParameterSource.class), any(RowMapper.class)); + } + + @Test // GH-1061 + void prefersDeclaredQuery() { + + RowMapper numberFormatMapper = mock(RowMapper.class); + QueryMappingConfiguration mappingConfiguration = new DefaultQueryMappingConfiguration() + .registerRowMapper(NumberFormat.class, numberFormatMapper); + + RepositoryQuery repositoryQuery = getRepositoryQuery(QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND, + "annotatedQueryWithQueryAndQueryName", mappingConfiguration); + + repositoryQuery.execute(new Object[] {}); + + verify(operations).queryForObject(eq("some SQL"), any(SqlParameterSource.class), any(RowMapper.class)); + } + + @Test // GH-1043 + void shouldFailOnMissingDeclaredQuery() { + + RowMapper numberFormatMapper = mock(RowMapper.class); + QueryMappingConfiguration mappingConfiguration = new DefaultQueryMappingConfiguration() + .registerRowMapper(NumberFormat.class, numberFormatMapper); + + assertThatThrownBy( + () -> 
getRepositoryQuery(QueryLookupStrategy.Key.USE_DECLARED_QUERY, "findByName", mappingConfiguration)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Did neither find a NamedQuery nor an annotated query for method") + .hasMessageContaining("findByName"); + } + + @ParameterizedTest + @MethodSource("correctLookUpStrategyForKeySource") + void correctLookUpStrategyForKey(QueryLookupStrategy.Key key, Class expectedClass) { + + RowMapper numberFormatMapper = mock(RowMapper.class); + QueryMappingConfiguration mappingConfiguration = new DefaultQueryMappingConfiguration() + .registerRowMapper(NumberFormat.class, numberFormatMapper); + + QueryLookupStrategy queryLookupStrategy = JdbcQueryLookupStrategy.create(key, publisher, callbacks, mappingContext, + converter, JdbcH2Dialect.INSTANCE, mappingConfiguration, operations, null, ValueExpressionDelegate.create()); + + assertThat(queryLookupStrategy).isInstanceOf(expectedClass); + } + + private static Stream correctLookUpStrategyForKeySource() { + + return Stream.of( // + Arguments.of(QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND, + JdbcQueryLookupStrategy.CreateIfNotFoundQueryLookupStrategy.class), // + Arguments.of(QueryLookupStrategy.Key.CREATE, JdbcQueryLookupStrategy.CreateQueryLookupStrategy.class), // + Arguments.of(QueryLookupStrategy.Key.USE_DECLARED_QUERY, + JdbcQueryLookupStrategy.DeclaredQueryLookupStrategy.class) // + ); + } + + private RepositoryQuery getRepositoryQuery(QueryLookupStrategy.Key key, String name, + QueryMappingConfiguration mappingConfiguration) { + + QueryLookupStrategy queryLookupStrategy = JdbcQueryLookupStrategy.create(key, publisher, callbacks, mappingContext, + converter, JdbcH2Dialect.INSTANCE, mappingConfiguration, operations, null, ValueExpressionDelegate.create()); + + Method method = ReflectionUtils.findMethod(MyRepository.class, name); + return queryLookupStrategy.resolveQuery(method, metadata, projectionFactory, namedQueries); + } + + interface MyRepository { + + // 
NumberFormat is just used as an arbitrary non simple type. + @Query("some SQL") + NumberFormat returningNumberFormat(); + + @Query(value = "some SQL", name = "query-name") + void annotatedQueryWithQueryAndQueryName(); + + NumberFormat findByName(); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBeanUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBeanUnitTests.java new file mode 100644 index 0000000000..f065a248a1 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBeanUnitTests.java @@ -0,0 +1,137 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.function.Supplier; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.mockito.stubbing.Answer; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.ObjectProvider; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.annotation.Id; +import org.springframework.data.jdbc.core.convert.DataAccessStrategy; +import org.springframework.data.jdbc.core.convert.DefaultDataAccessStrategy; +import org.springframework.data.jdbc.core.convert.MappingJdbcConverter; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.repository.QueryMappingConfiguration; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.CrudRepository; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Tests the dependency injection for {@link JdbcRepositoryFactoryBean}. 
+ * + * @author Jens Schauder + * @author Greg Turnquist + * @author Christoph Strobl + * @author Oliver Gierke + * @author Mark Paluch + * @author Evgeni Dimitrov + */ +@MockitoSettings(strictness = Strictness.LENIENT) +@ExtendWith(MockitoExtension.class) +public class JdbcRepositoryFactoryBeanUnitTests { + + JdbcRepositoryFactoryBean factoryBean; + + @Mock DataAccessStrategy dataAccessStrategy; + @Mock ApplicationEventPublisher publisher; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) ListableBeanFactory beanFactory; + @Mock Dialect dialect; + + RelationalMappingContext mappingContext; + + @BeforeEach + public void setUp() { + + this.mappingContext = new JdbcMappingContext(); + + // Setup standard configuration + factoryBean = new JdbcRepositoryFactoryBean<>(DummyEntityRepository.class); + + when(beanFactory.getBean(NamedParameterJdbcOperations.class)).thenReturn(mock(NamedParameterJdbcOperations.class)); + + ObjectProvider provider = mock(ObjectProvider.class); + when(beanFactory.getBeanProvider(DataAccessStrategy.class)).thenReturn(provider); + when(provider.getIfAvailable(any())) + .then((Answer) invocation -> ((Supplier) invocation.getArgument(0)).get()); + } + + @Test // DATAJDBC-151 + public void setsUpBasicInstanceCorrectly() { + + factoryBean.setDataAccessStrategy(dataAccessStrategy); + factoryBean.setMappingContext(mappingContext); + factoryBean.setConverter(new MappingJdbcConverter(mappingContext, dataAccessStrategy)); + factoryBean.setApplicationEventPublisher(publisher); + factoryBean.setBeanFactory(beanFactory); + factoryBean.setDialect(dialect); + factoryBean.afterPropertiesSet(); + + assertThat(factoryBean.getObject()).isNotNull(); + } + + @Test // DATAJDBC-151 + public void requiresListableBeanFactory() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> factoryBean.setBeanFactory(mock(BeanFactory.class))); + } + + @Test // DATAJDBC-155 + public void afterPropertiesThrowsExceptionWhenNoMappingContextSet() { + + 
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> factoryBean.setMappingContext(null)); + } + + @Test // DATAJDBC-155 + public void afterPropertiesSetDefaultsNullablePropertiesCorrectly() { + + factoryBean.setMappingContext(mappingContext); + factoryBean.setConverter(new MappingJdbcConverter(mappingContext, dataAccessStrategy)); + factoryBean.setApplicationEventPublisher(publisher); + factoryBean.setBeanFactory(beanFactory); + factoryBean.setDialect(dialect); + factoryBean.afterPropertiesSet(); + + assertThat(factoryBean.getObject()).isNotNull(); + assertThat(ReflectionTestUtils.getField(factoryBean, "dataAccessStrategy")) + .isInstanceOf(DefaultDataAccessStrategy.class); + assertThat(ReflectionTestUtils.getField(factoryBean, "queryMappingConfiguration")) + .isEqualTo(QueryMappingConfiguration.EMPTY); + } + + private static class DummyEntity { + + @Id private Long id; + } + + private interface DummyEntityRepository extends CrudRepository {} +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/SimpleJdbcRepositoryUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/SimpleJdbcRepositoryUnitTests.java new file mode 100644 index 0000000000..f39fe0c63b --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/repository/support/SimpleJdbcRepositoryUnitTests.java @@ -0,0 +1,54 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.jdbc.core.JdbcAggregateOperations; +import org.springframework.data.jdbc.core.convert.JdbcConverter; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; + +/** + * Unit tests for {@link SimpleJdbcRepository}. + * + * @author Oliver Gierke + */ +@ExtendWith(MockitoExtension.class) +public class SimpleJdbcRepositoryUnitTests { + + @Mock JdbcAggregateOperations operations; + @Mock RelationalPersistentEntity entity; + @Mock JdbcConverter converter; + + @Test // DATAJDBC-252 + public void saveReturnsEntityProducedByOperations() { + + SimpleJdbcRepository repository = new SimpleJdbcRepository<>(operations, entity, converter); + + Sample expected = new Sample(); + doReturn(expected).when(operations).save(any()); + + assertThat(repository.save(new Sample())).isEqualTo(expected); + } + + static class Sample {} +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/support/JdbcUtilTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/support/JdbcUtilTests.java new file mode 100644 index 0000000000..cc12dced85 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/support/JdbcUtilTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.support; + +import static org.assertj.core.api.Assertions.*; + +import java.sql.JDBCType; +import java.time.OffsetDateTime; + +import org.junit.jupiter.api.Test; + +/** + * Tests for {@link JdbcUtil}. + * + * @author Jens Schauder + */ +class JdbcUtilTests { + + @Test + void test() { + assertThat(JdbcUtil.targetSqlTypeFor(OffsetDateTime.class)).isEqualTo(JDBCType.TIMESTAMP_WITH_TIMEZONE); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/AssumeFeatureTestExecutionListener.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/AssumeFeatureTestExecutionListener.java new file mode 100644 index 0000000000..94db69c982 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/AssumeFeatureTestExecutionListener.java @@ -0,0 +1,57 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.testing; + +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.springframework.context.ApplicationContext; +import org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature; +import org.springframework.test.context.TestContext; +import org.springframework.test.context.TestExecutionListener; + +/** + * {@link TestExecutionListener} to evaluate {@link EnabledOnFeature} annotations. + * + * @author Jens Schauder + * @author Mark Paluch + */ +public class AssumeFeatureTestExecutionListener implements TestExecutionListener { + + @Override + public void beforeTestMethod(TestContext testContext) { + + ApplicationContext applicationContext = testContext.getApplicationContext(); + TestDatabaseFeatures databaseFeatures = applicationContext.getBean(TestDatabaseFeatures.class); + + Set requiredFeatures = new LinkedHashSet<>(); + + EnabledOnFeature classAnnotation = testContext.getTestClass().getAnnotation(EnabledOnFeature.class); + if (classAnnotation != null) { + requiredFeatures.addAll(Arrays.asList(classAnnotation.value())); + } + + EnabledOnFeature methodAnnotation = testContext.getTestMethod().getAnnotation(EnabledOnFeature.class); + if (methodAnnotation != null) { + requiredFeatures.addAll(Arrays.asList(methodAnnotation.value())); + } + + for (TestDatabaseFeatures.Feature requiredFeature : requiredFeatures) { + requiredFeature.test(databaseFeatures); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/CombiningActiveProfileResolver.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/CombiningActiveProfileResolver.java new file mode 100644 index 0000000000..51aa5e192f --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/CombiningActiveProfileResolver.java @@ -0,0 +1,72 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.testing; + +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.springframework.test.context.ActiveProfilesResolver; +import org.springframework.test.context.support.DefaultActiveProfilesResolver; + +/** + * A {@link ActiveProfilesResolver} combining the profile configurations from environment, system properties and + * {@link org.springframework.test.context.ActiveProfiles} annotations. 
+ * + * @author Jens Schauder + */ +class CombiningActiveProfileResolver implements ActiveProfilesResolver { + + private static final String SPRING_PROFILES_ACTIVE = "spring.profiles.active"; + private final DefaultActiveProfilesResolver defaultActiveProfilesResolver = new DefaultActiveProfilesResolver(); + + @Override + public String[] resolve(Class testClass) { + + Set combinedProfiles = new LinkedHashSet<>(); + + combinedProfiles.addAll(Arrays.asList(defaultActiveProfilesResolver.resolve(testClass))); + combinedProfiles.addAll(Arrays.asList(getSystemProfiles())); + combinedProfiles.addAll(Arrays.asList(getEnvironmentProfiles())); + + return combinedProfiles.toArray(new String[0]); + } + + private static String[] getSystemProfiles() { + + if (System.getProperties().containsKey(SPRING_PROFILES_ACTIVE)) { + + String profiles = System.getProperty(SPRING_PROFILES_ACTIVE); + return profiles.split("\\s*,\\s*"); + } + + return new String[0]; + } + + private String[] getEnvironmentProfiles() { + + if (System.getenv().containsKey(SPRING_PROFILES_ACTIVE)) { + + String profiles = System.getenv().get(SPRING_PROFILES_ACTIVE); + return profiles.split("\\s*,\\s*"); + } + + return new String[0]; + + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/ConditionalOnDatabase.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/ConditionalOnDatabase.java new file mode 100644 index 0000000000..225198dde8 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/ConditionalOnDatabase.java @@ -0,0 +1,48 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.context.annotation.Conditional; + +/** + * Indicates that a component is eligible for registration/evaluation when a profile for a {@link DatabaseType} is + * activated. + *

+ * This annotation can be used on Spring components and on tests to indicate that a test should be only run when the + * appropriate profile is activated. + * + * @author Mark Paluch + * @see Conditional + * @see DatabaseTypeCondition + */ +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Conditional(DatabaseTypeCondition.class) +public @interface ConditionalOnDatabase { + + /** + * Database type on which the annotated class should be enabled. + */ + DatabaseType value(); + +} diff --git a/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java similarity index 54% rename from src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java index 0de09dcbf7..3ad151d4dc 100644 --- a/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,18 @@ */ package org.springframework.data.jdbc.testing; +import static org.awaitility.pollinterval.FibonacciPollInterval.*; + +import java.sql.Connection; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; + import javax.sql.DataSource; -import org.springframework.beans.factory.annotation.Autowired; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.awaitility.Awaitility; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -33,27 +42,36 @@ * @author Jens Schauder * @author Oliver Gierke */ -@Configuration +@Configuration(proxyBeanMethods = false) abstract class DataSourceConfiguration { - @Autowired Class testClass; - @Autowired Environment environment; + private static final Log LOG = LogFactory.getLog(DataSourceConfiguration.class); + + private final TestClass testClass; + private final Environment environment; + + public DataSourceConfiguration(TestClass testClass, Environment environment) { + this.testClass = testClass; + this.environment = environment; + } @Bean DataSource dataSource() { - return createDataSource(); + DataSource dataSource = createDataSource(); + verifyConnection(dataSource); + return dataSource; } @Bean - DataSourceInitializer initializer() { + DataSourceInitializer initializer(DataSource dataSource) { DataSourceInitializer initializer = new DataSourceInitializer(); - initializer.setDataSource(dataSource()); + initializer.setDataSource(dataSource); String[] activeProfiles = environment.getActiveProfiles(); - String profile = activeProfiles.length == 0 ? 
"" : activeProfiles[0]; + String profile = getDatabaseProfile(activeProfiles); - ClassPathResource script = new ClassPathResource(TestUtils.createScriptName(testClass, profile)); + ClassPathResource script = new ClassPathResource(TestUtils.createScriptName(testClass.getTestClass(), profile)); ResourceDatabasePopulator populator = new ResourceDatabasePopulator(script); customizePopulator(populator); initializer.setDatabasePopulator(populator); @@ -61,6 +79,18 @@ DataSourceInitializer initializer() { return initializer; } + private static String getDatabaseProfile(String[] activeProfiles) { + + List validDbs = Arrays.stream(DatabaseType.values()).map(DatabaseType::getProfile).toList(); + for (String profile : activeProfiles) { + if (validDbs.contains(profile)) { + return profile; + } + } + + return ""; + } + /** * Return the {@link DataSource} to be exposed as a Spring bean. * @@ -76,4 +106,24 @@ DataSourceInitializer initializer() { * @param populator will never be {@literal null}. */ protected void customizePopulator(ResourceDatabasePopulator populator) {} + + private void verifyConnection(DataSource dataSource) { + + Awaitility.await() // + .atMost(5L, TimeUnit.MINUTES) // + .pollInterval(fibonacci(TimeUnit.SECONDS)) // + .ignoreExceptions() // + .until(() -> { + + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Verifying connectivity to %s...", dataSource)); + } + + try (Connection connection = dataSource.getConnection()) { + return true; + } + }); + + LOG.debug("Connectivity verified"); + } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DatabaseType.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DatabaseType.java new file mode 100644 index 0000000000..f72bcf0b38 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DatabaseType.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.util.Locale; + +/** + * Supported database types. Types are defined to express against which database a particular test is expected to run. + * + * @author Mark Paluch + * @author Jens Schauder + */ +public enum DatabaseType { + + DB2, HSQL, H2, MARIADB, MYSQL, ORACLE, POSTGRES, SQL_SERVER("mssql"); + + private final String profile; + + DatabaseType() { + this.profile = name().toLowerCase(Locale.ROOT); + } + + DatabaseType(String profile) { + this.profile = profile; + } + + /** + * @return the profile string as used in Spring profiles. + */ + public String getProfile() { + return profile; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DatabaseTypeCondition.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DatabaseTypeCondition.java new file mode 100644 index 0000000000..9db0e2af6a --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DatabaseTypeCondition.java @@ -0,0 +1,98 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import static org.assertj.core.api.Assumptions.*; + +import java.lang.reflect.AnnotatedElement; +import java.util.Optional; + +import org.junit.platform.commons.util.AnnotationUtils; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.annotation.Order; +import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.type.AnnotatedTypeMetadata; +import org.springframework.test.context.TestContext; +import org.springframework.test.context.TestExecutionListener; +import org.springframework.util.MultiValueMap; + +/** + * {@link Condition} and {@link TestExecutionListener} to test whether the required {@link DatabaseType} configuration + * has been configured. The usage through {@link Condition} requires an existing application context while the + * {@link TestExecutionListener} usage detects the activated profiles early to avoid expensive application context + * startup if the condition does not match. 
+ * + * @author Mark Paluch + */ +@Order(Integer.MIN_VALUE) +class DatabaseTypeCondition implements Condition, TestExecutionListener { + + @Override + public void prepareTestInstance(TestContext testContext) { + evaluate(testContext.getTestClass(), new StandardEnvironment(), true); + } + + @Override + public void beforeTestMethod(TestContext testContext) { + evaluate(testContext.getTestMethod(), + (ConfigurableEnvironment) testContext.getApplicationContext().getEnvironment(), false); + } + + private static void evaluate(AnnotatedElement element, ConfigurableEnvironment environment, + boolean enabledByDefault) { + + Optional databaseType = AnnotationUtils.findAnnotation(element, ConditionalOnDatabase.class) + .map(ConditionalOnDatabase::value); + + if (databaseType.isEmpty()) { + databaseType = AnnotationUtils.findAnnotation(element, EnabledOnDatabase.class).map(EnabledOnDatabase::value); + } + + if (databaseType.isPresent()) { + + DatabaseType type = databaseType.get(); + + if (enabledByDefault) { + EnabledOnDatabaseCustomizer.customizeEnvironment(environment, type); + } + + assumeThat(environment.getActiveProfiles()).as("Enabled profiles").contains(type.getProfile()); + } + } + + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + + MultiValueMap attrs = metadata.getAllAnnotationAttributes(ConditionalOnDatabase.class.getName()); + if (attrs != null) { + for (Object value : attrs.get("value")) { + + DatabaseType type = (DatabaseType) value; + + if (context.getEnvironment().matchesProfiles(type.getProfile())) { + return true; + } + } + + return false; + } + + return true; + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/Db2DataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/Db2DataSourceConfiguration.java new file mode 100644 index 0000000000..ea5af20600 --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/Db2DataSourceConfiguration.java @@ -0,0 +1,68 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.jdbc.datasource.DriverManagerDataSource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.testcontainers.containers.Db2Container; + +/** + * {@link DataSource} setup for DB2. 
+ * + * @author Jens Schauder + * @author Mark Paluch + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.DB2) +class Db2DataSourceConfiguration extends DataSourceConfiguration { + + public static final String DOCKER_IMAGE_NAME = "ibmcom/db2:11.5.7.0a"; + private static final Log LOG = LogFactory.getLog(Db2DataSourceConfiguration.class); + + private static Db2Container DB_2_CONTAINER; + + public Db2DataSourceConfiguration(TestClass testClass, Environment environment) { + super(testClass, environment); + } + + @Override + protected DataSource createDataSource() { + + if (DB_2_CONTAINER == null) { + + LOG.info("DB2 starting..."); + Db2Container container = new Db2Container(DOCKER_IMAGE_NAME).withReuse(true); + container.start(); + LOG.info("DB2 started"); + + DB_2_CONTAINER = container; + } + + return new DriverManagerDataSource(DB_2_CONTAINER.getJdbcUrl(), + DB_2_CONTAINER.getUsername(), DB_2_CONTAINER.getPassword()); + } + + @Override + protected void customizePopulator(ResourceDatabasePopulator populator) { + populator.setIgnoreFailedDrops(true); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnDatabase.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnDatabase.java new file mode 100644 index 0000000000..7e8ff4122f --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnDatabase.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.env.Environment; +import org.springframework.data.jdbc.testing.EnabledOnDatabaseCustomizer.EnabledOnDatabaseCustomizerFactory; +import org.springframework.data.jdbc.testing.TestClassCustomizer.TestClassCustomizerFactory; +import org.springframework.test.context.ContextCustomizerFactories; + +/** + * Selects a database configuration on which the test class is enabled. + *

+ * Using this annotation will enable the test configuration if no test environment is given. If a test environment is + * configured through {@link Environment#getActiveProfiles()}, then the test class will be skipped if the environment + * doesn't match the specified {@link DatabaseType}. + *

+ * If a test method is disabled via this annotation, that does not prevent the test class from being instantiated. + * Rather, it prevents the execution of the test method and method-level lifecycle callbacks such as {@code @BeforeEach} + * methods, {@code @AfterEach} methods, and corresponding extension APIs. When annotated on method and class level, all + * annotated features must match to run a test. + * + * @author Mark Paluch + * @see DatabaseTypeCondition + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +// required twice as the annotation lookup doesn't merge multiple occurrences of the same annotation +@ContextCustomizerFactories(value = { TestClassCustomizerFactory.class, EnabledOnDatabaseCustomizerFactory.class }) +@Documented +@Inherited +public @interface EnabledOnDatabase { + + /** + * Database type on which the annotated class should be enabled. + */ + DatabaseType value(); +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnDatabaseCustomizer.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnDatabaseCustomizer.java new file mode 100644 index 0000000000..a20436cbf4 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnDatabaseCustomizer.java @@ -0,0 +1,84 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.jdbc.testing; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.annotation.MergedAnnotation; +import org.springframework.core.annotation.MergedAnnotations; +import org.springframework.core.annotation.MergedAnnotations.SearchStrategy; +import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.test.context.ContextConfigurationAttributes; +import org.springframework.test.context.ContextCustomizer; +import org.springframework.test.context.ContextCustomizerFactory; +import org.springframework.test.context.MergedContextConfiguration; + +/** + * {@link ContextCustomizer} to select a specific configuration profile based on the {@code @EnabledOnDatabase} + * annotation. + * + * @author Mark Paluch + * @see EnabledOnDatabase + */ +public class EnabledOnDatabaseCustomizer implements ContextCustomizer { + + private final Class testClass; + + public EnabledOnDatabaseCustomizer(Class testClass) { + this.testClass = testClass; + } + + @Override + public void customizeContext(ConfigurableApplicationContext context, MergedContextConfiguration mergedConfig) { + + MergedAnnotation annotation = MergedAnnotations.from(testClass, SearchStrategy.TYPE_HIERARCHY) + .get(EnabledOnDatabase.class); + + if (annotation.isPresent()) { + + DatabaseType value = annotation.getEnum("value", DatabaseType.class); + + customizeEnvironment(context.getEnvironment(), value); + } + } + + static void customizeEnvironment(ConfigurableEnvironment environment, DatabaseType value) { + + List profiles = Arrays.asList(environment.getActiveProfiles()); + + for (DatabaseType databaseType : DatabaseType.values()) { + + if (profiles.contains(databaseType.getProfile())) { + return; + } + } + + environment.addActiveProfile(value.getProfile()); + } + + public static class EnabledOnDatabaseCustomizerFactory implements ContextCustomizerFactory { + + 
@Override + public ContextCustomizer createContextCustomizer(Class testClass, + List configAttributes) { + return new EnabledOnDatabaseCustomizer(testClass); + } + + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnFeature.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnFeature.java new file mode 100644 index 0000000000..9c855e6054 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/EnabledOnFeature.java @@ -0,0 +1,51 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@code @RequiredFeature} is used to express that the annotated test class or test method is only enabled on + * one or more specified Spring Data JDBC {@link org.springframework.data.jdbc.testing.TestDatabaseFeatures.Feature + * features} are supported by the underlying database. + *

+ * When applied at the class level, all test methods within that class will be enabled if the underlying database + * supports all required features. + *

+ * If a test method is disabled via this annotation, that does not prevent the test class from being instantiated. + * Rather, it prevents the execution of the test method and method-level lifecycle callbacks such as {@code @BeforeEach} + * methods, {@code @AfterEach} methods, and corresponding extension APIs. When annotated on method and class level, all + * annotated features must match to run a test. + * + * @author Jens Schauder + * @author Mark Paluch + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.TYPE }) +@Documented +public @interface EnabledOnFeature { + + /** + * Database features on which the annotated class or method should be enabled. + * + * @see TestDatabaseFeatures.Feature + */ + TestDatabaseFeatures.Feature[] value(); +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/H2DataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/H2DataSourceConfiguration.java new file mode 100644 index 0000000000..cb01929bc1 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/H2DataSourceConfiguration.java @@ -0,0 +1,51 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.jdbc.testing; + +import javax.sql.DataSource; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; + +/** + * {@link DataSource} setup for H2. + * + * @author Mark Paluch + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.H2) +class H2DataSourceConfiguration { + + private final TestClass testClass; + + public H2DataSourceConfiguration(TestClass testClass) { + this.testClass = testClass; + } + + @Bean + DataSource dataSource() { + + return new EmbeddedDatabaseBuilder() // + .generateUniqueName(true) // + .setType(EmbeddedDatabaseType.H2) // + .setScriptEncoding("UTF-8") // + .ignoreFailedDrops(true) // + .addScript(TestUtils.createScriptName(testClass.getTestClass(), "h2")) // + .build(); + } +} diff --git a/src/test/java/org/springframework/data/jdbc/testing/HsqlDataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/HsqlDataSourceConfiguration.java similarity index 74% rename from src/test/java/org/springframework/data/jdbc/testing/HsqlDataSourceConfiguration.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/HsqlDataSourceConfiguration.java index c9703725d1..85f730a909 100644 --- a/src/test/java/org/springframework/data/jdbc/testing/HsqlDataSourceConfiguration.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/HsqlDataSourceConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,6 @@ import javax.sql.DataSource; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; @@ -30,11 +29,15 @@ * @author Jens Schauder * @author Oliver Gierke */ -@Configuration -@Profile({ "hsql", "default" }) +@Configuration(proxyBeanMethods = false) +@Profile({ "hsql", "!h2 && !mysql && !mariadb && !postgres && !oracle && !db2 && !mssql" }) class HsqlDataSourceConfiguration { - @Autowired Class context; + private final TestClass testClass; + + public HsqlDataSourceConfiguration(TestClass testClass) { + this.testClass = testClass; + } @Bean DataSource dataSource() { @@ -44,7 +47,7 @@ DataSource dataSource() { .setType(EmbeddedDatabaseType.HSQL) // .setScriptEncoding("UTF-8") // .ignoreFailedDrops(true) // - .addScript(TestUtils.createScriptName(context, "hsql")) // + .addScript(TestUtils.createScriptName(testClass.getTestClass(), "hsql")) // .build(); } } diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/IntegrationTest.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/IntegrationTest.java new file mode 100644 index 0000000000..0ba5ba876d --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/IntegrationTest.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import static org.springframework.test.context.TestExecutionListeners.MergeMode.*; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.jdbc.testing.EnabledOnDatabaseCustomizer.EnabledOnDatabaseCustomizerFactory; +import org.springframework.data.jdbc.testing.TestClassCustomizer.TestClassCustomizerFactory; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.ContextCustomizerFactories; +import org.springframework.test.context.TestExecutionListeners; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.transaction.annotation.Transactional; + +/** + * {@code @IntegrationTest} is a composed annotation that combines + * {@link ExtendWith @ExtendWith(SpringExtension.class)} from JUnit Jupiter with + * {@link ContextConfiguration @ContextConfiguration} and {@link TestExecutionListeners @TestExecutionListeners} from + * the Spring TestContext Framework enabling transaction management. + *

+ * Integration tests use the Spring Context and a potential profile to create an environment for tests to run against. + * As integration tests require a specific set of infrastructure components, test classes and configuration components + * can be annotated with {@link EnabledOnDatabase @EnabledOnDatabase} or + * {@link ConditionalOnDatabase @ConditionalOnDatabase} to enable and restrict or only restrict configuration on which + * tests are run. + * + * @author Mark Paluch + * @see ConditionalOnDatabase + * @see EnabledOnDatabase + */ +@TestExecutionListeners(value = AssumeFeatureTestExecutionListener.class, mergeMode = MERGE_WITH_DEFAULTS) +// required twice as the annotation lookup doesn't merge multiple occurrences of the same annotation +@ContextCustomizerFactories(value = { TestClassCustomizerFactory.class, EnabledOnDatabaseCustomizerFactory.class }) +@ActiveProfiles(resolver = CombiningActiveProfileResolver.class) +@ExtendWith(SpringExtension.class) +@Transactional +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +public @interface IntegrationTest { + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/LicenseListener.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/LicenseListener.java new file mode 100644 index 0000000000..93af34c0ae --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/LicenseListener.java @@ -0,0 +1,67 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import static org.springframework.data.jdbc.testing.MsSqlDataSourceConfiguration.*; + +import org.junit.AssumptionViolatedException; +import org.springframework.core.annotation.Order; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.test.context.TestContext; +import org.springframework.test.context.TestExecutionListener; +import org.testcontainers.utility.LicenseAcceptance; + +/** + * {@link TestExecutionListener} to selectively skip tests if the license for a particular database container was not + * accepted. + * + * @author Mark Paluch + * @author Jens Schauder + */ +@Order(Integer.MIN_VALUE) +class LicenseListener implements TestExecutionListener { + + private final StandardEnvironment environment = new StandardEnvironment(); + + @Override + public void prepareTestInstance(TestContext testContext) { + + if (environment.matchesProfiles(DatabaseType.DB2.getProfile())) { + assumeLicenseAccepted(Db2DataSourceConfiguration.DOCKER_IMAGE_NAME); + } + + if (environment.matchesProfiles(DatabaseType.SQL_SERVER.getProfile())) { + assumeLicenseAccepted(MS_SQL_SERVER_VERSION); + } + } + + private void assumeLicenseAccepted(String imageName) { + + try { + LicenseAcceptance.assertLicenseAccepted(imageName); + } catch (IllegalStateException e) { + + if (environment.getProperty("on-missing-license", "fail").equals("ignore-test")) { + throw new AssumptionViolatedException(e.getMessage(), e); + } + + throw new IllegalStateException( + "You need to accept the license for the database with which you are testing or set \"ignore-missing-license\" as active profile in order to skip tests for which a license is missing.", + e); + } + } + +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MariaDBDataSourceConfiguration.java 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MariaDBDataSourceConfiguration.java new file mode 100644 index 0000000000..bfe5065b25 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MariaDBDataSourceConfiguration.java @@ -0,0 +1,80 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.sql.Connection; +import java.sql.SQLException; + +import javax.sql.DataSource; + +import org.mariadb.jdbc.MariaDbDataSource; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.jdbc.datasource.init.ScriptUtils; +import org.testcontainers.containers.MariaDBContainer; + +/** + * {@link DataSource} setup for MariaDB. Starts a Docker-container with a MariaDB database, and sets up database "test". 
+ * + * @author Christoph Preißner + * @author Mark Paluch + * @author Jens Schauder + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.MARIADB) +class MariaDBDataSourceConfiguration extends DataSourceConfiguration implements InitializingBean { + + private static MariaDBContainer MARIADB_CONTAINER; + + public MariaDBDataSourceConfiguration(TestClass testClass, Environment environment) { + super(testClass, environment); + } + + @Override + protected DataSource createDataSource() { + + if (MARIADB_CONTAINER == null) { + + MariaDBContainer container = new MariaDBContainer<>("mariadb:10.8.3").withUsername("root").withPassword("") + .withConfigurationOverride(""); + container.start(); + + MARIADB_CONTAINER = container; + } + + try { + + MariaDbDataSource dataSource = new MariaDbDataSource(); + dataSource.setUrl(MARIADB_CONTAINER.getJdbcUrl()); + dataSource.setUser(MARIADB_CONTAINER.getUsername()); + dataSource.setPassword(MARIADB_CONTAINER.getPassword()); + return dataSource; + } catch (SQLException sqlex) { + throw new RuntimeException(sqlex); + } + } + + @Override + public void afterPropertiesSet() throws Exception { + + try (Connection connection = createDataSource().getConnection()) { + ScriptUtils.executeSqlScript(connection, + new ByteArrayResource("DROP DATABASE test;CREATE DATABASE test;".getBytes())); + } + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MsSqlDataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MsSqlDataSourceConfiguration.java new file mode 100644 index 0000000000..828da03804 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MsSqlDataSourceConfiguration.java @@ -0,0 +1,72 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import javax.sql.DataSource; + +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.testcontainers.containers.MSSQLServerContainer; + +import com.microsoft.sqlserver.jdbc.SQLServerDataSource; + +/** + * {@link DataSource} setup for Microsoft SQL Server. + *

+ * Configuration for a MSSQL Datasource. + * + * @author Thomas Lang + * @author Mark Paluch + * @author Jens Schauder + * @see + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.SQL_SERVER) +public class MsSqlDataSourceConfiguration extends DataSourceConfiguration { + + public static final String MS_SQL_SERVER_VERSION = "mcr.microsoft.com/mssql/server:2022-latest"; + private static MSSQLServerContainer MSSQL_CONTAINER; + + public MsSqlDataSourceConfiguration(TestClass testClass, Environment environment) { + super(testClass, environment); + } + + @Override + protected DataSource createDataSource() { + + if (MSSQL_CONTAINER == null) { + + MSSQLServerContainer container = new MSSQLServerContainer<>(MS_SQL_SERVER_VERSION) // + .withReuse(true); + container.start(); + + MSSQL_CONTAINER = container; + } + + SQLServerDataSource sqlServerDataSource = new SQLServerDataSource(); + sqlServerDataSource.setURL(MSSQL_CONTAINER.getJdbcUrl()); + sqlServerDataSource.setUser(MSSQL_CONTAINER.getUsername()); + sqlServerDataSource.setPassword(MSSQL_CONTAINER.getPassword()); + + return sqlServerDataSource; + } + + @Override + protected void customizePopulator(ResourceDatabasePopulator populator) { + populator.setIgnoreFailedDrops(true); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MySqlDataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MySqlDataSourceConfiguration.java new file mode 100644 index 0000000000..88c0d5d1cc --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/MySqlDataSourceConfiguration.java @@ -0,0 +1,91 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.sql.Connection; + +import javax.sql.DataSource; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.jdbc.datasource.init.ScriptUtils; +import org.testcontainers.containers.MySQLContainer; + +import com.mysql.cj.jdbc.MysqlDataSource; + +/** + * {@link DataSource} setup for MySQL. Starts a docker container with a MySql database and sets up a database name + * "test" in it. 
+ * + * @author Jens Schauder + * @author Oliver Gierke + * @author Sedat Gokcen + * @author Mark Paluch + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.MYSQL) +class MySqlDataSourceConfiguration extends DataSourceConfiguration implements InitializingBean { + + private static MySQLContainer MYSQL_CONTAINER; + + public MySqlDataSourceConfiguration(TestClass testClass, Environment environment) { + super(testClass, environment); + } + + @Override + protected DataSource createDataSource() { + + if (MYSQL_CONTAINER == null) { + + MySQLContainer container = new MySQLContainer<>("mysql:8.0.29").withUsername("test").withPassword("test") + .withConfigurationOverride(""); + + container.start(); + + MYSQL_CONTAINER = container; + } + + MysqlDataSource dataSource = new MysqlDataSource(); + dataSource.setUrl(MYSQL_CONTAINER.getJdbcUrl()); + dataSource.setUser("root"); + dataSource.setPassword(MYSQL_CONTAINER.getPassword()); + dataSource.setDatabaseName(MYSQL_CONTAINER.getDatabaseName()); + + return dataSource; + } + + @Override + public void afterPropertiesSet() throws Exception { + + try (Connection connection = createDataSource().getConnection()) { + ScriptUtils.executeSqlScript(connection, + new ByteArrayResource("DROP DATABASE test;CREATE DATABASE test;".getBytes())); + } + } + + private DataSource createRootDataSource() { + + MysqlDataSource dataSource = new MysqlDataSource(); + dataSource.setUrl(MYSQL_CONTAINER.getJdbcUrl()); + dataSource.setUser("root"); + dataSource.setPassword(MYSQL_CONTAINER.getPassword()); + dataSource.setDatabaseName(MYSQL_CONTAINER.getDatabaseName()); + + return dataSource; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/OracleDataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/OracleDataSourceConfiguration.java new file mode 100644 index 0000000000..2d3ca4cfd2 --- /dev/null +++ 
b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/OracleDataSourceConfiguration.java @@ -0,0 +1,100 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.DriverManagerDataSource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.testcontainers.oracle.OracleContainer; +import org.testcontainers.utility.DockerImageName; + +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; + +/** + * {@link DataSource} setup for Oracle Database 23ai FREE. Starts a docker container with an Oracle database. 
+ * + * @see Oracle Docker Image + * @see Testcontainers Oracle + * @author Thomas Lang + * @author Jens Schauder + * @author Loïc Lefèvre + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.ORACLE) +public class OracleDataSourceConfiguration extends DataSourceConfiguration { + + private static final Log LOG = LogFactory.getLog(OracleDataSourceConfiguration.class); + + private static DataSource DATA_SOURCE; + + public OracleDataSourceConfiguration(TestClass testClass, Environment environment) { + super(testClass, environment); + } + + @Override + protected synchronized DataSource createDataSource() { + + if (DATA_SOURCE == null) { + + LOG.info("Oracle starting..."); + DockerImageName dockerImageName = DockerImageName.parse("gvenzl/oracle-free:23-slim"); + OracleContainer container = new OracleContainer(dockerImageName) // + .withStartupTimeoutSeconds(200) // + .withReuse(true); + container.start(); + LOG.info("Oracle started"); + + initDb(container.getJdbcUrl(),container.getUsername(), container.getPassword()); + + DATA_SOURCE = poolDataSource(new DriverManagerDataSource(container.getJdbcUrl(), + container.getUsername(), container.getPassword())); + } + return DATA_SOURCE; + } + + private DataSource poolDataSource(DataSource dataSource) { + + HikariConfig config = new HikariConfig(); + config.setDataSource(dataSource); + + config.setMaximumPoolSize(10); + config.setIdleTimeout(30000); + config.setMaxLifetime(600000); + config.setConnectionTimeout(30000); + + return new HikariDataSource(config); + } + + private void initDb(String jdbcUrl, String username, String password) { + + final DriverManagerDataSource dataSource = new DriverManagerDataSource(jdbcUrl, "system", + password); + final JdbcTemplate jdbc = new JdbcTemplate(dataSource); + jdbc.execute("GRANT ALL PRIVILEGES TO " + username); + } + + @Override + protected void customizePopulator(ResourceDatabasePopulator populator) { + populator.setIgnoreFailedDrops(true); + } +} diff 
--git a/src/test/java/org/springframework/data/jdbc/testing/PostgresDataSourceConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/PostgresDataSourceConfiguration.java similarity index 66% rename from src/test/java/org/springframework/data/jdbc/testing/PostgresDataSourceConfiguration.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/PostgresDataSourceConfiguration.java index 9adfdca1b8..4b6745ce4a 100644 --- a/src/test/java/org/springframework/data/jdbc/testing/PostgresDataSourceConfiguration.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/PostgresDataSourceConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,36 +19,39 @@ import org.postgresql.ds.PGSimpleDataSource; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; +import org.springframework.core.env.Environment; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; import org.testcontainers.containers.PostgreSQLContainer; /** - * {@link DataSource} setup for PostgreSQL. - * - * Starts a docker container with a Postgres database. + * {@link DataSource} setup for PostgreSQL. Starts a docker container with a Postgres database. 
* * @author Jens Schauder * @author Oliver Gierke * @author Sedat Gokcen + * @author Mark Paluch */ -@Configuration -@Profile("postgres") +@Configuration(proxyBeanMethods = false) +@ConditionalOnDatabase(DatabaseType.POSTGRES) public class PostgresDataSourceConfiguration extends DataSourceConfiguration { - private static final PostgreSQLContainer POSTGRESQL_CONTAINER = new PostgreSQLContainer(); + private static PostgreSQLContainer POSTGRESQL_CONTAINER; - static { - POSTGRESQL_CONTAINER.start(); + public PostgresDataSourceConfiguration(TestClass testClass, Environment environment) { + super(testClass, environment); } - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.testing.DataSourceConfiguration#createDataSource() - */ @Override protected DataSource createDataSource() { + if (POSTGRESQL_CONTAINER == null) { + + PostgreSQLContainer container = new PostgreSQLContainer<>("postgres:14.3"); + container.start(); + + POSTGRESQL_CONTAINER = container; + } + PGSimpleDataSource dataSource = new PGSimpleDataSource(); dataSource.setUrl(POSTGRESQL_CONTAINER.getJdbcUrl()); dataSource.setUser(POSTGRESQL_CONTAINER.getUsername()); @@ -57,10 +60,6 @@ protected DataSource createDataSource() { return dataSource; } - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.testing.DataSourceFactoryBean#customizePopulator(org.springframework.jdbc.datasource.init.ResourceDatabasePopulator) - */ @Override protected void customizePopulator(ResourceDatabasePopulator populator) { populator.setIgnoreFailedDrops(true); diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestClass.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestClass.java new file mode 100644 index 0000000000..6903fd81c7 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestClass.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import org.springframework.util.Assert; + +/** + * Value object to represent the underlying test class. + * + * @author Mark Paluch + */ +public final class TestClass { + + private final Class testClass; + + private TestClass(Class testClass) { + this.testClass = testClass; + } + + /** + * Create a new {@link TestClass} given {@code testClass}. + * + * @param testClass must not be {@literal null}. + * @return the new {@link TestClass}. + */ + public static TestClass of(Class testClass) { + + Assert.notNull(testClass, "TestClass must not be null"); + + return new TestClass(testClass); + } + + public Class getTestClass() { + return testClass; + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestClassCustomizer.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestClassCustomizer.java new file mode 100644 index 0000000000..31e1b085e1 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestClassCustomizer.java @@ -0,0 +1,53 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.util.List; + +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.test.context.ContextConfigurationAttributes; +import org.springframework.test.context.ContextCustomizer; +import org.springframework.test.context.ContextCustomizerFactory; +import org.springframework.test.context.MergedContextConfiguration; + +/** + * {@link ContextCustomizer} registering {@link TestClass}. + * + * @author Mark Paluch + */ +class TestClassCustomizer implements ContextCustomizer { + + private final Class testClass; + + public TestClassCustomizer(Class testClass) { + this.testClass = testClass; + } + + @Override + public void customizeContext(ConfigurableApplicationContext context, MergedContextConfiguration mergedConfig) { + context.getBeanFactory().registerSingleton(TestClass.class.getSimpleName(), TestClass.of(testClass)); + } + + static class TestClassCustomizerFactory implements ContextCustomizerFactory { + + @Override + public ContextCustomizer createContextCustomizer(Class testClass, + List configAttributes) { + return new TestClassCustomizer(testClass); + } + + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestConfiguration.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestConfiguration.java new file mode 100644 index 0000000000..0767c2ee73 --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestConfiguration.java @@ -0,0 +1,207 @@ +/* + * 
Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import javax.sql.DataSource; + +import org.apache.ibatis.session.SqlSessionFactory; +import org.mockito.Mockito; + +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; +import org.springframework.context.annotation.Primary; +import org.springframework.context.annotation.Profile; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.jdbc.core.convert.*; +import org.springframework.data.jdbc.core.dialect.JdbcArrayColumns; +import org.springframework.data.jdbc.core.dialect.JdbcDialect; +import org.springframework.data.jdbc.core.mapping.JdbcMappingContext; +import org.springframework.data.jdbc.core.mapping.JdbcSimpleTypes; +import org.springframework.data.jdbc.repository.config.DialectResolver; +import 
org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.query.ExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Infrastructure configuration for integration tests. + * + * @author Oliver Gierke + * @author Jens Schauder + * @author Mark Paluch + * @author Fei Dong + * @author Myeonghyeon Lee + * @author Christoph Strobl + * @author Chirag Tailor + * @author Christopher Klein + * @author Mikhail Polivakha + */ +@Configuration +@ComponentScan // To pick up configuration classes (per activated profile) +public class TestConfiguration { + + public static final String PROFILE_SINGLE_QUERY_LOADING = "singleQueryLoading"; + public static final String PROFILE_NO_SINGLE_QUERY_LOADING = "!" 
+ PROFILE_SINGLE_QUERY_LOADING; + + @Autowired + DataSource dataSource; + @Autowired + BeanFactory beanFactory; + @Autowired + ApplicationEventPublisher publisher; + @Autowired(required = false) + SqlSessionFactory sqlSessionFactory; + + @Bean + JdbcRepositoryFactory jdbcRepositoryFactory( + @Qualifier("defaultDataAccessStrategy") DataAccessStrategy dataAccessStrategy, RelationalMappingContext context, + Dialect dialect, JdbcConverter converter, Optional> namedQueries, + List> callbacks, List evaulationContextExtensions) { + + JdbcRepositoryFactory factory = new JdbcRepositoryFactory(dataAccessStrategy, context, converter, dialect, + publisher, namedParameterJdbcTemplate()); + + factory.setEntityCallbacks(EntityCallbacks.create(callbacks.toArray(new EntityCallback[0]))); + + namedQueries.map(it -> it.iterator().next()).ifPresent(factory::setNamedQueries); + + factory.setEvaluationContextProvider( + new ExtensionAwareQueryMethodEvaluationContextProvider(evaulationContextExtensions)); + return factory; + } + + @Bean + @Primary + NamedParameterJdbcOperations namedParameterJdbcTemplate() { + return new NamedParameterJdbcTemplate(dataSource); + } + + @Bean + PlatformTransactionManager transactionManager() { + return new DataSourceTransactionManager(dataSource); + } + + @Bean + DataAccessStrategy defaultDataAccessStrategy( + @Qualifier("namedParameterJdbcTemplate") NamedParameterJdbcOperations template, RelationalMappingContext context, + JdbcConverter converter, Dialect dialect) { + + return new DataAccessStrategyFactory(new SqlGeneratorSource(context, converter, dialect), converter, template, + new SqlParametersFactory(context, converter), new InsertStrategyFactory(template, dialect)).create(); + } + + @Bean("jdbcMappingContext") + @Profile(PROFILE_NO_SINGLE_QUERY_LOADING) + JdbcMappingContext jdbcMappingContextWithOutSingleQueryLoading(Optional namingStrategy, + CustomConversions conversions) { + + JdbcMappingContext mappingContext = new 
JdbcMappingContext(namingStrategy.orElse(DefaultNamingStrategy.INSTANCE)); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + return mappingContext; + } + + @Bean("jdbcMappingContext") + @Profile(PROFILE_SINGLE_QUERY_LOADING) + JdbcMappingContext jdbcMappingContextWithSingleQueryLoading(Optional namingStrategy, + CustomConversions conversions) { + + JdbcMappingContext mappingContext = new JdbcMappingContext(namingStrategy.orElse(DefaultNamingStrategy.INSTANCE)); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.setSingleQueryLoadingEnabled(true); + return mappingContext; + } + + @Bean + CustomConversions jdbcCustomConversions(Dialect dialect) { + + SimpleTypeHolder simpleTypeHolder = dialect.simpleTypes().isEmpty() ? + JdbcSimpleTypes.HOLDER : + new SimpleTypeHolder(dialect.simpleTypes(), JdbcSimpleTypes.HOLDER); + + return new JdbcCustomConversions(CustomConversions.StoreConversions.of(simpleTypeHolder, storeConverters(dialect)), + Collections.emptyList()); + } + + private List storeConverters(Dialect dialect) { + + List converters = new ArrayList<>(); + converters.addAll(dialect.getConverters()); + converters.addAll(JdbcCustomConversions.storeConverters()); + return converters; + } + + @Bean + JdbcConverter relationalConverter(RelationalMappingContext mappingContext, @Lazy RelationResolver relationResolver, + CustomConversions conversions, @Qualifier("namedParameterJdbcTemplate") NamedParameterJdbcOperations template, + Dialect dialect) { + + org.springframework.data.jdbc.core.dialect.JdbcArrayColumns arrayColumns = dialect instanceof JdbcDialect + ? 
+ ((JdbcDialect) dialect).getArraySupport() : + JdbcArrayColumns.DefaultSupport.INSTANCE; + + return new MappingJdbcConverter( // + mappingContext, // + relationResolver, // + conversions, // + new DefaultJdbcTypeFactory(template.getJdbcOperations(), arrayColumns)); + } + + /** + * Creates a {@link IdGeneratingEntityCallback} bean using the configured + * {@link #jdbcDialect(NamedParameterJdbcOperations)}. + * + * @return must not be {@literal null}. + */ + @Bean + public IdGeneratingEntityCallback idGeneratingBeforeSaveCallback(JdbcMappingContext mappingContext, + NamedParameterJdbcOperations operations, Dialect dialect) { + return Mockito.spy(new IdGeneratingEntityCallback(mappingContext, dialect, operations)); + } + + @Bean + Dialect jdbcDialect(NamedParameterJdbcOperations operations) { + return DialectResolver.getDialect(operations.getJdbcOperations()); + } + + @Lazy + @Bean + TestDatabaseFeatures features(NamedParameterJdbcOperations operations) { + return new TestDatabaseFeatures(operations.getJdbcOperations()); + } +} diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestDatabaseFeatures.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestDatabaseFeatures.java new file mode 100644 index 0000000000..0a985bd5ad --- /dev/null +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestDatabaseFeatures.java @@ -0,0 +1,138 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.jdbc.testing; + +import static org.assertj.core.api.Assumptions.*; + +import java.util.Arrays; +import java.util.Locale; +import java.util.function.Consumer; + +import org.springframework.jdbc.core.ConnectionCallback; +import org.springframework.jdbc.core.JdbcOperations; + +/** + * This class provides information about which features a database integration supports in order to react on the + * presence or absence of features in tests. + * + * @author Jens Schauder + * @author Chirag Tailor + * @author Mikhail Polivakha + */ +public class TestDatabaseFeatures { + + private final Database database; + + public TestDatabaseFeatures(JdbcOperations jdbcTemplate) { + + String productName = jdbcTemplate.execute( + (ConnectionCallback) c -> c.getMetaData().getDatabaseProductName().toLowerCase(Locale.ENGLISH)); + + database = Arrays.stream(Database.values()).filter(db -> db.matches(productName)).findFirst().orElseThrow(); + } + + /** + * Not all databases support really huge numbers as represented by {@link java.math.BigDecimal} and similar. + */ + private void supportsHugeNumbers() { + assumeThat(database).isNotIn(Database.Oracle, Database.SqlServer); + } + + /** + * Microsoft SqlServer does not allow explicitly setting ids in columns where the value gets generated by the + * database. Such columns therefore must not be used in referenced entities, since we do a delete and insert, which + * must not recreate an id. 
See https://github.com/spring-projects/spring-data-jdbc/issues/437 + */ + private void supportsGeneratedIdsInReferencedEntities() { + assumeThat(database).isNotEqualTo(Database.SqlServer); + } + + private void supportsArrays() { + + assumeThat(database).isNotIn(Database.MySql, Database.MariaDb, Database.SqlServer, Database.Db2, Database.Oracle); + } + + private void supportsNanosecondPrecision() { + + assumeThat(database).isNotIn(Database.MySql, Database.PostgreSql, Database.MariaDb, Database.SqlServer); + } + + private void supportsMultiDimensionalArrays() { + + supportsArrays(); + assumeThat(database).isNotIn(Database.H2, Database.Hsql); + } + + private void supportsNullPrecedence() { + assumeThat(database).isNotIn(Database.MySql, Database.MariaDb, Database.SqlServer); + } + + private void supportsSequences() { + assumeThat(database).isNotIn(Database.MySql); + } + + private void supportsWhereInTuples() { + assumeThat(database).isIn(Database.MySql, Database.PostgreSql); + } + + public void databaseIs(Database database) { + assumeThat(this.database).isEqualTo(database); + } + + public enum Database { + Hsql, H2, MySql, MariaDb, PostgreSql, SqlServer("microsoft"), Db2, Oracle; + + private final String identification; + + Database(String identification) { + this.identification = identification; + } + + Database() { + this.identification = null; + } + + boolean matches(String productName) { + + String identification = this.identification == null ? 
name().toLowerCase() : this.identification; + return productName.contains(identification); + } + } + + public enum Feature { + + SUPPORTS_MULTIDIMENSIONAL_ARRAYS(TestDatabaseFeatures::supportsMultiDimensionalArrays), // + SUPPORTS_HUGE_NUMBERS(TestDatabaseFeatures::supportsHugeNumbers), // + SUPPORTS_ARRAYS(TestDatabaseFeatures::supportsArrays), // + SUPPORTS_GENERATED_IDS_IN_REFERENCED_ENTITIES(TestDatabaseFeatures::supportsGeneratedIdsInReferencedEntities), // + SUPPORTS_NANOSECOND_PRECISION(TestDatabaseFeatures::supportsNanosecondPrecision), // + SUPPORTS_NULL_PRECEDENCE(TestDatabaseFeatures::supportsNullPrecedence), + IS_POSTGRES(f -> f.databaseIs(Database.PostgreSql)), // + WHERE_IN_TUPLE(TestDatabaseFeatures::supportsWhereInTuples), // + SUPPORTS_SEQUENCES(TestDatabaseFeatures::supportsSequences), // + IS_HSQL(f -> f.databaseIs(Database.Hsql)); + + private final Consumer featureMethod; + + Feature(Consumer featureMethod) { + this.featureMethod = featureMethod; + } + + void test(TestDatabaseFeatures features) { + featureMethod.accept(features); + } + } +} diff --git a/src/test/java/org/springframework/data/jdbc/testing/TestUtils.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestUtils.java similarity index 68% rename from src/test/java/org/springframework/data/jdbc/testing/TestUtils.java rename to spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestUtils.java index 6f9afa2cab..5cf1bf25ae 100644 --- a/src/test/java/org/springframework/data/jdbc/testing/TestUtils.java +++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/TestUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.data.jdbc.testing; +import org.springframework.core.io.ClassPathResource; import org.springframework.util.Assert; /** @@ -33,10 +34,17 @@ public interface TestUtils { */ public static String createScriptName(Class testClass, String databaseType) { - Assert.notNull(testClass, "Test class must not be null!"); - Assert.hasText(databaseType, "Database type must not be null or empty!"); + Assert.notNull(testClass, "Test class must not be null"); + Assert.hasText(databaseType, "Database type must not be null or empty"); - return String.format("%s/%s-%s.sql", testClass.getPackage().getName(), testClass.getSimpleName(), + String path = String.format("%s/%s-%s.sql", testClass.getPackage().getName(), testClass.getSimpleName(), databaseType.toLowerCase()); + + ClassPathResource resource = new ClassPathResource(path); + if (!resource.exists()) { + throw new IllegalStateException("Test resource " + path + " not found"); + } + + return path; } } diff --git a/spring-data-jdbc/src/test/kotlin/org/springframework/data/jdbc/core/JdbcAggregateOperationsExtensionsTests.kt b/spring-data-jdbc/src/test/kotlin/org/springframework/data/jdbc/core/JdbcAggregateOperationsExtensionsTests.kt new file mode 100644 index 0000000000..6f0279d15d --- /dev/null +++ b/spring-data-jdbc/src/test/kotlin/org/springframework/data/jdbc/core/JdbcAggregateOperationsExtensionsTests.kt @@ -0,0 +1,210 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.jdbc.core + +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test +import org.springframework.data.domain.Pageable +import org.springframework.data.domain.Sort +import org.springframework.data.jdbc.testing.TestClass +import org.springframework.data.relational.core.query.Query + +/** + * Unit tests for [JdbcAggregateOperations]. + * + * @author Felix Desyatirikov + */ + +class JdbcAggregateOperationsExtensionsTests { + + val operations = mockk(relaxed = true) + + @Test // GH-1961 + fun `count with reified type parameter extension should call its Java counterpart`() { + + operations.count() + + verify { operations.count(TestClass::class.java) } + } + + @Test // GH-1961 + fun `count(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk(relaxed = true) + + operations.count(query) + + verify { + operations.count(query, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `exists(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk(relaxed = true) + + operations.exists(query) + + verify { + operations.exists(query, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `existsById(id) with reified type parameter extension should call its Java counterpart`() { + + val id = 1L + + operations.existsById(id) + + verify { + operations.existsById(id, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `findById(id) with reified type parameter extension should call its Java counterpart`() 
{ + + val id = 1L + + operations.findById(id) + + verify { + operations.findById(id, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `findAllById(ids) with reified type parameter extension should call its Java counterpart`() { + + val ids = listOf(1L, 2L) + + operations.findAllById(ids) + + verify { + operations.findAllById(ids, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `findAll() with reified type parameter extension should call its Java counterpart`() { + + operations.findAll() + + verify { + operations.findAll(TestClass::class.java) + } + } + + @Test // GH-1961 + fun `findAll(Sort) with reified type parameter extension should call its Java counterpart`() { + + val sort = mockk(relaxed = true) + + operations.findAll(sort) + + verify { + operations.findAll(TestClass::class.java, sort) + } + } + + @Test // GH-1961 + fun `findAll(Pageable) with reified type parameter extension should call its Java counterpart`() { + + val pageable = mockk(relaxed = true) + + operations.findAll(pageable) + + verify { + operations.findAll(TestClass::class.java, pageable) + } + } + + @Test // GH-1961 + fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk(relaxed = true) + + operations.findOne(query) + + verify { + operations.findOne(query, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `findAll(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk(relaxed = true) + + operations.findAll(query) + + verify { + operations.findAll(query, TestClass::class.java) + } + } + + + @Test // GH-1961 + fun `findAll(Query, Pageable) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk(relaxed = true) + val pageable = mockk(relaxed = true) + + operations.findAll(query, pageable) + + verify { + operations.findAll(query, TestClass::class.java, pageable) + } + } + + @Test // GH-1961 + fun `deleteById(id) with 
reified type parameter extension should call its Java counterpart`() { + + val id = 1L + + operations.deleteById(id) + + verify { + operations.deleteById(id, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `deleteAllById(ids) with reified type parameter extension should call its Java counterpart`() { + + val ids = listOf(1L, 2L) + + operations.deleteAllById(ids) + + verify { + operations.deleteAllById(ids, TestClass::class.java) + } + } + + @Test // GH-1961 + fun `deleteAll(ids) with reified type parameter extension should call its Java counterpart`() { + + operations.deleteAll() + + verify { + operations.deleteAll(TestClass::class.java) + } + } +} \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/META-INF/jdbc-named-queries.properties b/spring-data-jdbc/src/test/resources/META-INF/jdbc-named-queries.properties new file mode 100644 index 0000000000..217ad9e1a4 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/META-INF/jdbc-named-queries.properties @@ -0,0 +1,2 @@ +DummyEntity.findAllByNamedQuery=SELECT * FROM DUMMY_ENTITY +DummyEntity.customQuery=SELECT * FROM DUMMY_ENTITY \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/META-INF/spring.factories b/spring-data-jdbc/src/test/resources/META-INF/spring.factories new file mode 100644 index 0000000000..9631bc279e --- /dev/null +++ b/spring-data-jdbc/src/test/resources/META-INF/spring.factories @@ -0,0 +1 @@ +org.springframework.test.context.TestExecutionListener=org.springframework.data.jdbc.testing.LicenseListener,org.springframework.data.jdbc.testing.DatabaseTypeCondition diff --git a/spring-data-jdbc/src/test/resources/logback.xml b/spring-data-jdbc/src/test/resources/logback.xml new file mode 100644 index 0000000000..67cda4afc6 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/logback.xml @@ -0,0 +1,19 @@ + + + + + + %d %5p %40.40c:%4L - %m%n + + + + + + + + + + + + + diff --git a/src/test/resources/mysql.cnf 
b/spring-data-jdbc/src/test/resources/mysql.cnf similarity index 100% rename from src/test/resources/mysql.cnf rename to spring-data-jdbc/src/test/resources/mysql.cnf diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core.dialect/PostgresDialectIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core.dialect/PostgresDialectIntegrationTests-postgres.sql new file mode 100644 index 0000000000..0d5df184f1 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core.dialect/PostgresDialectIntegrationTests-postgres.sql @@ -0,0 +1,8 @@ +DROP TABLE customers; + +CREATE TABLE customers ( + id BIGSERIAL PRIMARY KEY, + name TEXT NOT NULL, + person_data JSONB, + session_data JSONB +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/ImmutableAggregateTemplateHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/ImmutableAggregateTemplateHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..20b1a40f77 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/ImmutableAggregateTemplateHsqlIntegrationTests-hsql.sql @@ -0,0 +1,45 @@ +CREATE TABLE LEGO_SET +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); + +CREATE TABLE MANUAL +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + LEGO_SET BIGINT, + CONTENT VARCHAR(2000) +); +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET (id); + +CREATE TABLE AUTHOR +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + LEGO_SET BIGINT, + NAME VARCHAR(2000) +); +ALTER TABLE AUTHOR + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET (id); + +CREATE TABLE WITH_COPY_CONSTRUCTOR +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); + +CREATE TABLE ROOT +( + 
ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE NON_ROOT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + ROOT BIGINT NOT NULL, + NAME VARCHAR(30) +); +ALTER TABLE NON_ROOT + ADD FOREIGN KEY (ROOT) REFERENCES ROOT (ID); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-db2.sql new file mode 100644 index 0000000000..e93990e31b --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-db2.sql @@ -0,0 +1,470 @@ +DROP TABLE MANUAL; +DROP TABLE LEGO_SET; + +DROP TABLE Child_No_Id; +DROP TABLE ONE_TO_ONE_PARENT; + +DROP TABLE ELEMENT_NO_ID; +DROP TABLE LIST_PARENT; +DROP TABLE SIMPLE_LIST_PARENT; + +DROP TABLE BYTE_ARRAY_OWNER; + +DROP TABLE CHAIN0; +DROP TABLE CHAIN1; +DROP TABLE CHAIN2; +DROP TABLE CHAIN3; +DROP TABLE CHAIN4; + +DROP TABLE NO_ID_CHAIN0; +DROP TABLE NO_ID_CHAIN1; +DROP TABLE NO_ID_CHAIN2; +DROP TABLE NO_ID_CHAIN3; +DROP TABLE NO_ID_CHAIN4; + +DROP TABLE NO_ID_MAP_CHAIN0; +DROP TABLE NO_ID_MAP_CHAIN1; +DROP TABLE NO_ID_MAP_CHAIN2; +DROP TABLE NO_ID_MAP_CHAIN3; +DROP TABLE NO_ID_MAP_CHAIN4; + +DROP TABLE NO_ID_LIST_CHAIN0; +DROP TABLE NO_ID_LIST_CHAIN1; +DROP TABLE NO_ID_LIST_CHAIN2; +DROP TABLE NO_ID_LIST_CHAIN3; +DROP TABLE NO_ID_LIST_CHAIN4; + +DROP TABLE WITH_READ_ONLY; +DROP TABLE VERSIONED_AGGREGATE; +DROP TABLE WITH_LOCAL_DATE_TIME; + +DROP TABLE WITH_ID_ONLY; + +DROP TABLE WITH_INSERT_ONLY; + +DROP TABLE MULTIPLE_COLLECTIONS; +DROP TABLE MAP_ELEMENT; +DROP TABLE LIST_ELEMENT; +DROP TABLE SET_ELEMENT; + +DROP TABLE BOOK; +DROP TABLE AUTHOR; + +DROP TABLE ENUM_MAP_OWNER; + +DROP TABLE REFERENCED; +DROP TABLE WITH_ONE_TO_ONE; + +DROP TABLE THIRD; +DROP TABLE SEC; +DROP TABLE 
FIRST; + +CREATE TABLE LEGO_SET +( + "id1" BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + "id2" BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + LEGO_SET BIGINT, + "alternative" BIGINT, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET ("id1"); + +CREATE TABLE ONE_TO_ONE_PARENT +( + "id3" BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER NOT NULL PRIMARY KEY, + content VARCHAR(30) +); + +CREATE TABLE LIST_PARENT +( + "id4" BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE SIMPLE_LIST_PARENT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE ELEMENT_NO_ID +( + CONTENT VARCHAR(100), + LIST_PARENT_KEY BIGINT, + SIMPLE_LIST_PARENT_KEY BIGINT, + LIST_PARENT BIGINT, + SIMPLE_LIST_PARENT BIGINT +); +ALTER TABLE ELEMENT_NO_ID + ADD FOREIGN KEY (LIST_PARENT) + REFERENCES LIST_PARENT ("id4"); +-- +-- CREATE TABLE ARRAY_OWNER +-- ( +-- ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, +-- DIGITS VARCHAR(20) ARRAY[10] NOT NULL, +-- MULTIDIMENSIONAL VARCHAR(20) ARRAY[10] NULL +-- ); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + BINARY_DATA VARBINARY(20) NOT NULL +); + +CREATE TABLE CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE CHAIN3 +( + THREE BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 30) PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 20) PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + 
FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 10) PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 0) PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT NOT NULL, + NO_ID_LIST_CHAIN4_KEY BIGINT NOT NULL, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT NOT NULL, + NO_ID_LIST_CHAIN4_KEY BIGINT NOT NULL, + NO_ID_LIST_CHAIN3_KEY BIGINT NOT NULL, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + 
+CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT NOT NULL, + NO_ID_LIST_CHAIN4_KEY BIGINT NOT NULL, + NO_ID_LIST_CHAIN3_KEY BIGINT NOT NULL, + NO_ID_LIST_CHAIN2_KEY BIGINT NOT NULL, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT NOT NULL, + NO_ID_LIST_CHAIN4_KEY BIGINT NOT NULL, + NO_ID_LIST_CHAIN3_KEY BIGINT NOT NULL, + NO_ID_LIST_CHAIN2_KEY BIGINT NOT NULL, + NO_ID_LIST_CHAIN1_KEY BIGINT NOT NULL, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT NOT NULL, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20) NOT NULL, + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT NOT NULL, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20) NOT NULL, + NO_ID_MAP_CHAIN3_KEY VARCHAR(20) NOT NULL, + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + 
NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT NOT NULL, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20) NOT NULL, + NO_ID_MAP_CHAIN3_KEY VARCHAR(20) NOT NULL, + NO_ID_MAP_CHAIN2_KEY VARCHAR(20) NOT NULL, + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT NOT NULL, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20) NOT NULL, + NO_ID_MAP_CHAIN3_KEY VARCHAR(20) NOT NULL, + NO_ID_MAP_CHAIN2_KEY VARCHAR(20) NOT NULL, + NO_ID_MAP_CHAIN1_KEY VARCHAR(20) NOT NULL, + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE WITH_READ_ONLY ( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + NAME VARCHAR(200), + READ_ONLY VARCHAR(200) DEFAULT 'from-db' +); + +CREATE TABLE VERSIONED_AGGREGATE +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + VERSION BIGINT +); + + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT NOT NULL PRIMARY KEY, + TEST_TIME TIMESTAMP(9) +); + +CREATE TABLE WITH_ID_ONLY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY +); + +CREATE TABLE WITH_INSERT_ONLY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME 
VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + "renamed" VARCHAR(100), + ID BIGINT +); + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-h2.sql new file mode 100644 index 0000000000..24ef5bdeab --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-h2.sql @@ -0,0 +1,420 @@ +CREATE TABLE LEGO_SET +( + "id1" SERIAL PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + "id2" SERIAL PRIMARY KEY, + LEGO_SET BIGINT, + "alternative" BIGINT, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET ("id1"); + +CREATE TABLE ONE_TO_ONE_PARENT 
+( + "id3" SERIAL PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER PRIMARY KEY, + content VARCHAR(30) +); + +CREATE TABLE LIST_PARENT +( + "id4" SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SIMPLE_LIST_PARENT +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE element_no_id +( + content VARCHAR(100), + SIMPLE_LIST_PARENT_key BIGINT, + SIMPLE_LIST_PARENT INTEGER, + LIST_PARENT_key BIGINT, + LIST_PARENT INTEGER +); + +CREATE TABLE "ARRAY_OWNER" +( + ID SERIAL PRIMARY KEY, + DIGITS VARCHAR(30) ARRAY[10] NOT NULL, + MULTIDIMENSIONAL VARCHAR(30) ARRAY[10] NULL +); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID SERIAL PRIMARY KEY, + BINARY_DATA BYTEA NOT NULL +); + +CREATE TABLE DOUBLE_LIST_OWNER +( + ID SERIAL PRIMARY KEY, + DIGITS DOUBLE ARRAY[10] +); + +CREATE TABLE FLOAT_LIST_OWNER +( + ID SERIAL PRIMARY KEY, + DIGITS FLOAT ARRAY[10] +); + +CREATE TABLE CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE CHAIN3 +( + THREE SERIAL PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO SERIAL PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE SERIAL PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO SERIAL PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 
+( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + NO_ID_LIST_CHAIN1_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + 
NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE "VERSIONED_AGGREGATE" +( + ID SERIAL PRIMARY 
KEY, + VERSION BIGINT +); + +CREATE TABLE WITH_READ_ONLY +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(200), + READ_ONLY VARCHAR(200) DEFAULT 'from-db' +); + + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT PRIMARY KEY, + TEST_TIME TIMESTAMP(9) WITHOUT TIME ZONE +); + +CREATE TABLE WITH_ID_ONLY +( + ID SERIAL PRIMARY KEY +); + +CREATE TABLE WITH_INSERT_ONLY +( + ID SERIAL PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID SERIAL PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + "renamed" VARCHAR(100), + ID BIGINT +); + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-hsql.sql new file mode 100644 index 0000000000..21e80a6c98 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-hsql.sql @@ -0,0 +1,422 @@ +CREATE TABLE LEGO_SET +( + "id1" BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + "id2" BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + LEGO_SET BIGINT, + "alternative" BIGINT, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET ("id1"); + +CREATE TABLE ONE_TO_ONE_PARENT +( + "id3" BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER PRIMARY KEY, + content VARCHAR(30) +); + +CREATE TABLE SIMPLE_LIST_PARENT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE LIST_PARENT +( + "id4" BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE ELEMENT_NO_ID +( + CONTENT VARCHAR(100), + SIMPLE_LIST_PARENT_KEY BIGINT, + SIMPLE_LIST_PARENT BIGINT, + LIST_PARENT_KEY BIGINT, + LIST_PARENT BIGINT +); +ALTER TABLE ELEMENT_NO_ID + ADD FOREIGN KEY (LIST_PARENT) + REFERENCES LIST_PARENT ("id4"); + +CREATE TABLE ARRAY_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + DIGITS VARCHAR(20) ARRAY[10] NOT NULL, + MULTIDIMENSIONAL VARCHAR(20) ARRAY[10] NULL +); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + BINARY_DATA VARBINARY(20) NOT NULL +); + +CREATE TABLE DOUBLE_LIST_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + DIGITS DOUBLE PRECISION ARRAY[10] +); + +CREATE TABLE FLOAT_LIST_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + DIGITS FLOAT ARRAY[10] +); + +CREATE TABLE CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 
40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE CHAIN3 +( + THREE BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 30) PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 20) PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 10) PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 0) PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE 
VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + NO_ID_LIST_CHAIN1_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), 
+ PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE WITH_READ_ONLY ( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 40) PRIMARY KEY, + NAME VARCHAR(200), + READ_ONLY VARCHAR(200) DEFAULT 'from-db' +); + +CREATE TABLE VERSIONED_AGGREGATE +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + VERSION BIGINT +); + + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT PRIMARY KEY, + TEST_TIME TIMESTAMP(9) +); + +CREATE TABLE WITH_INSERT_ONLY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE WITH_ID_ONLY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY +); + +CREATE TABLE 
MULTIPLE_COLLECTIONS +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); + + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + "renamed" VARCHAR(100), + ID BIGINT +); + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..14636eff40 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mariadb.sql @@ -0,0 +1,394 @@ +CREATE TABLE LEGO_SET +( + `id1` BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + `id2` BIGINT AUTO_INCREMENT PRIMARY KEY, + LEGO_SET BIGINT, + ALTERNATIVE 
BIGINT, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET (`id1`); + +CREATE TABLE ONE_TO_ONE_PARENT +( + `id3` BIGINT AUTO_INCREMENT PRIMARY KEY, + `content` VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER PRIMARY KEY, + `content` VARCHAR(30) +); + +CREATE TABLE LIST_PARENT +( + `id4` BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE SIMPLE_LIST_PARENT +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element_no_id +( + CONTENT VARCHAR(100), + SIMPLE_LIST_PARENT_key BIGINT, + SIMPLE_LIST_PARENT BIGINT, + LIST_PARENT_key BIGINT, + LIST_PARENT BIGINT +); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + BINARY_DATA VARBINARY(20) NOT NULL +); + +CREATE TABLE CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + + +CREATE TABLE CHAIN3 +( + THREE BIGINT AUTO_INCREMENT PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4(FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO BIGINT AUTO_INCREMENT PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3(THREE) +); + +CREATE TABLE CHAIN1 +( + ONE BIGINT AUTO_INCREMENT PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2(TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO BIGINT AUTO_INCREMENT PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1(ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE 
VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + NO_ID_LIST_CHAIN1_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + 
NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE VERSIONED_AGGREGATE +( + ID 
BIGINT AUTO_INCREMENT PRIMARY KEY, + VERSION BIGINT +); + + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT PRIMARY KEY, + TEST_TIME TIMESTAMP(6) +); + +CREATE TABLE WITH_ID_ONLY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY +); + +CREATE TABLE WITH_INSERT_ONLY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + `renamed` VARCHAR(100), + ID BIGINT +); + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mssql.sql new file mode 100644 index 0000000000..d922614f26 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mssql.sql @@ -0,0 +1,444 @@ +DROP TABLE IF EXISTS MANUAL; +DROP TABLE IF EXISTS LEGO_SET; +CREATE TABLE LEGO_SET +( + [id1] BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + [id2] BIGINT IDENTITY PRIMARY KEY, + LEGO_SET BIGINT, + ALTERNATIVE BIGINT, + CONTENT VARCHAR(2000) +); +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) REFERENCES LEGO_SET (id1); + +DROP TABLE IF EXISTS Child_No_Id; +DROP TABLE IF EXISTS ONE_TO_ONE_PARENT; +CREATE TABLE ONE_TO_ONE_PARENT +( + [id3] BIGINT IDENTITY PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT BIGINT PRIMARY KEY, + [content] VARCHAR(30) +); + +DROP TABLE IF EXISTS element_no_id; +DROP TABLE IF EXISTS LIST_PARENT; +DROP TABLE IF EXISTS SIMPLE_LIST_PARENT; +CREATE TABLE LIST_PARENT +( + [id4] BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE SIMPLE_LIST_PARENT +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element_no_id +( + CONTENT VARCHAR(100), + SIMPLE_LIST_PARENT_key BIGINT, + SIMPLE_LIST_PARENT BIGINT, + LIST_PARENT_key BIGINT, + LIST_PARENT BIGINT +); + +DROP TABLE IF EXISTS BYTE_ARRAY_OWNER; +CREATE TABLE BYTE_ARRAY_OWNER +( + ID BIGINT IDENTITY PRIMARY KEY, + BINARY_DATA VARBINARY(20) NOT NULL +); + +DROP TABLE IF EXISTS CHAIN0; +DROP TABLE IF EXISTS CHAIN1; +DROP TABLE IF EXISTS CHAIN2; +DROP TABLE IF EXISTS CHAIN3; +DROP TABLE IF EXISTS CHAIN4; + +CREATE TABLE CHAIN4 +( + FOUR BIGINT IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE CHAIN3 +( + THREE BIGINT IDENTITY PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO BIGINT IDENTITY PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE BIGINT IDENTITY PRIMARY 
KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO BIGINT IDENTITY PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +DROP TABLE IF EXISTS NO_ID_CHAIN0; +DROP TABLE IF EXISTS NO_ID_CHAIN1; +DROP TABLE IF EXISTS NO_ID_CHAIN2; +DROP TABLE IF EXISTS NO_ID_CHAIN3; +DROP TABLE IF EXISTS NO_ID_CHAIN4; + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR BIGINT IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +DROP TABLE IF EXISTS NO_ID_LIST_CHAIN0; +DROP TABLE IF EXISTS NO_ID_LIST_CHAIN1; +DROP TABLE IF EXISTS NO_ID_LIST_CHAIN2; +DROP TABLE IF EXISTS NO_ID_LIST_CHAIN3; +DROP TABLE IF EXISTS NO_ID_LIST_CHAIN4; + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR BIGINT IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES 
NO_ID_LIST_CHAIN3 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + NO_ID_LIST_CHAIN1_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + +DROP TABLE IF EXISTS NO_ID_MAP_CHAIN0; +DROP TABLE IF EXISTS NO_ID_MAP_CHAIN1; +DROP TABLE IF EXISTS NO_ID_MAP_CHAIN2; +DROP TABLE IF EXISTS NO_ID_MAP_CHAIN3; +DROP TABLE IF EXISTS NO_ID_MAP_CHAIN4; + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR BIGINT IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + 
NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +DROP TABLE IF EXISTS VERSIONED_AGGREGATE; + +CREATE TABLE VERSIONED_AGGREGATE +( + ID BIGINT IDENTITY PRIMARY KEY, + VERSION BIGINT +); + + +DROP TABLE IF EXISTS WITH_LOCAL_DATE_TIME; + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT PRIMARY KEY, + TEST_TIME datetime2(7) +); + +DROP TABLE IF EXISTS WITH_ID_ONLY; + +CREATE TABLE WITH_ID_ONLY +( + ID BIGINT IDENTITY PRIMARY KEY +); + +DROP TABLE IF EXISTS WITH_INSERT_ONLY; + +CREATE TABLE WITH_INSERT_ONLY +( + ID BIGINT IDENTITY PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +DROP TABLE IF EXISTS MULTIPLE_COLLECTIONS; +DROP TABLE IF EXISTS MAP_ELEMENT; +DROP TABLE IF EXISTS LIST_ELEMENT; +DROP TABLE IF EXISTS SET_ELEMENT; + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + 
MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +DROP TABLE IF EXISTS BOOK; +DROP TABLE IF EXISTS AUTHOR; + +CREATE TABLE AUTHOR +( + ID BIGINT IDENTITY PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +DROP TABLE IF EXISTS ENUM_MAP_OWNER; +CREATE TABLE ENUM_MAP_OWNER +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); + + +DROP TABLE IF EXISTS REFERENCED; +DROP TABLE IF EXISTS WITH_ONE_TO_ONE; + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + "renamed" VARCHAR(100), + ID BIGINT +); + +DROP TABLE IF EXISTS THIRD; +DROP TABLE IF EXISTS SEC; +DROP TABLE IF EXISTS FIRST; + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mysql.sql new file mode 100644 index 0000000000..3672630b26 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-mysql.sql @@ -0,0 +1,400 @@ +CREATE TABLE LEGO_SET +( + `id1` BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + `id2` BIGINT AUTO_INCREMENT PRIMARY KEY, + LEGO_SET BIGINT, + ALTERNATIVE BIGINT, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + 
ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET (`id1`); + +CREATE TABLE ONE_TO_ONE_PARENT +( + `id3` BIGINT AUTO_INCREMENT PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER PRIMARY KEY, + `content` VARCHAR(30) +); + +CREATE TABLE SIMPLE_LIST_PARENT +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE LIST_PARENT +( + `id4` BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element_no_id +( + CONTENT VARCHAR(100), + LIST_PARENT_key BIGINT, + SIMPLE_LIST_PARENT_key BIGINT, + LIST_PARENT BIGINT, + SIMPLE_LIST_PARENT BIGINT +); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + BINARY_DATA VARBINARY(20) NOT NULL +); + +CREATE TABLE CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + + +CREATE TABLE CHAIN3 +( + THREE BIGINT AUTO_INCREMENT PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO BIGINT AUTO_INCREMENT PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE BIGINT AUTO_INCREMENT PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO BIGINT AUTO_INCREMENT PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) 
REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + NO_ID_LIST_CHAIN1_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, 
+ NO_ID_LIST_CHAIN2_KEY + ) +); + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR BIGINT AUTO_INCREMENT PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE VERSIONED_AGGREGATE +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + VERSION BIGINT +); + +CREATE TABLE 
WITH_READ_ONLY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(200), + READ_ONLY VARCHAR(200) DEFAULT 'from-db' +); + + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT PRIMARY KEY, + TEST_TIME TIMESTAMP(6) +); + +CREATE TABLE WITH_ID_ONLY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY +); + +CREATE TABLE WITH_INSERT_ONLY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); + + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + `renamed` VARCHAR(100), + ID BIGINT +); + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-oracle.sql new file mode 100644 index 0000000000..706e5e46d9 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-oracle.sql @@ -0,0 +1,450 @@ +DROP TABLE MANUAL CASCADE CONSTRAINTS PURGE; +DROP TABLE LEGO_SET CASCADE CONSTRAINTS PURGE; +DROP TABLE CHILD_NO_ID CASCADE CONSTRAINTS PURGE; +DROP TABLE ONE_TO_ONE_PARENT CASCADE CONSTRAINTS PURGE; +DROP TABLE ELEMENT_NO_ID CASCADE CONSTRAINTS PURGE; +DROP TABLE LIST_PARENT CASCADE CONSTRAINTS PURGE; +DROP TABLE SIMPLE_LIST_PARENT CASCADE CONSTRAINTS PURGE; +DROP TABLE BYTE_ARRAY_OWNER CASCADE CONSTRAINTS PURGE; +DROP TABLE CHAIN0 CASCADE CONSTRAINTS PURGE; +DROP TABLE CHAIN1 CASCADE CONSTRAINTS PURGE; +DROP TABLE CHAIN2 CASCADE CONSTRAINTS PURGE; +DROP TABLE CHAIN3 CASCADE CONSTRAINTS PURGE; +DROP TABLE CHAIN4 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_CHAIN0 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_CHAIN1 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_CHAIN2 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_CHAIN3 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_CHAIN4 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_LIST_CHAIN0 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_LIST_CHAIN1 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_LIST_CHAIN2 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_LIST_CHAIN3 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_LIST_CHAIN4 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_MAP_CHAIN0 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_MAP_CHAIN1 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_MAP_CHAIN2 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_MAP_CHAIN3 CASCADE CONSTRAINTS PURGE; +DROP TABLE NO_ID_MAP_CHAIN4 CASCADE CONSTRAINTS PURGE; +DROP TABLE VERSIONED_AGGREGATE CASCADE CONSTRAINTS PURGE; +DROP TABLE WITH_READ_ONLY CASCADE CONSTRAINTS PURGE; +DROP TABLE WITH_LOCAL_DATE_TIME CASCADE CONSTRAINTS PURGE; +DROP TABLE WITH_ID_ONLY CASCADE CONSTRAINTS PURGE; +DROP TABLE WITH_INSERT_ONLY CASCADE CONSTRAINTS PURGE; + +DROP TABLE MULTIPLE_COLLECTIONS CASCADE CONSTRAINTS PURGE; +DROP TABLE MAP_ELEMENT CASCADE 
CONSTRAINTS PURGE; +DROP TABLE LIST_ELEMENT CASCADE CONSTRAINTS PURGE; +DROP TABLE SET_ELEMENT CASCADE CONSTRAINTS PURGE; + +DROP TABLE BOOK CASCADE CONSTRAINTS PURGE; +DROP TABLE AUTHOR CASCADE CONSTRAINTS PURGE; + +DROP TABLE ENUM_MAP_OWNER CASCADE CONSTRAINTS PURGE; + +DROP TABLE REFERENCED CASCADE CONSTRAINTS PURGE; +DROP TABLE WITH_ONE_TO_ONE CASCADE CONSTRAINTS PURGE; + +DROP TABLE THIRD CASCADE CONSTRAINTS PURGE; +DROP TABLE SEC CASCADE CONSTRAINTS PURGE; +DROP TABLE FIRST CASCADE CONSTRAINTS PURGE; + +CREATE TABLE LEGO_SET +( + "id1" NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + "id2" NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + LEGO_SET NUMBER, + "alternative" NUMBER, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET ("id1"); + +CREATE TABLE ONE_TO_ONE_PARENT +( + "id3" NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER PRIMARY KEY, + CONTENT VARCHAR(30) +); + +CREATE TABLE LIST_PARENT +( + "id4" NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE SIMPLE_LIST_PARENT +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element_no_id +( + CONTENT VARCHAR(100), + SIMPLE_LIST_PARENT_key NUMBER, + SIMPLE_LIST_PARENT NUMBER, + LIST_PARENT_key NUMBER, + LIST_PARENT NUMBER +); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + BINARY_DATA RAW(100) NOT NULL +); + +CREATE TABLE CHAIN4 +( + FOUR NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + + +CREATE TABLE CHAIN3 +( + THREE NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 NUMBER, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE 
CHAIN2 +( + TWO NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 NUMBER, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 NUMBER, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 NUMBER, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 NUMBER, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 NUMBER, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 NUMBER, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 NUMBER, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 NUMBER, + NO_ID_LIST_CHAIN4_KEY NUMBER, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 NUMBER, + NO_ID_LIST_CHAIN4_KEY NUMBER, + NO_ID_LIST_CHAIN3_KEY NUMBER, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + 
NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 NUMBER, + NO_ID_LIST_CHAIN4_KEY NUMBER, + NO_ID_LIST_CHAIN3_KEY NUMBER, + NO_ID_LIST_CHAIN2_KEY NUMBER, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 NUMBER, + NO_ID_LIST_CHAIN4_KEY NUMBER, + NO_ID_LIST_CHAIN3_KEY NUMBER, + NO_ID_LIST_CHAIN2_KEY NUMBER, + NO_ID_LIST_CHAIN1_KEY NUMBER, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 NUMBER, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 NUMBER, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + 
NO_ID_MAP_CHAIN4 NUMBER, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 NUMBER, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE VERSIONED_AGGREGATE +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + VERSION NUMBER +); + +CREATE TABLE WITH_READ_ONLY +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR(200), + READ_ONLY VARCHAR(200) DEFAULT 'from-db' +); + + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID NUMBER PRIMARY KEY, + TEST_TIME TIMESTAMP(9) +); + +CREATE TABLE WITH_ID_ONLY +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY +); + + +CREATE TABLE WITH_INSERT_ONLY +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS NUMBER, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS NUMBER, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + 
+CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS NUMBER, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER NUMBER, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY +); + +CREATE TABLE BOOK +( + AUTHOR NUMBER, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + "renamed" VARCHAR(100), + ID NUMBER +); +CREATE TABLE FIRST +( + ID NUMBER NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID NUMBER NOT NULL PRIMARY KEY, + FIRST NUMBER NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC NUMBER NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-postgres.sql new file mode 100644 index 0000000000..36f20896b7 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-postgres.sql @@ -0,0 +1,473 @@ +DROP TABLE MANUAL; +DROP TABLE LEGO_SET; +DROP TABLE ONE_TO_ONE_PARENT; +DROP TABLE Child_No_Id; +DROP TABLE element_no_id; +DROP TABLE "LIST_PARENT"; +DROP TABLE SIMPLE_LIST_PARENT; +DROP TABLE "ARRAY_OWNER"; +DROP TABLE DOUBLE_LIST_OWNER; +DROP TABLE FLOAT_LIST_OWNER; +DROP TABLE BYTE_ARRAY_OWNER; +DROP TABLE CHAIN0; +DROP TABLE CHAIN1; +DROP TABLE CHAIN2; +DROP TABLE CHAIN3; +DROP TABLE CHAIN4; +DROP TABLE NO_ID_CHAIN0; +DROP TABLE NO_ID_CHAIN1; +DROP TABLE NO_ID_CHAIN2; +DROP TABLE NO_ID_CHAIN3; +DROP TABLE NO_ID_CHAIN4; +DROP TABLE 
NO_ID_LIST_CHAIN0; +DROP TABLE NO_ID_LIST_CHAIN1; +DROP TABLE NO_ID_LIST_CHAIN2; +DROP TABLE NO_ID_LIST_CHAIN3; +DROP TABLE NO_ID_LIST_CHAIN4; +DROP TABLE NO_ID_MAP_CHAIN0; +DROP TABLE NO_ID_MAP_CHAIN1; +DROP TABLE NO_ID_MAP_CHAIN2; +DROP TABLE NO_ID_MAP_CHAIN3; +DROP TABLE NO_ID_MAP_CHAIN4; +DROP TABLE "VERSIONED_AGGREGATE"; +DROP TABLE WITH_READ_ONLY; +DROP TABLE WITH_LOCAL_DATE_TIME; +DROP TABLE WITH_ID_ONLY; +DROP TABLE WITH_INSERT_ONLY; + +DROP TABLE MULTIPLE_COLLECTIONS; +DROP TABLE MAP_ELEMENT; +DROP TABLE LIST_ELEMENT; +DROP TABLE SET_ELEMENT; + +DROP TABLE BOOK; +DROP TABLE AUTHOR; + +DROP TABLE ENUM_MAP_OWNER; + +DROP TABLE REFERENCED; +DROP TABLE WITH_ONE_TO_ONE; + +DROP TABLE THIRD; +DROP TABLE SEC; +DROP TABLE FIRST; + +CREATE TABLE LEGO_SET +( + "id1" SERIAL PRIMARY KEY, + NAME VARCHAR(30) +); +CREATE TABLE MANUAL +( + "id2" SERIAL PRIMARY KEY, + LEGO_SET BIGINT, + ALTERNATIVE BIGINT, + CONTENT VARCHAR(2000) +); + +ALTER TABLE MANUAL + ADD FOREIGN KEY (LEGO_SET) + REFERENCES LEGO_SET ("id1"); + +CREATE TABLE ONE_TO_ONE_PARENT +( + "id3" SERIAL PRIMARY KEY, + content VARCHAR(30) +); +CREATE TABLE Child_No_Id +( + ONE_TO_ONE_PARENT INTEGER PRIMARY KEY, + content VARCHAR(30) +); + +CREATE TABLE "LIST_PARENT" +( + "id4" SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SIMPLE_LIST_PARENT +( + id SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE element_no_id +( + content VARCHAR(100), + LIST_PARENT_key BIGINT, + SIMPLE_LIST_PARENT_key BIGINT, + "LIST_PARENT" INTEGER, + SIMPLE_LIST_PARENT INTEGER +); + +CREATE TABLE "ARRAY_OWNER" +( + ID SERIAL PRIMARY KEY, + DIGITS VARCHAR(20)[10], + MULTIDIMENSIONAL VARCHAR(20)[10][10] +); + +CREATE TABLE DOUBLE_LIST_OWNER +( + ID SERIAL PRIMARY KEY, + DIGITS DOUBLE PRECISION[10] +); + +CREATE TABLE FLOAT_LIST_OWNER +( + ID SERIAL PRIMARY KEY, + DIGITS REAL[10] +); + +CREATE TABLE BYTE_ARRAY_OWNER +( + ID SERIAL PRIMARY KEY, + BINARY_DATA BYTEA NOT NULL +); + +CREATE TABLE CHAIN4 +( + FOUR SERIAL 
PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE CHAIN3 +( + THREE SERIAL PRIMARY KEY, + THREE_VALUE VARCHAR(20), + CHAIN4 BIGINT, + FOREIGN KEY (CHAIN4) REFERENCES CHAIN4 (FOUR) +); + +CREATE TABLE CHAIN2 +( + TWO SERIAL PRIMARY KEY, + TWO_VALUE VARCHAR(20), + CHAIN3 BIGINT, + FOREIGN KEY (CHAIN3) REFERENCES CHAIN3 (THREE) +); + +CREATE TABLE CHAIN1 +( + ONE SERIAL PRIMARY KEY, + ONE_VALUE VARCHAR(20), + CHAIN2 BIGINT, + FOREIGN KEY (CHAIN2) REFERENCES CHAIN2 (TWO) +); + +CREATE TABLE CHAIN0 +( + ZERO SERIAL PRIMARY KEY, + ZERO_VALUE VARCHAR(20), + CHAIN1 BIGINT, + FOREIGN KEY (CHAIN1) REFERENCES CHAIN1 (ONE) +); + +CREATE TABLE NO_ID_CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_CHAIN4 BIGINT, + FOREIGN KEY (NO_ID_CHAIN4) REFERENCES NO_ID_CHAIN4 (FOUR) +); + + +CREATE TABLE NO_ID_LIST_CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_LIST_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY), + FOREIGN KEY (NO_ID_LIST_CHAIN4) REFERENCES NO_ID_LIST_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_LIST_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) REFERENCES NO_ID_LIST_CHAIN3 ( + 
NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) REFERENCES NO_ID_LIST_CHAIN2 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_LIST_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_LIST_CHAIN4 BIGINT, + NO_ID_LIST_CHAIN4_KEY BIGINT, + NO_ID_LIST_CHAIN3_KEY BIGINT, + NO_ID_LIST_CHAIN2_KEY BIGINT, + NO_ID_LIST_CHAIN1_KEY BIGINT, + PRIMARY KEY (NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY, + NO_ID_LIST_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) REFERENCES NO_ID_LIST_CHAIN1 ( + NO_ID_LIST_CHAIN4, + NO_ID_LIST_CHAIN4_KEY, + NO_ID_LIST_CHAIN3_KEY, + NO_ID_LIST_CHAIN2_KEY + ) +); + + + +CREATE TABLE NO_ID_MAP_CHAIN4 +( + FOUR SERIAL PRIMARY KEY, + FOUR_VALUE VARCHAR(20) +); + +CREATE TABLE NO_ID_MAP_CHAIN3 +( + THREE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY), + FOREIGN KEY (NO_ID_MAP_CHAIN4) REFERENCES NO_ID_MAP_CHAIN4 (FOUR) +); + +CREATE TABLE NO_ID_MAP_CHAIN2 +( + TWO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) REFERENCES NO_ID_MAP_CHAIN3 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN1 +( + ONE_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY 
VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) REFERENCES NO_ID_MAP_CHAIN2 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY + ) +); + +CREATE TABLE NO_ID_MAP_CHAIN0 +( + ZERO_VALUE VARCHAR(20), + NO_ID_MAP_CHAIN4 BIGINT, + NO_ID_MAP_CHAIN4_KEY VARCHAR(20), + NO_ID_MAP_CHAIN3_KEY VARCHAR(20), + NO_ID_MAP_CHAIN2_KEY VARCHAR(20), + NO_ID_MAP_CHAIN1_KEY VARCHAR(20), + PRIMARY KEY (NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY, + NO_ID_MAP_CHAIN1_KEY), + FOREIGN KEY ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) REFERENCES NO_ID_MAP_CHAIN1 ( + NO_ID_MAP_CHAIN4, + NO_ID_MAP_CHAIN4_KEY, + NO_ID_MAP_CHAIN3_KEY, + NO_ID_MAP_CHAIN2_KEY + ) +); + +CREATE TABLE "VERSIONED_AGGREGATE" +( + ID SERIAL PRIMARY KEY, + VERSION BIGINT +); + +CREATE TABLE WITH_READ_ONLY +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(200), + READ_ONLY VARCHAR(200) DEFAULT 'from-db' +); + +CREATE TABLE WITH_LOCAL_DATE_TIME +( + ID BIGINT PRIMARY KEY, + TEST_TIME TIMESTAMP(9) WITHOUT TIME ZONE +); + +CREATE TABLE WITH_ID_ONLY +( + ID SERIAL PRIMARY KEY +); + +CREATE TABLE WITH_INSERT_ONLY +( + ID SERIAL PRIMARY KEY, + INSERT_ONLY VARCHAR(100) +); + +CREATE TABLE MULTIPLE_COLLECTIONS +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE SET_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE LIST_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY INT, + NAME VARCHAR(100) +); + +CREATE TABLE MAP_ELEMENT +( + MULTIPLE_COLLECTIONS BIGINT, + MULTIPLE_COLLECTIONS_KEY VARCHAR(10), + ENUM_MAP_OWNER BIGINT, + ENUM_MAP_OWNER_KEY VARCHAR(10), + NAME VARCHAR(100) +); + +CREATE TABLE AUTHOR +( + ID SERIAL PRIMARY KEY +); + 
+CREATE TABLE BOOK +( + AUTHOR BIGINT, + NAME VARCHAR(100) +); + +CREATE TABLE ENUM_MAP_OWNER +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE WITH_ONE_TO_ONE +( + ID VARCHAR(100) +); + +CREATE TABLE REFERENCED +( + "renamed" VARCHAR(100), + ID BIGINT +); + +CREATE TABLE FIRST +( + ID BIGINT NOT NULL PRIMARY KEY, + NAME VARCHAR(20) NOT NULL +); + +CREATE TABLE SEC +( + ID BIGINT NOT NULL PRIMARY KEY, + FIRST BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (FIRST) REFERENCES FIRST (ID) +); + +CREATE TABLE THIRD +( + SEC BIGINT NOT NULL, + NAME VARCHAR(20) NOT NULL, + FOREIGN KEY (SEC) REFERENCES SEC (ID) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-db2.sql new file mode 100644 index 0000000000..e08b911b79 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-db2.sql @@ -0,0 +1,17 @@ +DROP TABLE OTHER.DUMMY_ENTITY; +DROP TABLE OTHER.REFERENCED; +DROP SCHEMA OTHER RESTRICT; + +CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-h2.sql new file mode 100644 index 0000000000..952dd1e345 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-h2.sql @@ -0,0 +1,14 @@ 
+CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(30) +); + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-hsql.sql new file mode 100644 index 0000000000..89f7802652 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-hsql.sql @@ -0,0 +1,15 @@ +CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(30) +); + + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..9525406392 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mariadb.sql @@ -0,0 +1,15 @@ +CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(30) +); + + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mssql.sql new file mode 100644 index 0000000000..656bfb21b1 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mssql.sql @@ -0,0 +1,18 @@ +DROP TABLE IF EXISTS OTHER.DUMMY_ENTITY; +DROP TABLE IF EXISTS OTHER.REFERENCED; +DROP SCHEMA IF EXISTS OTHER; + +CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(30) +); + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mysql.sql new file mode 100644 index 0000000000..9525406392 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-mysql.sql @@ -0,0 +1,15 @@ +CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(30) +); + + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-oracle.sql new file mode 100644 index 0000000000..9e93e500fc --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-oracle.sql @@ -0,0 +1,19 @@ +DROP USER OTHER CASCADE; + +CREATE USER OTHER; + +ALTER USER OTHER QUOTA UNLIMITED ON USERS; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR2(30) +); + + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR2(30) +); + diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-postgres.sql new file mode 100644 index 0000000000..952dd1e345 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateSchemaIntegrationTests-postgres.sql @@ -0,0 +1,14 @@ +CREATE SCHEMA OTHER; + +CREATE TABLE OTHER.DUMMY_ENTITY +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(30) +); + +CREATE TABLE OTHER.REFERENCED +( + DUMMY_ENTITY INTEGER, + NAME VARCHAR(30) +); + diff --git a/src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisCustomizingNamespaceHsqlIntegrationTests-hsql.sql similarity index 100% rename from src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisHsqlIntegrationTests-hsql.sql rename to spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisCustomizingNamespaceHsqlIntegrationTests-hsql.sql diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..a2f6eb9021 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.mybatis/MyBatisHsqlIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE dummyentity(id BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcAuditingHsqlIntegrationTests-hsql.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcAuditingHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..7763dd230f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcAuditingHsqlIntegrationTests-hsql.sql @@ -0,0 +1,8 @@ +CREATE TABLE DummyEntity ( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + name VARCHAR(128), + created_By VARCHAR(128), + created_Date TIMESTAMP, + last_Modified_By VARCHAR(128), + last_Modified_Date TIMESTAMP +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-db2.sql new file mode 100644 index 0000000000..660512ca78 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-db2.sql @@ -0,0 +1,3 @@ +DROP TABLE Dummy_entity; + +CREATE TABLE Dummy_Entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-h2.sql new file mode 100644 index 0000000000..aab1bd853f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-h2.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY 
( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-hsql.sql new file mode 100644 index 0000000000..aab1bd853f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..ec172704c2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mariadb.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mssql.sql new file mode 100644 index 0000000000..f9407ad2db --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mssql.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS Dummy_Entity; +CREATE TABLE Dummy_Entity ( id BIGINT IDENTITY PRIMARY KEY); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mysql.sql new file mode 100644 index 0000000000..ec172704c2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-mysql.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-oracle.sql new file mode 100644 index 0000000000..cc28f6be46 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-oracle.sql @@ -0,0 +1,3 @@ +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS; + +CREATE TABLE DUMMY_ENTITY ( id NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-postgres.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-postgres.sql new file mode 100644 index 0000000000..78469c6d31 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesBrokenTransactionManagerRefIntegrationTests-postgres.sql @@ -0,0 +1,2 @@ +DROP TABLE Dummy_Entity +CREATE TABLE Dummy_Entity ( id SERIAL PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-db2.sql new file mode 100644 index 0000000000..660512ca78 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-db2.sql @@ -0,0 +1,3 @@ +DROP TABLE Dummy_entity; + +CREATE TABLE Dummy_Entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-h2.sql new file mode 100644 index 0000000000..aab1bd853f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-h2.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-hsql.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-hsql.sql new file mode 100644 index 0000000000..aab1bd853f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..ec172704c2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mariadb.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mssql.sql new file mode 100644 index 0000000000..f9407ad2db --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mssql.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS Dummy_Entity; +CREATE TABLE Dummy_Entity ( id BIGINT IDENTITY PRIMARY KEY); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mysql.sql new file mode 
100644 index 0000000000..ec172704c2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mysql.sql @@ -0,0 +1 @@ +CREATE TABLE Dummy_Entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-oracle.sql new file mode 100644 index 0000000000..cc28f6be46 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-oracle.sql @@ -0,0 +1,3 @@ +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS; + +CREATE TABLE DUMMY_ENTITY ( id NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-postgres.sql new file mode 100644 index 0000000000..78469c6d31 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-postgres.sql @@ -0,0 +1,2 @@ +DROP TABLE Dummy_Entity +CREATE TABLE Dummy_Entity ( id SERIAL PRIMARY KEY) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.query/QueryAnnotationHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.query/QueryAnnotationHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..12c793eaaf --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository.query/QueryAnnotationHsqlIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/AbstractJdbcRepositoryLookUpStrategyTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/AbstractJdbcRepositoryLookUpStrategyTests-hsql.sql new file mode 100644 index 0000000000..8e5b1318ac --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/AbstractJdbcRepositoryLookUpStrategyTests-hsql.sql @@ -0,0 +1,5 @@ +CREATE TABLE aggregate_one +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryBeforeSaveHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryBeforeSaveHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..3e8d6ed8c3 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryBeforeSaveHsqlIntegrationTests-hsql.sql @@ -0,0 +1,22 @@ +-- noinspection SqlNoDataSourceInspectionForFile + +CREATE TABLE ImmutableEntity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE MutableEntity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE MutableWithImmutableIdEntity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE ImmutableWithMutableIdEntity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100) +); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-db2.sql new file mode 100644 index 0000000000..aef62bb57a --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-db2.sql @@ -0,0 +1,5 @@ +DROP TABLE element; +DROP TABLE dummy_entity; + +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) NOT NULL PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-h2.sql new file mode 100644 index 0000000000..942dd36cf4 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-h2.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-hsql.sql new file mode 100644 index 0000000000..942dd36cf4 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-hsql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..e0a8a767cc --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mssql.sql new file mode 100644 index 0000000000..a56e334493 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mssql.sql @@ -0,0 +1,5 @@ +DROP TABLE ELEMENT; +DROP TABLE DUMMY_ENTITY; + +CREATE TABLE dummy_entity ( id BIGINT IDENTITY PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT IDENTITY PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mysql.sql new file mode 100644 index 0000000000..e0a8a767cc --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-oracle.sql new file mode 100644 index 0000000000..e6d9bfb0d8 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-oracle.sql @@ -0,0 +1,14 @@ +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS PURGE; +DROP TABLE ELEMENT CASCADE CONSTRAINTS PURGE; + +CREATE TABLE DUMMY_ENTITY ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE ELEMENT ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + CONTENT NUMBER, + DUMMY_ENTITY_KEY NUMBER , + DUMMY_ENTITY NUMBER +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-postgres.sql new file mode 100644 index 0000000000..ed7a483c99 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryConcurrencyIntegrationTests-postgres.sql @@ -0,0 +1,4 @@ +DROP TABLE dummy_entity; +DROP TABLE element; +CREATE TABLE dummy_entity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id SERIAL PRIMARY KEY, content BIGINT, Dummy_Entity_key BIGINT,dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCrossAggregateHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCrossAggregateHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..f03df7b7ea --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCrossAggregateHsqlIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE aggregate_one ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100), two INTEGER); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-db2.sql new file mode 100644 index 0000000000..6abce10e3a --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-db2.sql @@ -0,0 +1,5 @@ +DROP TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL; +DROP TABLE OTHER_ENTITY; + +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( id BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-h2.sql new file mode 100644 index 0000000000..426153b9e3 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-h2.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( id IDENTITY PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID IDENTITY PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-hsql.sql new file mode 100644 index 0000000000..9508fbb0e2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-hsql.sql @@ -0,0 +1,3 @@ +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( id IDENTITY PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID IDENTITY PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..4e2dee5382 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE 
ENTITY_WITH_STRINGY_BIG_DECIMAL ( id BIGINT AUTO_INCREMENT PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID BIGINT AUTO_INCREMENT PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mssql.sql new file mode 100644 index 0000000000..0a884be3cf --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mssql.sql @@ -0,0 +1,5 @@ +DROP TABLE OTHER_ENTITY; +DROP TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL; + +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( id BIGINT IDENTITY PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID BIGINT IDENTITY PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mysql.sql new file mode 100644 index 0000000000..b1d3da76c2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( ID BIGINT AUTO_INCREMENT PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID BIGINT AUTO_INCREMENT PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-oracle.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-oracle.sql new file mode 100644 index 0000000000..b42ab92527 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-oracle.sql @@ -0,0 +1,15 @@ +DROP TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL CASCADE CONSTRAINTS PURGE; +DROP TABLE OTHER_ENTITY CASCADE CONSTRAINTS PURGE; + +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + STRINGY_NUMBER DECIMAL(20,10), + DIRECTION NUMBER(1,0) +); + +CREATE TABLE OTHER_ENTITY ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + CREATED DATE, + ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER +); + diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-postgres.sql new file mode 100644 index 0000000000..882d8df894 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryCustomConversionIntegrationTests-postgres.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_STRINGY_BIG_DECIMAL ( id SERIAL PRIMARY KEY, Stringy_number DECIMAL(20,10), DIRECTION INTEGER); +CREATE TABLE OTHER_ENTITY ( ID SERIAL PRIMARY KEY, CREATED DATE, ENTITY_WITH_STRINGY_BIG_DECIMAL INTEGER); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..57730a5ce7 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedHsqlIntegrationTests-hsql.sql @@ -0,0 +1,30 @@ +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX2_ATTR BIGINT, + PREFIX_TEST VARCHAR(100), + PREFIX_PREFIX2_ATTR BIGINT +); + +CREATE TABLE SORT_EMBEDDED_ENTITY +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + first_name VARCHAR(100), + address VARCHAR(255), + email VARCHAR(255), + phone_number VARCHAR(255) +); + +CREATE TABLE WITH_DOT_COLUMN +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + "address.city" VARCHAR(255) +); + +CREATE TABLE WITH_DOT_EMBEDDED +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + "PREFIX.EMAIL" VARCHAR(255), + "PREFIX.PHONE_NUMBER" VARCHAR(255) +) \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-db2.sql new file mode 100644 index 0000000000..05b12dccb5 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-db2.sql @@ -0,0 +1,2 @@ +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-h2.sql new file mode 100644 index 0000000000..1000cc556b --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-h2.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-hsql.sql new file mode 100644 index 0000000000..1000cc556b --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..c3519e1a99 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mariadb.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity (id BIGINT AUTO_INCREMENT PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mssql.sql new file mode 100644 index 0000000000..cdc0c880d7 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mssql.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS dummy_entity; +CREATE TABLE dummy_entity (id BIGINT IDENTITY PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mysql.sql new file mode 100644 index 0000000000..c3519e1a99 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-mysql.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity (id BIGINT AUTO_INCREMENT PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-oracle.sql new file mode 100644 index 0000000000..1a27e7c6ee --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-oracle.sql @@ -0,0 +1,7 @@ +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS PURGE; + +CREATE TABLE DUMMY_ENTITY ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + PREFIX_ATTR1 NUMBER, + PREFIX_ATTR2 VARCHAR2(100) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-postgres.sql new file mode 100644 index 0000000000..dc84d871df --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedImmutableIntegrationTests-postgres.sql @@ -0,0 +1,2 @@ +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity (id SERIAL PRIMARY KEY, PREFIX_ATTR1 BIGINT, PREFIX_ATTR2 VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-db2.sql new file mode 100644 index 0000000000..1a71745115 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-db2.sql @@ -0,0 +1,5 @@ +DROP TABLE dummy_entity; +DROP TABLE dummy_entity2; + +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, TEST VARCHAR(100)); +CREATE TABLE dummy_entity2 ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, TEST VARCHAR(100), PREFIX_ATTR BIGINT) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-h2.sql new file mode 100644 index 0000000000..60af8e60c9 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-h2.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, TEST VARCHAR(100)) +CREATE TABLE dummy_entity2 ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, TEST VARCHAR(100), PREFIX_ATTR BIGINT) diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-hsql.sql new file mode 100644 index 0000000000..60af8e60c9 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-hsql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, TEST VARCHAR(100)) +CREATE TABLE dummy_entity2 ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, TEST VARCHAR(100), PREFIX_ATTR BIGINT) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..c9a201e612 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity (id BIGINT AUTO_INCREMENT PRIMARY KEY, TEST VARCHAR(100)); +CREATE TABLE dummy_entity2 (id BIGINT AUTO_INCREMENT PRIMARY KEY, TEST VARCHAR(100), PREFIX_ATTR BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mssql.sql new file mode 100644 index 0000000000..054f0d51c1 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mssql.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS dummy_entity; +CREATE TABLE dummy_entity (id BIGINT IDENTITY PRIMARY KEY, TEST VARCHAR(100)); +DROP TABLE IF EXISTS dummy_entity2; +CREATE TABLE dummy_entity2 (id BIGINT PRIMARY KEY, TEST VARCHAR(100), PREFIX_ATTR BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mysql.sql new file mode 100644 index 0000000000..c8ae948db3 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-mysql.sql @@ -0,0 +1,11 @@ +CREATE TABLE dummy_entity +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + ID BIGINT PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_ATTR BIGINT +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-oracle.sql new file mode 100644 index 0000000000..0ab81989e5 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-oracle.sql @@ -0,0 +1,12 @@ +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS PURGE; +DROP TABLE DUMMY_ENTITY2 CASCADE CONSTRAINTS PURGE; + +CREATE TABLE DUMMY_ENTITY ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + TEST VARCHAR2(100) +); +CREATE TABLE DUMMY_ENTITY2 ( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + 
TEST VARCHAR2(100), + PREFIX_ATTR NUMBER +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-postgres.sql new file mode 100644 index 0000000000..fa4b1a1392 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedNotInAggregateRootIntegrationTests-postgres.sql @@ -0,0 +1,13 @@ +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity +( + "ID" SERIAL PRIMARY KEY, + TEST VARCHAR(100) +); +DROP TABLE dummy_entity2; +CREATE TABLE dummy_entity2 +( + "ID" INTEGER PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_ATTR BIGINT +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-db2.sql new file mode 100644 index 0000000000..c8fdf5b9a5 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-db2.sql @@ -0,0 +1,16 @@ +DROP TABLE dummy_entity; +DROP TABLE dummy_entity2; + +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + dummy_id BIGINT NOT NULL, + ORDER_KEY BIGINT NOT NULL, + TEST VARCHAR(100), + PRIMARY KEY (dummy_id, ORDER_KEY) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-h2.sql new 
file mode 100644 index 0000000000..fe9d025fd7 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-h2.sql @@ -0,0 +1,13 @@ +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + dummy_id BIGINT, + ORDER_KEY BIGINT, + TEST VARCHAR(100), + PRIMARY KEY (dummy_id, ORDER_KEY) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-hsql.sql new file mode 100644 index 0000000000..1b885a7867 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-hsql.sql @@ -0,0 +1,13 @@ +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + dummy_id BIGINT, + ORDER_KEY BIGINT, + TEST VARCHAR(100), + PRIMARY KEY (dummy_id, ORDER_KEY) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..c08506512e --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mariadb.sql @@ -0,0 +1,13 @@ +CREATE TABLE dummy_entity +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + 
dummy_id BIGINT, + ORDER_KEY BIGINT, + TEST VARCHAR(100), + PRIMARY KEY (dummy_id, ORDER_KEY) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mssql.sql new file mode 100644 index 0000000000..2cfda4b6c3 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mssql.sql @@ -0,0 +1,15 @@ +DROP TABLE IF EXISTS dummy_entity; +CREATE TABLE dummy_entity +( + id BIGINT IDENTITY PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +DROP TABLE IF EXISTS dummy_entity2; +CREATE TABLE dummy_entity2 +( + dummy_id BIGINT, + ORDER_KEY BIGINT, + TEST VARCHAR(100), + CONSTRAINT dummym_entity2_pk PRIMARY KEY (dummy_id, ORDER_KEY) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mysql.sql new file mode 100644 index 0000000000..c08506512e --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-mysql.sql @@ -0,0 +1,13 @@ +CREATE TABLE dummy_entity +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + dummy_id BIGINT, + ORDER_KEY BIGINT, + TEST VARCHAR(100), + PRIMARY KEY (dummy_id, ORDER_KEY) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-oracle.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-oracle.sql new file mode 100644 index 0000000000..2d538df140 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-oracle.sql @@ -0,0 +1,17 @@ +DROP TABLE DUMMY_ENTITY2 CASCADE CONSTRAINTS PURGE; +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS PURGE; + +CREATE TABLE DUMMY_ENTITY +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + TEST VARCHAR2(100), + PREFIX_TEST VARCHAR2(100) +); + +CREATE TABLE DUMMY_ENTITY2 +( + DUMMY_ID NUMBER, + ORDER_KEY NUMBER, + TEST VARCHAR2(100), + PRIMARY KEY (DUMMY_ID, ORDER_KEY) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-postgres.sql new file mode 100644 index 0000000000..e16ee36311 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithCollectionIntegrationTests-postgres.sql @@ -0,0 +1,15 @@ +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity +( + "ID" SERIAL PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +DROP TABLE dummy_entity2; +CREATE TABLE dummy_entity2 +( + "DUMMY_ID" BIGINT, + "ORDER_KEY" BIGINT, + TEST VARCHAR(100), + PRIMARY KEY ("DUMMY_ID", "ORDER_KEY") +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-db2.sql new file mode 100644 index 0000000000..99fb0d624e --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-db2.sql @@ -0,0 +1,14 @@ +DROP TABLE dummy_entity; +DROP TABLE dummy_entity2; + +CREATE TABLE dummy_entity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + ID BIGINT, + TEST VARCHAR(100) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-h2.sql new file mode 100644 index 0000000000..3556181141 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-h2.sql @@ -0,0 +1,11 @@ +CREATE TABLE dummy_entity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + ID BIGINT, + TEST VARCHAR(100) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-hsql.sql new file mode 100644 index 0000000000..3556181141 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-hsql.sql @@ -0,0 +1,11 @@ +CREATE TABLE dummy_entity +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +CREATE TABLE dummy_entity2 +( + ID BIGINT, + TEST VARCHAR(100) +) diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..dd4f082b26 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity (id BIGINT AUTO_INCREMENT PRIMARY KEY, TEST VARCHAR(100), PREFIX_TEST VARCHAR(100)); +CREATE TABLE dummy_entity2 (id BIGINT AUTO_INCREMENT PRIMARY KEY, TEST VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mssql.sql new file mode 100644 index 0000000000..a437ee1450 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mssql.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS dummy_entity; +CREATE TABLE dummy_entity (id BIGINT IDENTITY PRIMARY KEY, TEST VARCHAR(100), PREFIX_TEST VARCHAR(100)); +DROP TABLE IF EXISTS dummy_entity2; +CREATE TABLE dummy_entity2 (id BIGINT PRIMARY KEY, TEST VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mysql.sql new file mode 100644 index 0000000000..dd4f082b26 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ 
+CREATE TABLE dummy_entity (id BIGINT AUTO_INCREMENT PRIMARY KEY, TEST VARCHAR(100), PREFIX_TEST VARCHAR(100)); +CREATE TABLE dummy_entity2 (id BIGINT AUTO_INCREMENT PRIMARY KEY, TEST VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-oracle.sql new file mode 100644 index 0000000000..40437415d6 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-oracle.sql @@ -0,0 +1,16 @@ +DROP TABLE DUMMY_ENTITY2 CASCADE CONSTRAINTS PURGE; +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS PURGE; + + + +CREATE TABLE dummy_entity +( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + TEST VARCHAR2(100), + PREFIX_TEST VARCHAR2(100) +); +CREATE TABLE dummy_entity2 +( + ID NUMBER , + TEST VARCHAR2(100) +) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-postgres.sql new file mode 100644 index 0000000000..c8128208d3 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryEmbeddedWithReferenceIntegrationTests-postgres.sql @@ -0,0 +1,22 @@ +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity +( + "ID" SERIAL PRIMARY KEY, + TEST VARCHAR(100), + PREFIX_TEST VARCHAR(100) +); +DROP TABLE dummy_entity2; +CREATE TABLE dummy_entity2 +( + "ID" SERIAL PRIMARY KEY, + TEST VARCHAR(100) +); +-- +-- SELECT "dummy_entity"."ID" AS "ID", +-- "dummy_entity"."test" AS "test", +-- "dummy_entity"."prefix_test" AS "prefix_test", +-- "PREFIX_dummyEntity2"."id" AS 
"prefix_dummyentity2_id", +-- "PREFIX_dummyEntity2"."test" AS "prefix_dummyentity2_test" +-- FROM "dummy_entity" +-- LEFT OUTER JOIN "dummy_entity2" AS "PREFIX_dummyEntity2" ON +-- "PREFIX_dummyEntity2"."ID" = "dummy_entity"."ID" \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-db2.sql new file mode 100644 index 0000000000..2b771677c7 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-db2.sql @@ -0,0 +1,15 @@ +DROP TABLE ReadOnlyIdEntity; +DROP TABLE PrimitiveIdEntity; +DROP TABLE ImmutableWithManualIdentity; +DROP TABLE EntityWithSeq; +DROP TABLE PrimitiveIdEntityWithSeq; + +CREATE TABLE ReadOnlyIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE PrimitiveIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE SimpleSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE simple_seq_seq START WITH 1; +CREATE TABLE PersistableSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE persistable_seq_seq START WITH 1; +CREATE TABLE PrimitiveIdSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE "primitive_seq_seq" START WITH 1; diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-h2.sql new file mode 100644 index 0000000000..3db0d0db67 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-h2.sql @@ -0,0 +1,12 @@ +-- noinspection SqlNoDataSourceInspectionForFile + +CREATE TABLE ReadOnlyIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE PrimitiveIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT PRIMARY KEY, NAME VARCHAR(100)); + +CREATE TABLE SimpleSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE simple_seq_seq START WITH 1; +CREATE TABLE PersistableSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE persistable_seq_seq START WITH 1; +CREATE TABLE PrimitiveIdSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE "primitive_seq_seq" START WITH 1; diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-hsql.sql new file mode 100644 index 0000000000..0494ffa81b --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-hsql.sql @@ -0,0 +1,12 @@ +-- noinspection SqlNoDataSourceInspectionForFile + +CREATE TABLE ReadOnlyIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE PrimitiveIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT PRIMARY KEY, NAME VARCHAR(100)); + +CREATE TABLE SimpleSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE "simple_seq_seq" START WITH 1; +CREATE TABLE PersistableSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME 
VARCHAR(100)); +CREATE SEQUENCE "persistable_seq_seq" START WITH 1; +CREATE TABLE PrimitiveIdSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE "primitive_seq_seq" START WITH 1; diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..d85b1c8f1f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mariadb.sql @@ -0,0 +1,9 @@ +CREATE TABLE ReadOnlyIdEntity (ID BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE PrimitiveIdEntity (ID BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE SimpleSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE simple_seq_seq START WITH 1; +CREATE TABLE PersistableSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE persistable_seq_seq START WITH 1; +CREATE TABLE PrimitiveIdSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE primitive_seq_seq START WITH 1; diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mssql.sql new file mode 100644 index 0000000000..9b379434c1 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mssql.sql @@ -0,0 +1,17 @@ +DROP TABLE IF EXISTS ReadOnlyIdEntity; +DROP TABLE IF EXISTS PrimitiveIdEntity; +DROP TABLE IF EXISTS ImmutableWithManualIdentity; +DROP TABLE IF EXISTS 
EntityWithSeq; +DROP TABLE IF EXISTS PersistableEntityWithSeq; +DROP TABLE IF EXISTS PrimitiveIdEntityWithSeq; + +CREATE TABLE ReadOnlyIdEntity (ID BIGINT IDENTITY PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE PrimitiveIdEntity (ID BIGINT IDENTITY PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE EntityWithSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE SimpleSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE simple_seq_seq START WITH 1; +CREATE TABLE PersistableSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE persistable_seq_seq START WITH 1; +CREATE TABLE PrimitiveIdSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE primitive_seq_seq START WITH 1; diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mysql.sql similarity index 67% rename from src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mysql.sql rename to spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mysql.sql index b02242b2c9..7ad9775ebe 100644 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mysql.sql +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-mysql.sql @@ -1,2 +1,3 @@ CREATE TABLE ReadOnlyIdEntity (ID BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); CREATE TABLE PrimitiveIdEntity (ID BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT PRIMARY KEY, NAME VARCHAR(100)); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-oracle.sql new file mode 100644 index 0000000000..3a0416888a --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-oracle.sql @@ -0,0 +1,39 @@ +DROP TABLE ReadOnlyIdEntity; +DROP TABLE PrimitiveIdEntity; +DROP TABLE ImmutableWithManualIdentity; + +CREATE TABLE ReadOnlyIdEntity ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE PrimitiveIdEntity ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE ImmutableWithManualIdentity ( + ID NUMBER PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE SimpleSeq ( + ID NUMBER PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE SEQUENCE simple_seq_seq START WITH 1; + +CREATE TABLE PersistableSeq ( + ID NUMBER PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE SEQUENCE persistable_seq_seq START WITH 1; + +CREATE TABLE PrimitiveIdSeq ( + ID NUMBER PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE SEQUENCE "primitive_seq_seq" START WITH 1; diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-postgres.sql new file mode 100644 index 0000000000..8f79fc83e6 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-postgres.sql @@ -0,0 +1,16 @@ +DROP TABLE ReadOnlyIdEntity; +DROP TABLE PrimitiveIdEntity; +DROP TABLE ImmutableWithManualIdentity; +DROP TABLE EntityWithSeq; +DROP TABLE 
PersistableEntityWithSeq; +DROP TABLE PrimitiveIdEntityWithSeq; + +CREATE TABLE ReadOnlyIdEntity (ID SERIAL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE PrimitiveIdEntity (ID SERIAL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE ImmutableWithManualIdentity (ID BIGINT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE SimpleSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE simple_seq_seq START WITH 1; +CREATE TABLE PersistableSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE persistable_seq_seq START WITH 1; +CREATE TABLE PrimitiveIdSeq (ID BIGINT NOT NULL PRIMARY KEY, NAME VARCHAR(100)); +CREATE SEQUENCE primitive_seq_seq START WITH 1; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-db2.sql new file mode 100644 index 0000000000..acbf9d5ed2 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-db2.sql @@ -0,0 +1,3 @@ +DROP TABLE dummy_entity; + +CREATE TABLE dummy_entity ( id_Prop BIGINT PRIMARY KEY, NAME VARCHAR(100)) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-h2.sql new file mode 100644 index 0000000000..25a09e431d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-h2.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity ( id_Prop BIGINT PRIMARY KEY, NAME VARCHAR(100)) diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-hsql.sql new file mode 100644 index 0000000000..25a09e431d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity ( id_Prop BIGINT PRIMARY KEY, NAME VARCHAR(100)) diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..fdb22e2d43 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mariadb.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity (id_Prop BIGINT PRIMARY KEY, NAME VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mssql.sql new file mode 100644 index 0000000000..f12da5cb0e --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mssql.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS dummy_entity; +CREATE TABLE dummy_entity (id_Prop BIGINT PRIMARY KEY, NAME VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mysql.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mysql.sql new file mode 100644 index 0000000000..fdb22e2d43 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-mysql.sql @@ -0,0 +1 @@ +CREATE TABLE dummy_entity (id_Prop BIGINT PRIMARY KEY, NAME VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-postgres.sql new file mode 100644 index 0000000000..65f67a5bf1 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryInsertExistingIntegrationTests-postgres.sql @@ -0,0 +1,2 @@ +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity (id_Prop INTEGER PRIMARY KEY, NAME VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-db2.sql new file mode 100644 index 0000000000..1c00e779a6 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-db2.sql @@ -0,0 +1,58 @@ +DROP TABLE dummy_entity; +DROP TABLE ROOT; +DROP TABLE INTERMEDIATE; +DROP TABLE LEAF; +DROP TABLE WITH_DELIMITED_COLUMN; +DROP TABLE ENTITY_WITH_SEQUENCE; +DROP SEQUENCE ENTITY_SEQUENCE; + +CREATE TABLE dummy_entity +( + id_Prop BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME TIMESTAMP, + OFFSET_DATE_TIME TIMESTAMP, -- with time zone is only supported with z/OS + FLAG BOOLEAN, + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES BINARY(8) +); + +CREATE 
TABLE ROOT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE INTERMEDIATE +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + ROOT_ID BIGINT, + ROOT_KEY INTEGER +); + +CREATE TABLE LEAF +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + INTERMEDIATE BIGINT, + INTERMEDIATE_ID BIGINT, + INTERMEDIATE_KEY INTEGER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + "ORG.XTUNIT.IDENTIFIER" VARCHAR(100), + STYPE VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID BIGINT, + NAME VARCHAR(100) +); + +CREATE SEQUENCE ENTITY_SEQUENCE START WITH 1 INCREMENT BY 1 NO MAXVALUE; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-h2.sql new file mode 100644 index 0000000000..6f9087b69d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-h2.sql @@ -0,0 +1,50 @@ +CREATE TABLE dummy_entity +( + id_Prop BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME TIMESTAMP, + OFFSET_DATE_TIME TIMESTAMP WITH TIME ZONE, + FLAG BOOLEAN, + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES BINARY(8) +); + +CREATE TABLE ROOT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE INTERMEDIATE +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + ROOT_ID BIGINT, + ROOT_KEY INTEGER +); + +CREATE TABLE LEAF +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME 
VARCHAR(100), + INTERMEDIATE BIGINT, + INTERMEDIATE_ID BIGINT, + INTERMEDIATE_KEY INTEGER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + "ORG.XTUNIT.IDENTIFIER" VARCHAR(100), + STYPE VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID BIGINT, + NAME VARCHAR(100) +); + +CREATE SEQUENCE ENTITY_SEQUENCE START WITH 1 INCREMENT BY 1 NO MAXVALUE; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-hsql.sql new file mode 100644 index 0000000000..6f9087b69d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-hsql.sql @@ -0,0 +1,50 @@ +CREATE TABLE dummy_entity +( + id_Prop BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME TIMESTAMP, + OFFSET_DATE_TIME TIMESTAMP WITH TIME ZONE, + FLAG BOOLEAN, + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES BINARY(8) +); + +CREATE TABLE ROOT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE INTERMEDIATE +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + ROOT_ID BIGINT, + ROOT_KEY INTEGER +); + +CREATE TABLE LEAF +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100), + INTERMEDIATE BIGINT, + INTERMEDIATE_ID BIGINT, + INTERMEDIATE_KEY INTEGER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + "ORG.XTUNIT.IDENTIFIER" VARCHAR(100), + STYPE VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID BIGINT, + NAME VARCHAR(100) +); + +CREATE SEQUENCE ENTITY_SEQUENCE START 
WITH 1 INCREMENT BY 1 NO MAXVALUE; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..23d3ad7221 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mariadb.sql @@ -0,0 +1,50 @@ +CREATE TABLE dummy_entity +( + id_Prop BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME TIMESTAMP(3), + OFFSET_DATE_TIME TIMESTAMP(3), + FLAG BOOLEAN, + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES BINARY(8) +); + +CREATE TABLE ROOT +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE INTERMEDIATE +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + ROOT_ID BIGINT, + ROOT_KEY INTEGER +); + +CREATE TABLE LEAF +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100), + INTERMEDIATE BIGINT, + INTERMEDIATE_ID BIGINT, + INTERMEDIATE_KEY INTEGER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + `ORG.XTUNIT.IDENTIFIER` VARCHAR(100), + STYPE VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID BIGINT, + NAME VARCHAR(100) +); + +CREATE SEQUENCE `ENTITY_SEQUENCE` START WITH 1 INCREMENT BY 1 NO MAXVALUE; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mssql.sql new file mode 100644 index 0000000000..69f191f65d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mssql.sql @@ -0,0 +1,58 @@ +DROP TABLE IF EXISTS dummy_entity; +DROP TABLE IF 
EXISTS ROOT; +DROP TABLE IF EXISTS INTERMEDIATE; +DROP TABLE IF EXISTS LEAF; +DROP TABLE IF EXISTS WITH_DELIMITED_COLUMN; +DROP TABLE IF EXISTS ENTITY_WITH_SEQUENCE; +DROP SEQUENCE IF EXISTS ENTITY_SEQUENCE; + +CREATE TABLE dummy_entity +( + id_Prop BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME DATETIME2, + OFFSET_DATE_TIME DATETIMEOFFSET, + FLAG BIT, + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES VARBINARY(8) +); + +CREATE TABLE ROOT +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE INTERMEDIATE +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + ROOT_ID BIGINT, + ROOT_KEY INTEGER +); + +CREATE TABLE LEAF +( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100), + INTERMEDIATE BIGINT, + INTERMEDIATE_ID BIGINT, + INTERMEDIATE_KEY INTEGER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID BIGINT IDENTITY PRIMARY KEY, + "ORG.XTUNIT.IDENTIFIER" VARCHAR(100), + STYPE VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID BIGINT, + NAME VARCHAR(100) +); + +CREATE SEQUENCE ENTITY_SEQUENCE START WITH 1 INCREMENT BY 1 NO MAXVALUE; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mysql.sql new file mode 100644 index 0000000000..0d3e16587f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mysql.sql @@ -0,0 +1,45 @@ +SET +SQL_MODE='ALLOW_INVALID_DATES'; + +CREATE TABLE DUMMY_ENTITY +( + ID_PROP BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME TIMESTAMP(3) DEFAULT NULL, + OFFSET_DATE_TIME TIMESTAMP(3) DEFAULT NULL, + FLAG BIT(1), + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES BINARY(8) +); + +CREATE TABLE ROOT +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE 
TABLE INTERMEDIATE +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + ROOT_ID BIGINT, + ROOT_KEY INTEGER +); + +CREATE TABLE LEAF +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100), + INTERMEDIATE BIGINT, + INTERMEDIATE_ID BIGINT, + INTERMEDIATE_KEY INTEGER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID BIGINT AUTO_INCREMENT PRIMARY KEY, + `ORG.XTUNIT.IDENTIFIER` VARCHAR(100), + STYPE VARCHAR(100) +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-oracle.sql new file mode 100644 index 0000000000..428ff48f3f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-oracle.sql @@ -0,0 +1,58 @@ +DROP TABLE DUMMY_ENTITY CASCADE CONSTRAINTS PURGE; +DROP TABLE ROOT CASCADE CONSTRAINTS PURGE; +DROP TABLE INTERMEDIATE CASCADE CONSTRAINTS PURGE; +DROP TABLE LEAF CASCADE CONSTRAINTS PURGE; +DROP TABLE WITH_DELIMITED_COLUMN CASCADE CONSTRAINTS PURGE; +DROP TABLE ENTITY_WITH_SEQUENCE CASCADE CONSTRAINTS PURGE; +DROP SEQUENCE ENTITY_SEQUENCE; + +CREATE TABLE DUMMY_ENTITY +( + ID_PROP NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR2(100), + POINT_IN_TIME TIMESTAMP, + OFFSET_DATE_TIME TIMESTAMP WITH TIME ZONE, + FLAG NUMBER(1,0), + REF NUMBER, + DIRECTION VARCHAR2(100), + BYTES RAW(8) +); + +CREATE TABLE ROOT +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE INTERMEDIATE +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR2(100), + ROOT NUMBER, + ROOT_ID NUMBER, + ROOT_KEY NUMBER +); + +CREATE TABLE LEAF +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR2(100), + INTERMEDIATE NUMBER, + 
INTERMEDIATE_ID NUMBER, + INTERMEDIATE_KEY NUMBER +); + +CREATE TABLE WITH_DELIMITED_COLUMN +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + "ORG.XTUNIT.IDENTIFIER" VARCHAR(100), + STYPE VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID NUMBER, + NAME VARCHAR(100) +); + +CREATE SEQUENCE ENTITY_SEQUENCE START WITH 1 INCREMENT BY 1; \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-postgres.sql new file mode 100644 index 0000000000..42e69437a7 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-postgres.sql @@ -0,0 +1,58 @@ +DROP TABLE dummy_entity; +DROP TABLE ROOT; +DROP TABLE INTERMEDIATE; +DROP TABLE LEAF; +DROP TABLE WITH_DELIMITED_COLUMN; +DROP TABLE ENTITY_WITH_SEQUENCE; +DROP SEQUENCE ENTITY_SEQUENCE; + +CREATE TABLE dummy_entity +( + id_Prop SERIAL PRIMARY KEY, + NAME VARCHAR(100), + POINT_IN_TIME TIMESTAMP, + OFFSET_DATE_TIME TIMESTAMP WITH TIME ZONE, + FLAG BOOLEAN, + REF BIGINT, + DIRECTION VARCHAR(100), + BYTES BYTEA +); + +CREATE TABLE ROOT +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); + +CREATE TABLE INTERMEDIATE +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100), + ROOT BIGINT, + "ROOT_ID" BIGINT, + "ROOT_KEY" INTEGER +); + +CREATE TABLE LEAF +( + ID SERIAL PRIMARY KEY, + NAME VARCHAR(100), + INTERMEDIATE BIGINT, + "INTERMEDIATE_ID" BIGINT, + "INTERMEDIATE_KEY" INTEGER +); + +CREATE TABLE "WITH_DELIMITED_COLUMN" +( + ID SERIAL PRIMARY KEY, + "ORG.XTUNIT.IDENTIFIER" VARCHAR(100), + "STYPE" VARCHAR(100) +); + +CREATE TABLE ENTITY_WITH_SEQUENCE +( + ID BIGINT, + NAME VARCHAR(100) +); + +CREATE SEQUENCE "ENTITY_SEQUENCE" START WITH 1 INCREMENT BY 1 NO MAXVALUE; \ No newline at end of file diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-db2.sql new file mode 100644 index 0000000000..a3af227696 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-db2.sql @@ -0,0 +1,18 @@ +DROP TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION; +DROP TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS; + +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( + id_Timestamp DATETIME NOT NULL PRIMARY KEY, + bool boolean, + SOME_ENUM VARCHAR(100), + big_Decimal VARCHAR(100), + big_Integer BIGINT, + date DATETIME, + local_Date_Time DATETIME, + zoned_Date_Time VARCHAR(30) +); + +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( + id_Timestamp DATETIME NOT NULL PRIMARY KEY, + data VARCHAR(100) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-h2.sql new file mode 100644 index 0000000000..3eb1994c26 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-h2.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( id_Timestamp DATETIME PRIMARY KEY, bool boolean, SOME_ENUM VARCHAR(100), big_Decimal DECIMAL(1025), big_Integer DECIMAL(20), date DATETIME, local_Date_Time DATETIME, zoned_Date_Time VARCHAR(30)); +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( id_Timestamp DATETIME NOT NULL PRIMARY KEY, data VARCHAR(100)); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-hsql.sql new file mode 100644 index 0000000000..3eb1994c26 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-hsql.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( id_Timestamp DATETIME PRIMARY KEY, bool boolean, SOME_ENUM VARCHAR(100), big_Decimal DECIMAL(1025), big_Integer DECIMAL(20), date DATETIME, local_Date_Time DATETIME, zoned_Date_Time VARCHAR(30)); +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( id_Timestamp DATETIME NOT NULL PRIMARY KEY, data VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..c14f120013 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( id_Timestamp TIMESTAMP PRIMARY KEY, bool boolean, SOME_ENUM VARCHAR(100), big_Decimal DECIMAL(65), big_Integer DECIMAL(20), date DATETIME, local_Date_Time DATETIME, zoned_Date_Time VARCHAR(30)); +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( id_Timestamp TIMESTAMP NOT NULL PRIMARY KEY, data VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mssql.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mssql.sql new file mode 100644 index 0000000000..b4fc5fde25 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mssql.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION; +DROP TABLE IF EXISTS ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS; +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( id_Timestamp DATETIME PRIMARY KEY, bool bit, SOME_ENUM VARCHAR(100), big_Decimal DECIMAL(38), big_Integer DECIMAL(20), date DATETIME, local_Date_Time DATETIME, zoned_Date_Time VARCHAR(30)); +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( id_Timestamp DATETIME NOT NULL PRIMARY KEY, data VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mysql.sql new file mode 100644 index 0000000000..c14f120013 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( id_Timestamp TIMESTAMP PRIMARY KEY, bool boolean, SOME_ENUM VARCHAR(100), big_Decimal DECIMAL(65), big_Integer DECIMAL(20), date DATETIME, local_Date_Time DATETIME, zoned_Date_Time VARCHAR(30)); +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( id_Timestamp TIMESTAMP NOT NULL PRIMARY KEY, data VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-oracle.sql 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-oracle.sql new file mode 100644 index 0000000000..5b1fabc4d1 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-oracle.sql @@ -0,0 +1,18 @@ +DROP TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION; +DROP TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS; + +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( + ID_TIMESTAMP TIMESTAMP PRIMARY KEY, + BOOL CHAR(1), + SOME_ENUM VARCHAR2(100), + BIG_DECIMAL DECIMAL (38), + BIG_INTEGER NUMBER(38, 0), + "DATE" TIMESTAMP, + LOCAL_DATE_TIME TIMESTAMP, + ZONED_DATE_TIME VARCHAR2(30) +); + +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( + ID_TIMESTAMP TIMESTAMP PRIMARY KEY, + DATA VARCHAR2(100) +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-postgres.sql new file mode 100644 index 0000000000..27d9fa6d8a --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-postgres.sql @@ -0,0 +1,5 @@ +DROP TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION; +DROP TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS; + +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS ( id_Timestamp TIMESTAMP PRIMARY KEY, bool boolean, SOME_ENUM VARCHAR(100), big_Decimal DECIMAL(65), big_Integer BIGINT, date TIMESTAMP, local_Date_Time TIMESTAMP, zoned_Date_Time VARCHAR(30)); +CREATE TABLE ENTITY_WITH_COLUMNS_REQUIRING_CONVERSIONS_RELATION ( "ID_TIMESTAMP" TIMESTAMP PRIMARY KEY, data VARCHAR(100)); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-db2.sql new file mode 100644 index 0000000000..3d31bbc1e1 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-db2.sql @@ -0,0 +1,7 @@ +DROP TABLE address; +DROP TABLE person; + + +CREATE TABLE person ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, name VARCHAR(100)); +CREATE TABLE address ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, street VARCHAR(100), person_id BIGINT); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-h2.sql new file mode 100644 index 0000000000..a33c466af5 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-h2.sql @@ -0,0 +1,3 @@ +CREATE TABLE person ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, name VARCHAR(100)); +CREATE TABLE address ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, street VARCHAR(100), person_id BIGINT); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-hsql.sql new file mode 100644 index 
0000000000..a33c466af5 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-hsql.sql @@ -0,0 +1,3 @@ +CREATE TABLE person ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, name VARCHAR(100)); +CREATE TABLE address ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, street VARCHAR(100), person_id BIGINT); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..40c0f81943 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mariadb.sql @@ -0,0 +1,3 @@ +CREATE TABLE person ( id INT NOT NULL AUTO_INCREMENT, name VARCHAR(100), PRIMARY KEY (id)); +CREATE TABLE address ( id INT NOT NULL AUTO_INCREMENT, street VARCHAR(100), person_id INT, PRIMARY KEY (id)); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mssql.sql new file mode 100644 index 0000000000..fb8b9c1b56 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mssql.sql @@ -0,0 +1,6 @@ +DROP TABLE IF EXISTS address; +DROP TABLE IF EXISTS person; + +CREATE TABLE person ( id int IDENTITY(1,1) PRIMARY KEY, name VARCHAR(100)); +CREATE 
TABLE address ( id int IDENTITY(1,1) PRIMARY KEY, street VARCHAR(100), person_id INT); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mysql.sql new file mode 100644 index 0000000000..40c0f81943 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-mysql.sql @@ -0,0 +1,3 @@ +CREATE TABLE person ( id INT NOT NULL AUTO_INCREMENT, name VARCHAR(100), PRIMARY KEY (id)); +CREATE TABLE address ( id INT NOT NULL AUTO_INCREMENT, street VARCHAR(100), person_id INT, PRIMARY KEY (id)); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-oracle.sql new file mode 100644 index 0000000000..ab6fb458db --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-oracle.sql @@ -0,0 +1,14 @@ +DROP TABLE ADDRESS; +DROP TABLE PERSON; + +CREATE TABLE PERSON ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE ADDRESS ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + STREET VARCHAR2(100), + PERSON_ID NUMBER); + +ALTER TABLE ADDRESS ADD FOREIGN KEY (PERSON_ID) REFERENCES PERSON(ID); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-postgres.sql new file mode 100644 index 0000000000..368434103a --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryResultSetExtractorIntegrationTests-postgres.sql @@ -0,0 +1,5 @@ +DROP TABLE person; +DROP TABLE address; +CREATE TABLE person ( id SERIAL PRIMARY KEY, name VARCHAR(100)); +CREATE TABLE address ( id SERIAL PRIMARY KEY, street VARCHAR(100), person_id INT); +ALTER TABLE address ADD FOREIGN KEY (person_id) REFERENCES person(id); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..9740f11d97 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsAndManuallyAssignedIdHsqlIntegrationTests-hsql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT not null); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsChainHsqlIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsChainHsqlIntegrationTests-hsql.sql new file mode 100644 index 0000000000..b3e6c450eb --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsChainHsqlIntegrationTests-hsql.sql @@ -0,0 +1,17 @@ +CREATE TABLE DUMMY_ENTITY +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE CHILD_ELEMENT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + NAME VARCHAR(100), + DUMMY_ENTITY BIGINT +); +CREATE TABLE GRAND_CHILD_ELEMENT +( + ID BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + CONTENT VARCHAR(100), + CHILD_ELEMENT BIGINT +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-db2.sql new file mode 100644 index 0000000000..1d2e747399 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-db2.sql @@ -0,0 +1,5 @@ +DROP TABLE element; +DROP TABLE dummy_entity; + +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-h2.sql new file mode 100644 index 0000000000..480b9f2787 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-h2.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, 
NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-hsql.sql new file mode 100644 index 0000000000..480b9f2787 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-hsql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..2943283964 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mssql.sql new file mode 100644 index 0000000000..bc0abc14c9 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mssql.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS dummy_entity; +DROP TABLE IF EXISTS element; +CREATE TABLE dummy_entity ( id BIGINT identity PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT identity PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mysql.sql new file mode 100644 index 0000000000..2943283964 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-oracle.sql new file mode 100644 index 0000000000..52dbc96a4d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-oracle.sql @@ -0,0 +1,13 @@ +DROP TABLE ELEMENT; +DROP TABLE DUMMY_ENTITY; + +CREATE TABLE DUMMY_ENTITY ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE ELEMENT ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + CONTENT VARCHAR(100), + DUMMY_ENTITY NUMBER +); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-postgres.sql new file mode 100644 index 0000000000..824a1e9481 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-postgres.sql @@ -0,0 +1,4 @@ +DROP TABLE element; +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id SERIAL PRIMARY KEY, content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..119c60823b --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mssql.sql new file mode 100644 index 0000000000..3d2cd82c2e --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mssql.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS dummy_entity; +DROP TABLE IF EXISTS element; +CREATE TABLE dummy_entity ( id BIGINT 
identity PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mysql.sql new file mode 100644 index 0000000000..119c60823b --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-postgres.sql new file mode 100644 index 0000000000..8c9eb5b48f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsNoIdIntegrationTests-postgres.sql @@ -0,0 +1,4 @@ +DROP TABLE element; +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (content VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-db2.sql new file mode 100644 index 0000000000..c7cd31b86a --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-db2.sql @@ -0,0 +1,36 @@ +DROP TABLE element; +DROP TABLE 
dummy_entity; + +DROP TABLE root; +DROP TABLE intermediate; +DROP TABLE leaf; + +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + content VARCHAR(100), + Dummy_Entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY +); +CREATE TABLE intermediate +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-h2.sql new file mode 100644 index 0000000000..8c40d6dded --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-h2.sql @@ -0,0 +1,29 @@ +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + content VARCHAR(100), + Dummy_Entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY +); +CREATE TABLE intermediate +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); \ No newline at end of file diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-hsql.sql new file mode 100644 index 0000000000..181e5366ad --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-hsql.sql @@ -0,0 +1,29 @@ +CREATE TABLE dummy_entity +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, + content VARCHAR(100), + Dummy_Entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY +); +CREATE TABLE intermediate +( + id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..2293ba1e69 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mariadb.sql @@ -0,0 +1,29 @@ +CREATE TABLE dummy_entity +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + content VARCHAR(100), + Dummy_Entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id BIGINT AUTO_INCREMENT PRIMARY KEY +); +CREATE TABLE intermediate +( + id BIGINT AUTO_INCREMENT 
PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mssql.sql new file mode 100644 index 0000000000..280087f96d --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mssql.sql @@ -0,0 +1,36 @@ +DROP TABLE IF EXISTS dummy_entity; +DROP TABLE IF EXISTS element; + +DROP TABLE IF EXISTS root; +DROP TABLE IF EXISTS intermediate; +DROP TABLE IF EXISTS leaf; + +CREATE TABLE dummy_entity +( + id BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id BIGINT IDENTITY PRIMARY KEY, + content VARCHAR(100), + Dummy_Entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id BIGINT IDENTITY PRIMARY KEY +); +CREATE TABLE intermediate +( + id BIGINT IDENTITY PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mysql.sql new file mode 100644 index 0000000000..2293ba1e69 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mysql.sql @@ -0,0 +1,29 @@ +CREATE TABLE dummy_entity +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + 
content VARCHAR(100), + Dummy_Entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id BIGINT AUTO_INCREMENT PRIMARY KEY +); +CREATE TABLE intermediate +( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-oracle.sql new file mode 100644 index 0000000000..ba93df3173 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-oracle.sql @@ -0,0 +1,37 @@ +DROP TABLE ELEMENT; +DROP TABLE DUMMY_ENTITY; + +DROP TABLE root; +DROP TABLE intermediate; +DROP TABLE leaf; + +CREATE TABLE DUMMY_ENTITY +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE ELEMENT +( + ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + CONTENT VARCHAR(100), + DUMMY_ENTITY_KEY NUMBER, + DUMMY_ENTITY NUMBER +); + +CREATE TABLE root +( + id NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY +); +CREATE TABLE intermediate +( + id NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY, + root NUMBER NOT NULL, + root_key NUMBER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate NUMBER NOT NULL, + intermediate_key NUMBER NOT NULL +); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-postgres.sql new file mode 100644 index 0000000000..a0a3d8157c --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-postgres.sql @@ -0,0 +1,36 @@ +DROP TABLE element; +DROP TABLE dummy_entity; + +DROP TABLE root; +DROP TABLE intermediate; +DROP TABLE leaf; + +CREATE TABLE dummy_entity +( + id SERIAL PRIMARY KEY, + NAME VARCHAR(100) +); +CREATE TABLE element +( + id SERIAL PRIMARY KEY, + content VARCHAR(100), + dummy_entity_key BIGINT, + dummy_entity BIGINT +); + +CREATE TABLE root +( + id SERIAL PRIMARY KEY +); +CREATE TABLE intermediate +( + id SERIAL PRIMARY KEY, + root BIGINT NOT NULL, + root_key INTEGER NOT NULL +); +CREATE TABLE leaf +( + name VARCHAR(100), + intermediate BIGINT NOT NULL, + intermediate_key INTEGER NOT NULL +); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-db2.sql new file mode 100644 index 0000000000..4c1dc2d722 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-db2.sql @@ -0,0 +1,9 @@ +DROP TABLE element; +DROP TABLE dummy_entity; + +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), Dummy_Entity_key VARCHAR(100), dummy_entity BIGINT); + +ALTER TABLE ELEMENT + ADD FOREIGN KEY (dummy_entity) + REFERENCES dummy_entity(id); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-h2.sql new file mode 100644 index 0000000000..15d39c175f --- /dev/null 
+++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-h2.sql @@ -0,0 +1,6 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), Dummy_Entity_key VARCHAR(100), dummy_entity BIGINT); + +ALTER TABLE ELEMENT + ADD FOREIGN KEY (dummy_entity) + REFERENCES dummy_entity(id); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-hsql.sql new file mode 100644 index 0000000000..15d39c175f --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-hsql.sql @@ -0,0 +1,6 @@ +CREATE TABLE dummy_entity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), Dummy_Entity_key VARCHAR(100), dummy_entity BIGINT); + +ALTER TABLE ELEMENT + ADD FOREIGN KEY (dummy_entity) + REFERENCES dummy_entity(id); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..fdd3be31ef --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mariadb.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT AUTO_INCREMENT 
PRIMARY KEY, content VARCHAR(100), Dummy_Entity_key VARCHAR(100),dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mssql.sql new file mode 100644 index 0000000000..01ba3be525 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mssql.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS dummy_entity; +DROP TABLE IF EXISTS element; +CREATE TABLE dummy_entity ( id BIGINT IDENTITY PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT IDENTITY PRIMARY KEY, content VARCHAR(100), Dummy_Entity_key VARCHAR(100),dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mysql.sql new file mode 100644 index 0000000000..fdd3be31ef --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mysql.sql @@ -0,0 +1,2 @@ +CREATE TABLE dummy_entity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content VARCHAR(100), Dummy_Entity_key VARCHAR(100),dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-oracle.sql new file mode 100644 index 0000000000..6fb0e811c3 --- /dev/null +++ 
b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-oracle.sql @@ -0,0 +1,17 @@ +DROP TABLE ELEMENT; +DROP TABLE DUMMY_ENTITY; + +CREATE TABLE DUMMY_ENTITY ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + NAME VARCHAR2(100) +); + +CREATE TABLE ELEMENT ( + ID NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, + CONTENT VARCHAR2(100), + DUMMY_ENTITY_KEY VARCHAR2(100), + DUMMY_ENTITY NUMBER ); + +ALTER TABLE ELEMENT + ADD FOREIGN KEY (DUMMY_ENTITY) + REFERENCES DUMMY_ENTITY(ID); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-postgres.sql new file mode 100644 index 0000000000..6d1c0b3e56 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-postgres.sql @@ -0,0 +1,4 @@ +DROP TABLE element; +DROP TABLE dummy_entity; +CREATE TABLE dummy_entity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); +CREATE TABLE element (id SERIAL PRIMARY KEY, content VARCHAR(100),dummy_entity_key VARCHAR(100), dummy_entity BIGINT); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-db2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-db2.sql new file mode 100644 index 0000000000..5bc12b55eb --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-db2.sql @@ -0,0 +1,3 @@ +DROP TABLE car; + +CREATE TABLE car ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, model VARCHAR(100)); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-h2.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-h2.sql new file mode 100644 index 0000000000..7306fe6b3b --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-h2.sql @@ -0,0 +1,4 @@ +DROP TABLE car; + + +CREATE TABLE car ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, model VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-hsql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-hsql.sql new file mode 100644 index 0000000000..9d5026bc67 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-hsql.sql @@ -0,0 +1 @@ +CREATE TABLE car ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, model VARCHAR(100)); diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mariadb.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mariadb.sql new file mode 100644 index 0000000000..4179723376 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mariadb.sql @@ -0,0 +1 @@ +CREATE TABLE car ( id INT NOT NULL AUTO_INCREMENT, model VARCHAR(100), PRIMARY KEY (id)); \ No newline at end of file diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mssql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mssql.sql new file mode 100644 index 0000000000..60acad12fa --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mssql.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS car; +CREATE TABLE car ( id int IDENTITY(1,1) PRIMARY KEY, model VARCHAR(100)); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mysql.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mysql.sql new file mode 100644 index 0000000000..4179723376 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-mysql.sql @@ -0,0 +1 @@ +CREATE TABLE car ( id INT NOT NULL AUTO_INCREMENT, model VARCHAR(100), PRIMARY KEY (id)); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-oracle.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-oracle.sql new file mode 100644 index 0000000000..18c251e189 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-oracle.sql @@ -0,0 +1,2 @@ +DROP TABLE CAR; +CREATE TABLE CAR ( id NUMBER GENERATED by default on null as IDENTITY PRIMARY KEY, model VARCHAR(100)); diff --git 
a/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-postgres.sql b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-postgres.sql new file mode 100644 index 0000000000..0118aeda21 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/StringBasedJdbcQueryMappingConfigurationIntegrationTests-postgres.sql @@ -0,0 +1,2 @@ +DROP TABLE car; +CREATE TABLE car ( id SERIAL PRIMARY KEY, model VARCHAR(100)); \ No newline at end of file diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml new file mode 100644 index 0000000000..0e7566de1c --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml @@ -0,0 +1,16 @@ +databaseChangeLog: + - changeSet: + id: '123' + author: Someone + objectQuotingStrategy: LEGACY + changes: + - createTable: + columns: + - column: + autoIncrement: true + constraints: + nullable: false + primaryKey: true + name: id + type: INT + tableName: foo diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/create-fk-with-field.sql b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/create-fk-with-field.sql new file mode 100644 index 0000000000..15b912bed9 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/create-fk-with-field.sql @@ -0,0 +1,11 @@ +CREATE TABLE group_of_persons +( + id int primary key +); + +CREATE TABLE person +( + id int, + first_name varchar(255), + last_name varchar(255) +); diff --git 
a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/drop-and-create-fk.sql b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/drop-and-create-fk.sql new file mode 100644 index 0000000000..030599f566 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/drop-and-create-fk.sql @@ -0,0 +1,14 @@ +CREATE TABLE group_of_persons +( + id int primary key +); + +CREATE TABLE person +( + id int, + first_name varchar(255), + last_name varchar(255), + group_id int, + group_id_to_drop int, + constraint fk_to_drop foreign key (group_id_to_drop) references group_of_persons(id) +); diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/drop-and-create-table-with-fk.sql b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/drop-and-create-table-with-fk.sql new file mode 100644 index 0000000000..9a81131180 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/drop-and-create-table-with-fk.sql @@ -0,0 +1,11 @@ +CREATE TABLE group_of_persons +( + id int primary key +); + +CREATE TABLE table_to_drop +( + id int primary key, + persons int, + constraint fk_to_drop foreign key (persons) references group_of_persons(id) +); diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql new file mode 100644 index 0000000000..226bde05eb --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql @@ -0,0 +1,5 @@ +CREATE TABLE person +( + id int, + first_name varchar(255) +); diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql 
b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql new file mode 100644 index 0000000000..efbc517647 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql @@ -0,0 +1,4 @@ +CREATE TABLE DELETE_ME +( + id int +); diff --git a/src/test/resources/org/springframework/data/jdbc/mybatis/DummyEntityMapper.xml b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/mybatis/DummyEntityMapper.xml similarity index 80% rename from src/test/resources/org/springframework/data/jdbc/mybatis/DummyEntityMapper.xml rename to spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/mybatis/DummyEntityMapper.xml index c2e00541c5..a2a7f7090e 100644 --- a/src/test/resources/org/springframework/data/jdbc/mybatis/DummyEntityMapper.xml +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/mybatis/DummyEntityMapper.xml @@ -1,7 +1,7 @@ + "https://www.mybatis.org/dtd/mybatis-3-mapper.dtd"> @@ -9,13 +9,13 @@ - + INSERT INTO DummyEntity (id) VALUES (DEFAULT) diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/mybatis/mapper/DummyEntityMapper.xml b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/mybatis/mapper/DummyEntityMapper.xml new file mode 100644 index 0000000000..c12b09f184 --- /dev/null +++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/mybatis/mapper/DummyEntityMapper.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + INSERT INTO DummyEntity (id) VALUES (DEFAULT) + + + \ No newline at end of file diff --git a/spring-data-r2dbc/pom.xml b/spring-data-r2dbc/pom.xml new file mode 100644 index 0000000000..c46dd6bf6b --- /dev/null +++ b/spring-data-r2dbc/pom.xml @@ -0,0 +1,434 @@ + + + + 4.0.0 + + spring-data-r2dbc + 3.5.0-SNAPSHOT + + Spring Data R2DBC + Spring Data module for R2DBC + https://projects.spring.io/spring-data-r2dbc + + + org.springframework.data + 
spring-data-relational-parent + 3.5.0-SNAPSHOT + + + + + spring-data-r2dbc + + spring.data.r2dbc + reuseReports + + 0.1.4 + 1.0.0.RELEASE + 1.0.4 + 4.1.118.Final + + + 2018 + + + + + org.testcontainers + testcontainers-bom + ${testcontainers} + pom + import + + + io.netty + netty-bom + ${netty} + pom + import + + + + + + + + ${project.groupId} + spring-data-relational + ${project.version} + + + + ${project.groupId} + spring-data-commons + ${springdata.commons} + + + + org.springframework + spring-tx + + + + org.springframework + spring-context + + + + org.springframework + spring-beans + + + + org.springframework + spring-jdbc + + + + org.springframework + spring-core + + + + org.springframework + spring-r2dbc + + + + io.r2dbc + r2dbc-spi + ${r2dbc-spi.version} + + + + io.projectreactor + reactor-core + + + + + org.jetbrains.kotlin + kotlin-stdlib + true + + + + org.jetbrains.kotlin + kotlin-reflect + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-core + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-reactor + true + + + + org.assertj + assertj-core + ${assertj} + test + + + net.bytebuddy + byte-buddy + + + + + + io.projectreactor + reactor-test + test + + + + + + org.postgresql + postgresql + ${postgresql.version} + test + + + + com.microsoft.sqlserver + mssql-jdbc + ${mssql.version} + test + + + + org.mariadb.jdbc + mariadb-java-client + ${mariadb-java-client.version} + test + + + + com.mysql + mysql-connector-j + ${mysql-connector-java.version} + test + + + + com.oracle.database.jdbc + ojdbc11 + 23.4.0.24.05 + test + + + + + + org.postgresql + r2dbc-postgresql + ${r2dbc-postgresql.version} + true + + + + io.r2dbc + r2dbc-h2 + ${r2dbc-h2.version} + test + + + + org.mariadb + r2dbc-mariadb + ${r2dbc-mariadb.version} + test + + + + io.r2dbc + r2dbc-mssql + ${r2dbc-mssql.version} + test + + + + io.asyncer + r2dbc-mysql + ${r2dbc-mysql.version} + test + + + + com.oracle.database.r2dbc + oracle-r2dbc + ${oracle-r2dbc.version} + test + + + + io.r2dbc 
+ r2dbc-spi-test + ${r2dbc-spi.version} + test + + + + + + org.testcontainers + mysql + test + + + org.slf4j + jcl-over-slf4j + + + + + + org.testcontainers + mariadb + test + + + org.slf4j + jcl-over-slf4j + + + + + + org.testcontainers + mssqlserver + test + + + org.slf4j + jcl-over-slf4j + + + + + + org.testcontainers + oracle-free + test + + + + org.testcontainers + postgresql + test + + + + de.schauderhaft.degraph + degraph-check + ${degraph-check.version} + test + + + + io.mockk + mockk-jvm + ${mockk} + test + + + + org.awaitility + awaitility + ${awaitility} + test + + + + com.tngtech.archunit + archunit + ${archunit.version} + test + + + + + + + + + + org.jacoco + jacoco-maven-plugin + ${jacoco} + + ${jacoco.destfile} + + + + jacoco-initialize + + prepare-agent + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + https://r2dbc.io/spec/1.0.0.RELEASE/api/ + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + default-test + + + **/*Tests.java + + + + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + + org.codehaus.mojo + flatten-maven-plugin + 1.1.0 + + + flatten + process-resources + + flatten + + + true + oss + + keep + keep + expand + remove + + + + + flatten-clean + clean + + clean + + + + + + + + + + no-jacoco + + + + org.jacoco + jacoco-maven-plugin + + + jacoco-initialize + none + + + + + + + + + + diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/aot/R2dbcRuntimeHints.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/aot/R2dbcRuntimeHints.java new file mode 100644 index 0000000000..bc7f5447ee --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/aot/R2dbcRuntimeHints.java @@ -0,0 +1,55 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.aot; + +import java.util.Arrays; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.r2dbc.dialect.PostgresDialect; +import org.springframework.data.r2dbc.mapping.event.AfterConvertCallback; +import org.springframework.data.r2dbc.mapping.event.AfterSaveCallback; +import org.springframework.data.r2dbc.mapping.event.BeforeConvertCallback; +import org.springframework.data.r2dbc.mapping.event.BeforeSaveCallback; +import org.springframework.data.r2dbc.repository.support.SimpleR2dbcRepository; + +/** + * {@link RuntimeHintsRegistrar} for R2DBC. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +class R2dbcRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, ClassLoader classLoader) { + + hints.reflection() + .registerTypes( + Arrays.asList(TypeReference.of(SimpleR2dbcRepository.class), TypeReference.of(AfterConvertCallback.class), + TypeReference + .of(BeforeConvertCallback.class), + TypeReference.of(BeforeSaveCallback.class), TypeReference.of(AfterSaveCallback.class)), + hint -> hint.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_PUBLIC_METHODS)); + + for (Class simpleType : PostgresDialect.INSTANCE.simpleTypes()) { + hints.reflection().registerType(TypeReference.of(simpleType), MemberCategory.PUBLIC_CLASSES); + } + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/AbstractR2dbcConfiguration.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/AbstractR2dbcConfiguration.java new file mode 100644 index 0000000000..2b86b2821f --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/AbstractR2dbcConfiguration.java @@ -0,0 +1,334 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.config; + +import io.r2dbc.spi.ConnectionFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.CustomConversions.StoreConversions; +import org.springframework.data.r2dbc.convert.IdGeneratingEntityCallback; +import org.springframework.data.r2dbc.convert.MappingR2dbcConverter; +import org.springframework.data.r2dbc.convert.R2dbcConverter; +import org.springframework.data.r2dbc.convert.R2dbcCustomConversions; +import org.springframework.data.r2dbc.core.DefaultReactiveDataAccessStrategy; +import org.springframework.data.r2dbc.core.R2dbcEntityTemplate; +import org.springframework.data.r2dbc.core.ReactiveDataAccessStrategy; +import org.springframework.data.r2dbc.dialect.DialectResolver; +import org.springframework.data.r2dbc.dialect.R2dbcDialect; +import org.springframework.data.r2dbc.mapping.R2dbcMappingContext; +import org.springframework.data.relational.RelationalManagedTypes; +import org.springframework.data.relational.core.mapping.DefaultNamingStrategy; +import org.springframework.data.relational.core.mapping.NamingStrategy; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.Table; +import org.springframework.data.util.TypeScanner; +import org.springframework.lang.Nullable; +import org.springframework.r2dbc.core.DatabaseClient; +import 
org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Base class for Spring Data R2DBC configuration containing bean declarations that must be registered for Spring Data + * R2DBC to work. + * + * @author Mark Paluch + * @author Jens Schauder + * @see ConnectionFactory + * @see DatabaseClient + * @see org.springframework.data.r2dbc.repository.config.EnableR2dbcRepositories + */ +@Configuration(proxyBeanMethods = false) +public abstract class AbstractR2dbcConfiguration implements ApplicationContextAware { + + private static final String CONNECTION_FACTORY_BEAN_NAME = "connectionFactory"; + + private @Nullable ApplicationContext context; + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.context = applicationContext; + } + + /** + * Return a R2DBC {@link ConnectionFactory}. Annotate with {@link Bean} in case you want to expose a + * {@link ConnectionFactory} instance to the {@link org.springframework.context.ApplicationContext}. + * + * @return the configured {@link ConnectionFactory}. + */ + public abstract ConnectionFactory connectionFactory(); + + /** + * Returns the base packages to scan for R2DBC mapped entities at startup. Returns the package name of the + * configuration class' (the concrete class, not this one here) by default. So if you have a + * {@code com.acme.AppConfig} extending {@link AbstractR2dbcConfiguration} the base package will be considered + * {@code com.acme} unless the method is overridden to implement alternate behavior. + * + * @return the base packages to scan for mapped {@link Table} classes or an empty collection to not enable scanning + * for entities. + * @since 3.0 + */ + protected Collection getMappingBasePackages() { + + Package mappingBasePackage = getClass().getPackage(); + return Collections.singleton(mappingBasePackage == null ? 
null : mappingBasePackage.getName()); + } + + /** + * Returns the a {@link RelationalManagedTypes} object holding the initial entity set. + * + * @return new instance of {@link RelationalManagedTypes}. + * @throws ClassNotFoundException + * @since 3.0 + */ + @Bean + public RelationalManagedTypes r2dbcManagedTypes() throws ClassNotFoundException { + return RelationalManagedTypes.fromIterable(getInitialEntitySet()); + } + + /** + * Return a {@link R2dbcDialect} for the given {@link ConnectionFactory}. This method attempts to resolve a + * {@link R2dbcDialect} from {@link io.r2dbc.spi.ConnectionFactoryMetadata}. Override this method to specify a dialect + * instead of attempting to resolve one. + * + * @param connectionFactory the configured {@link ConnectionFactory}. + * @return the resolved {@link R2dbcDialect}. + * @throws org.springframework.data.r2dbc.dialect.DialectResolver.NoDialectException if the {@link R2dbcDialect} + * cannot be determined. + */ + public R2dbcDialect getDialect(ConnectionFactory connectionFactory) { + return DialectResolver.getDialect(connectionFactory); + } + + /** + * Register a {@link DatabaseClient} using {@link #connectionFactory()} and {@link ReactiveDataAccessStrategy}. + * + * @return must not be {@literal null}. + * @throws IllegalArgumentException if any of the required args is {@literal null}. + */ + @Bean({ "r2dbcDatabaseClient", "databaseClient" }) + public DatabaseClient databaseClient() { + + ConnectionFactory connectionFactory = lookupConnectionFactory(); + + return DatabaseClient.builder() // + .connectionFactory(connectionFactory) // + .bindMarkers(getDialect(connectionFactory).getBindMarkersFactory()) // + .build(); + } + + /** + * Register {@link R2dbcEntityTemplate} using {@link #databaseClient()} and {@link #connectionFactory()}. + * + * @param databaseClient must not be {@literal null}. + * @param dataAccessStrategy must not be {@literal null}. 
+ * @return + * @since 1.2 + */ + @Bean + public R2dbcEntityTemplate r2dbcEntityTemplate(DatabaseClient databaseClient, + ReactiveDataAccessStrategy dataAccessStrategy) { + + Assert.notNull(databaseClient, "DatabaseClient must not be null"); + Assert.notNull(dataAccessStrategy, "ReactiveDataAccessStrategy must not be null"); + + return new R2dbcEntityTemplate(databaseClient, dataAccessStrategy); + } + + /** + * Register a {@link R2dbcMappingContext} and apply an optional {@link NamingStrategy}. + * + * @param namingStrategy optional {@link NamingStrategy}. Use {@link DefaultNamingStrategy#INSTANCE} as fallback. + * @param r2dbcCustomConversions customized R2DBC conversions. + * @param r2dbcManagedTypes R2DBC managed types, typically discovered through {@link #r2dbcManagedTypes() an entity + * scan}. + * @return must not be {@literal null}. + * @throws IllegalArgumentException if any of the required args is {@literal null}. + */ + @Bean + public R2dbcMappingContext r2dbcMappingContext(Optional namingStrategy, + R2dbcCustomConversions r2dbcCustomConversions, RelationalManagedTypes r2dbcManagedTypes) { + + Assert.notNull(namingStrategy, "NamingStrategy must not be null"); + + R2dbcMappingContext context = new R2dbcMappingContext(namingStrategy.orElse(DefaultNamingStrategy.INSTANCE)); + context.setSimpleTypeHolder(r2dbcCustomConversions.getSimpleTypeHolder()); + context.setManagedTypes(r2dbcManagedTypes); + + return context; + } + + /** + * Register a {@link IdGeneratingEntityCallback} using + * {@link #r2dbcMappingContext(Optional, R2dbcCustomConversions, RelationalManagedTypes)} and + * {@link #databaseClient()}. 
+ * + * @since 3.5 + */ + @Bean + public IdGeneratingEntityCallback idGeneratingBeforeSaveCallback( + RelationalMappingContext relationalMappingContext, DatabaseClient databaseClient) { + return new IdGeneratingEntityCallback(relationalMappingContext, getDialect(lookupConnectionFactory()), + databaseClient); + } + + /** + * Creates a {@link ReactiveDataAccessStrategy} using the configured + * {@link #r2dbcConverter(R2dbcMappingContext, R2dbcCustomConversions) R2dbcConverter}. + * + * @param converter the configured {@link R2dbcConverter}. + * @return must not be {@literal null}. + * @see #r2dbcConverter(R2dbcMappingContext, R2dbcCustomConversions) + * @see #getDialect(ConnectionFactory) + * @throws IllegalArgumentException if any of the {@literal mappingContext} is {@literal null}. + */ + @Bean + public ReactiveDataAccessStrategy reactiveDataAccessStrategy(R2dbcConverter converter) { + + Assert.notNull(converter, "MappingContext must not be null"); + + return new DefaultReactiveDataAccessStrategy(getDialect(lookupConnectionFactory()), converter); + } + + /** + * Creates a {@link org.springframework.data.r2dbc.convert.R2dbcConverter} using the configured + * {@link #r2dbcMappingContext(Optional, R2dbcCustomConversions, RelationalManagedTypes)} R2dbcMappingContext}. + * + * @param mappingContext the configured {@link R2dbcMappingContext}. + * @param r2dbcCustomConversions customized R2DBC conversions. + * @return must not be {@literal null}. + * @see #r2dbcMappingContext(Optional, R2dbcCustomConversions, RelationalManagedTypes) + * @see #getDialect(ConnectionFactory) + * @throws IllegalArgumentException if any of the {@literal mappingContext} is {@literal null}. 
+ * @since 1.2 + */ + @Bean + public MappingR2dbcConverter r2dbcConverter(R2dbcMappingContext mappingContext, + R2dbcCustomConversions r2dbcCustomConversions) { + + Assert.notNull(mappingContext, "MappingContext must not be null"); + + return new MappingR2dbcConverter(mappingContext, r2dbcCustomConversions); + } + + /** + * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These + * {@link CustomConversions} will be registered with the {@link MappingR2dbcConverter} and + * {@link #r2dbcMappingContext(Optional, R2dbcCustomConversions, RelationalManagedTypes)}. Returns an empty + * {@link R2dbcCustomConversions} instance by default. Override {@link #getCustomConverters()} to supply custom + * converters. + * + * @return must not be {@literal null}. + * @see #getCustomConverters() + */ + @Bean + public R2dbcCustomConversions r2dbcCustomConversions() { + return new R2dbcCustomConversions(getStoreConversions(), getCustomConverters()); + } + + /** + * Customization hook to return custom converters. + * + * @return return custom converters. + */ + protected List getCustomConverters() { + return Collections.emptyList(); + } + + /** + * Returns the {@link R2dbcDialect}-specific {@link StoreConversions}. + * + * @return the {@link R2dbcDialect}-specific {@link StoreConversions}. 
+ */ + protected StoreConversions getStoreConversions() { + + R2dbcDialect dialect = getDialect(lookupConnectionFactory()); + + List converters = new ArrayList<>(dialect.getConverters()); + converters.addAll(R2dbcCustomConversions.STORE_CONVERTERS); + + return StoreConversions.of(dialect.getSimpleTypeHolder(), converters); + } + + ConnectionFactory lookupConnectionFactory() { + + ApplicationContext context = this.context; + Assert.notNull(context, "ApplicationContext is not yet initialized"); + + String[] beanNamesForType = context.getBeanNamesForType(ConnectionFactory.class); + + for (String beanName : beanNamesForType) { + + if (beanName.equals(CONNECTION_FACTORY_BEAN_NAME)) { + return context.getBean(CONNECTION_FACTORY_BEAN_NAME, ConnectionFactory.class); + } + } + + return connectionFactory(); + } + + /** + * Scans the mapping base package for classes annotated with {@link Table}. By default, it scans for entities in all + * packages returned by {@link #getMappingBasePackages()}. + * + * @see #getMappingBasePackages() + * @return + * @throws ClassNotFoundException + * @since 3.0 + */ + protected Set> getInitialEntitySet() throws ClassNotFoundException { + + Set> initialEntitySet = new HashSet<>(); + + for (String basePackage : getMappingBasePackages()) { + initialEntitySet.addAll(scanForEntities(basePackage)); + } + + return initialEntitySet; + } + + /** + * Scans the given base package for entities, i.e. R2DBC-specific types annotated with {@link Table}. + * + * @param basePackage must not be {@literal null}. + * @return a set of classes identified as entities. 
+ * @since 3.0 + */ + protected Set> scanForEntities(String basePackage) { + + if (!StringUtils.hasText(basePackage)) { + return Collections.emptySet(); + } + + return TypeScanner.typeScanner(AbstractR2dbcConfiguration.class.getClassLoader()) // + .forTypesAnnotatedWith(Table.class) // + .scanPackages(basePackage) // + .collectAsSet(); + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/EnableR2dbcAuditing.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/EnableR2dbcAuditing.java new file mode 100644 index 0000000000..38923f8c73 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/EnableR2dbcAuditing.java @@ -0,0 +1,70 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.context.annotation.Import; +import org.springframework.data.auditing.DateTimeProvider; +import org.springframework.data.domain.ReactiveAuditorAware; + +/** + * Annotation to enable auditing in R2DBC via annotation configuration. 
+ * + * @author Mark Paluch + * @since 1.2 + */ +@Inherited +@Documented +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Import(R2dbcAuditingRegistrar.class) +public @interface EnableR2dbcAuditing { + + /** + * Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal. + * + * @return empty {@link String} by default. + */ + String auditorAwareRef() default ""; + + /** + * Configures whether the creation and modification dates are set. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean setDates() default true; + + /** + * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean modifyOnCreate() default true; + + /** + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. + * + * @return empty {@link String} by default. + */ + String dateTimeProviderRef() default ""; +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/PersistentEntitiesFactoryBean.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/PersistentEntitiesFactoryBean.java new file mode 100644 index 0000000000..3daefd4fc8 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/PersistentEntitiesFactoryBean.java @@ -0,0 +1,51 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.config; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.r2dbc.mapping.R2dbcMappingContext; + +/** + * Simple helper to be able to wire the {@link PersistentEntities} from a {@link R2dbcMappingContext} bean available in + * the application context. + * + * @author Mark Paluch + * @since 1.2 + */ +public class PersistentEntitiesFactoryBean implements FactoryBean { + + private final R2dbcMappingContext mappingContext; + + /** + * Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link R2dbcMappingContext}. + * + * @param mappingContext must not be {@literal null}. + */ + public PersistentEntitiesFactoryBean(R2dbcMappingContext mappingContext) { + this.mappingContext = mappingContext; + } + + @Override + public PersistentEntities getObject() { + return PersistentEntities.of(mappingContext); + } + + @Override + public Class getObjectType() { + return PersistentEntities.class; + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/R2dbcAuditingRegistrar.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/R2dbcAuditingRegistrar.java new file mode 100644 index 0000000000..50064dfedc --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/R2dbcAuditingRegistrar.java @@ -0,0 +1,80 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.config; + +import java.lang.annotation.Annotation; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; +import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; +import org.springframework.data.auditing.config.AuditingConfiguration; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.r2dbc.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.util.Assert; + +/** + * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableR2dbcAuditing} annotation. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.2 + */ +class R2dbcAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { + + @Override + protected Class getAnnotation() { + return EnableR2dbcAuditing.class; + } + + @Override + protected String getAuditingHandlerBeanName() { + return "r2dbcAuditingHandler"; + } + + @Override + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { + builder.setFactoryMethod("from").addConstructorArgReference("r2dbcMappingContext"); + } + + @Override + protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { + + Assert.notNull(configuration, "AuditingConfiguration must not be null"); + + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class)); + } + + @Override + protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, + BeanDefinitionRegistry registry) { + + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); + + BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder + .rootBeanDefinition(ReactiveAuditingEntityCallback.class); + listenerBeanDefinitionBuilder + .addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); + + registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(), + ReactiveAuditingEntityCallback.class.getName(), registry); + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/package-info.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/package-info.java new file mode 100644 index 0000000000..e4e7fb58ab --- /dev/null +++ 
b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/config/package-info.java @@ -0,0 +1,6 @@ +/** + * Configuration classes for Spring Data R2DBC. + */ +@org.springframework.lang.NonNullApi +@org.springframework.lang.NonNullFields +package org.springframework.data.r2dbc.config; diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/EntityRowMapper.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/EntityRowMapper.java new file mode 100644 index 0000000000..729ad8988a --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/EntityRowMapper.java @@ -0,0 +1,44 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.convert; + +import io.r2dbc.spi.Row; +import io.r2dbc.spi.RowMetadata; + +import java.util.function.BiFunction; + +/** + * Maps a {@link io.r2dbc.spi.Row} to an entity of type {@code T}, including entities referenced. 
+ * + * @author Mark Paluch + * @author Ryland Degnan + */ +public class EntityRowMapper implements BiFunction { + + private final Class typeRoRead; + private final R2dbcConverter converter; + + public EntityRowMapper(Class typeRoRead, R2dbcConverter converter) { + + this.typeRoRead = typeRoRead; + this.converter = converter; + } + + @Override + public T apply(Row row, RowMetadata metadata) { + return converter.read(typeRoRead, row, metadata); + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/EnumWriteSupport.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/EnumWriteSupport.java new file mode 100644 index 0000000000..723f91ad14 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/EnumWriteSupport.java @@ -0,0 +1,55 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.convert; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.WritingConverter; + +/** + * Support class to natively write {@link Enum} values to the database. + *

+ * By default, Spring Data converts enum values to {@link Enum#name() String} for maximum portability. Registering a + * {@link WritingConverter} allows retaining the enum type so that actual enum values get passed through to the driver. + *

+ * Enum types that should be written using their actual enum value to the database should require a converter for type + * pinning. Extend this class as the {@link org.springframework.data.convert.CustomConversions} support inspects + * {@link Converter} generics to identify conversion rules. + *

+ * For example: + * + *

+ * enum Color {
+ * 	Grey, Blue
+ * }
+ *
+ * class ColorConverter extends EnumWriteSupport<Color> {
+ *
+ * }
+ * 
+ * + * @author Mark Paluch + * @param the enum type that should be written using the actual value. + * @since 1.2 + */ +@WritingConverter +public abstract class EnumWriteSupport> implements Converter { + + @Override + public E convert(E enumInstance) { + return enumInstance; + } + +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/IdGeneratingEntityCallback.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/IdGeneratingEntityCallback.java new file mode 100644 index 0000000000..d4d75a0417 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/IdGeneratingEntityCallback.java @@ -0,0 +1,75 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.convert; + +import reactor.core.publisher.Mono; + +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.r2dbc.dialect.R2dbcDialect; +import org.springframework.data.r2dbc.mapping.OutboundRow; +import org.springframework.data.r2dbc.mapping.event.BeforeSaveCallback; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.r2dbc.core.DatabaseClient; +import org.springframework.util.Assert; + +/** + * Callback for generating identifier values through a database sequence. + * + * @author Mikhail Polivakha + * @author Mark Paluch + * @since 3.5 + */ +public class IdGeneratingEntityCallback implements BeforeSaveCallback { + + private final MappingContext, ? extends RelationalPersistentProperty> context; + private final SequenceEntityCallbackDelegate delegate; + + public IdGeneratingEntityCallback( + MappingContext, ? 
extends RelationalPersistentProperty> context, + R2dbcDialect dialect, + DatabaseClient databaseClient) { + + this.context = context; + this.delegate = new SequenceEntityCallbackDelegate(dialect, databaseClient); + } + + @Override + public Mono onBeforeSave(Object entity, OutboundRow row, SqlIdentifier table) { + + Assert.notNull(entity, "Entity must not be null"); + + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(entity.getClass()); + + if (!persistentEntity.hasIdProperty()) { + return Mono.just(entity); + } + + RelationalPersistentProperty property = persistentEntity.getRequiredIdProperty(); + PersistentPropertyAccessor accessor = persistentEntity.getPropertyAccessor(entity); + + if (!persistentEntity.isNew(entity) || delegate.hasValue(property, accessor) || !property.hasSequence()) { + return Mono.just(entity); + } + + Mono idGenerator = delegate.generateSequenceValue(property, row, accessor); + + return idGenerator.defaultIfEmpty(entity); + } + +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/MappingR2dbcConverter.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/MappingR2dbcConverter.java new file mode 100644 index 0000000000..82f96e1e30 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/MappingR2dbcConverter.java @@ -0,0 +1,512 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.convert; + +import io.r2dbc.spi.Blob; +import io.r2dbc.spi.Clob; +import io.r2dbc.spi.ColumnMetadata; +import io.r2dbc.spi.Readable; +import io.r2dbc.spi.ReadableMetadata; +import io.r2dbc.spi.Row; +import io.r2dbc.spi.RowMetadata; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.function.BiFunction; + +import org.springframework.core.convert.ConversionService; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mapping.IdentifierAccessor; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.r2dbc.mapping.OutboundRow; +import org.springframework.data.r2dbc.support.ArrayUtils; +import org.springframework.data.relational.core.conversion.MappingRelationalConverter; +import org.springframework.data.relational.core.dialect.ArrayColumns; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.r2dbc.core.Parameter; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; + +/** + * Converter for R2DBC. 
+ * + * @author Mark Paluch + * @author Oliver Drotbohm + */ +public class MappingR2dbcConverter extends MappingRelationalConverter implements R2dbcConverter { + + /** + * Creates a new {@link MappingR2dbcConverter} given {@link MappingContext}. + * + * @param context must not be {@literal null}. + */ + public MappingR2dbcConverter( + MappingContext, ? extends RelationalPersistentProperty> context) { + super((RelationalMappingContext) context, + new R2dbcCustomConversions(R2dbcCustomConversions.STORE_CONVERSIONS, Collections.emptyList())); + } + + /** + * Creates a new {@link MappingR2dbcConverter} given {@link MappingContext} and {@link CustomConversions}. + * + * @param context must not be {@literal null}. + */ + public MappingR2dbcConverter( + MappingContext, ? extends RelationalPersistentProperty> context, + CustomConversions conversions) { + super((RelationalMappingContext) context, conversions); + } + + // ---------------------------------- + // Entity reading + // ---------------------------------- + + @Override + public R read(Class type, Row row) { + return read(type, row, null); + } + + @Override + public R read(Class type, Row row, @Nullable RowMetadata metadata) { + + TypeInformation typeInfo = TypeInformation.of(type); + Class rawType = typeInfo.getType(); + + if (Row.class.isAssignableFrom(rawType)) { + return type.cast(row); + } + + if (getConversions().hasCustomReadTarget(Row.class, rawType) + && getConversionService().canConvert(Row.class, rawType)) { + return getConversionService().convert(row, rawType); + } + + RowDocument document = toRowDocument(type, row, metadata != null ? 
metadata.getColumnMetadatas() : null); + return read(type, document); + } + + @Override + public RowDocument toRowDocument(Class type, Readable row, + @Nullable Iterable metadata) { + + RowDocument document = new RowDocument(); + RelationalPersistentEntity persistentEntity = getMappingContext().getPersistentEntity(type); + + if (persistentEntity != null) { + captureRowValues(row, metadata, document, persistentEntity); + } + + if (metadata != null) { + for (ReadableMetadata m : metadata) { + + if (document.containsKey(m.getName())) { + continue; + } + + document.put(m.getName(), row.get(m.getName())); + } + } + + return document; + } + + private static void captureRowValues(Readable row, @Nullable Iterable metadata, + RowDocument document, RelationalPersistentEntity persistentEntity) { + + for (RelationalPersistentProperty property : persistentEntity) { + + String identifier = property.getColumnName().getReference(); + + if (property.isEntity() || (metadata != null && !RowMetadataUtils.containsColumn(metadata, identifier))) { + continue; + } + + Object value; + Class propertyType = property.getType(); + + if (propertyType.equals(Clob.class)) { + value = row.get(identifier, Clob.class); + } else if (propertyType.equals(Blob.class)) { + value = row.get(identifier, Blob.class); + } else { + value = row.get(identifier); + } + + document.put(identifier, value); + } + } + + // ---------------------------------- + // Entity writing + // ---------------------------------- + + @Override + public void write(Object source, OutboundRow sink) { + + Class userClass = ClassUtils.getUserClass(source); + + Optional> customTarget = getConversions().getCustomWriteTarget(userClass, OutboundRow.class); + if (customTarget.isPresent()) { + + OutboundRow result = getConversionService().convert(source, OutboundRow.class); + sink.putAll(result); + return; + } + + writeInternal(source, sink, userClass); + } + + private void writeInternal(Object source, OutboundRow sink, Class userClass) { + + 
RelationalPersistentEntity entity = getRequiredPersistentEntity(userClass); + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(source); + + writeProperties(sink, entity, propertyAccessor); + } + + private void writeProperties(OutboundRow sink, RelationalPersistentEntity entity, + PersistentPropertyAccessor accessor) { + + for (RelationalPersistentProperty property : entity) { + + if (!property.isWritable()) { + continue; + } + + Object value; + + if (property.isIdProperty()) { + IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(accessor.getBean()); + value = identifierAccessor.getIdentifier(); + } else { + value = accessor.getProperty(property); + } + + if (value == null) { + writeNullInternal(sink, property); + continue; + } + + if (getConversions().isSimpleType(value.getClass())) { + writeSimpleInternal(sink, value, property); + } else { + writePropertyInternal(sink, value, property); + } + } + } + + private void writeSimpleInternal(OutboundRow sink, Object value, RelationalPersistentProperty property) { + + Object result = getPotentiallyConvertedSimpleWrite(value); + + sink.put(property.getColumnName(), + Parameter.fromOrEmpty(result, getPotentiallyConvertedSimpleNullType(property.getType()))); + } + + private void writePropertyInternal(OutboundRow sink, Object value, RelationalPersistentProperty property) { + + TypeInformation valueType = TypeInformation.of(value.getClass()); + + if (valueType.isCollectionLike()) { + + if (valueType.getActualType() != null && valueType.getRequiredActualType().isCollectionLike()) { + + // pass-thru nested collections + writeSimpleInternal(sink, value, property); + return; + } + + List collectionInternal = createCollection(asCollection(value), property); + sink.put(property.getColumnName(), Parameter.from(collectionInternal)); + return; + } + + throw new InvalidDataAccessApiUsageException("Nested entities are not supported"); + } + + /** + * Writes the given {@link Collection} using the 
given {@link RelationalPersistentProperty} information. + * + * @param collection must not be {@literal null}. + * @param property must not be {@literal null}. + * @return + */ + protected List createCollection(Collection collection, RelationalPersistentProperty property) { + return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>()); + } + + /** + * Populates the given {@link Collection sink} with converted values from the given {@link Collection source}. + * + * @param source the collection to create a {@link Collection} for, must not be {@literal null}. + * @param type the {@link TypeInformation} to consider or {@literal null} if unknown. + * @param sink the {@link Collection} to write to. + * @return + */ + @SuppressWarnings("unchecked") + private List writeCollectionInternal(Collection source, @Nullable TypeInformation type, + Collection sink) { + + TypeInformation componentType = null; + + List collection = sink instanceof List ? (List) sink : new ArrayList<>(sink); + + if (type != null) { + componentType = type.getComponentType(); + } + + for (Object element : source) { + + Class elementType = element == null ? null : element.getClass(); + + if (elementType == null || getConversions().isSimpleType(elementType)) { + collection.add(getPotentiallyConvertedSimpleWrite(element, + componentType != null ? 
componentType.getType() : Object.class)); + } else if (element instanceof Collection || elementType.isArray()) { + collection.add(writeCollectionInternal(asCollection(element), componentType, new ArrayList<>())); + } else { + throw new InvalidDataAccessApiUsageException("Nested entities are not supported"); + } + } + + return collection; + } + + private void writeNullInternal(OutboundRow sink, RelationalPersistentProperty property) { + + sink.put(property.getColumnName(), Parameter.empty(getPotentiallyConvertedSimpleNullType(property.getType()))); + } + + private Class getPotentiallyConvertedSimpleNullType(Class type) { + + Optional> customTarget = getConversions().getCustomWriteTarget(type); + + if (customTarget.isPresent()) { + return customTarget.get(); + } + + if (type.isEnum()) { + return String.class; + } + + return type; + } + + /** + * Checks whether we have a custom conversion registered for the given value into an arbitrary simple type. Returns + * the converted value if so. If not, we perform special enum handling or simply return the value as is. + * + * @param value + * @return + */ + @Nullable + private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) { + return getPotentiallyConvertedSimpleWrite(value, Object.class); + } + + /** + * Checks whether we have a custom conversion registered for the given value into an arbitrary simple type. Returns + * the converted value if so. If not, we perform special enum handling or simply return the value as is. 
+ * + * @param value + * @return + */ + @Nullable + private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, Class typeHint) { + + if (value == null) { + return null; + } + + if (Object.class != typeHint) { + + if (getConversionService().canConvert(value.getClass(), typeHint)) { + value = getConversionService().convert(value, typeHint); + } + } + + Optional> customTarget = getConversions().getCustomWriteTarget(value.getClass()); + + if (customTarget.isPresent()) { + return getConversionService().convert(value, customTarget.get()); + } + + return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; + } + + @Override + public Object getArrayValue(ArrayColumns arrayColumns, RelationalPersistentProperty property, Object value) { + + Class actualType = null; + if (value instanceof Collection) { + actualType = CollectionUtils.findCommonElementType((Collection) value); + } else if (value.getClass().isArray()) { + actualType = value.getClass().getComponentType(); + } + + if (actualType == null) { + actualType = property.getActualType(); + } + + actualType = getTargetType(actualType); + + Class targetType = arrayColumns.getArrayType(actualType); + + if (!property.isArray() || !targetType.isAssignableFrom(value.getClass())) { + + int depth = value.getClass().isArray() ? ArrayUtils.getDimensionDepth(value.getClass()) : 1; + Class targetArrayType = ArrayUtils.getArrayClass(targetType, depth); + return getConversionService().convert(value, targetArrayType); + } + + return value; + } + + @Override + public Class getTargetType(Class valueType) { + + Optional> writeTarget = getConversions().getCustomWriteTarget(valueType); + + return writeTarget.orElseGet(() -> { + return Enum.class.isAssignableFrom(valueType) ? 
String.class : valueType; + }); + } + + @Override + public boolean isSimpleType(Class type) { + return getConversions().isSimpleType(type); + } + + // ---------------------------------- + // Id handling + // ---------------------------------- + + /** + * Returns a {@link java.util.function.Function} that populates the id property of the {@code object} from a + * {@link Row}. + * + * @param object must not be {@literal null}. + * @return + */ + @Override + @SuppressWarnings("unchecked") + public BiFunction populateIdIfNecessary(T object) { + + Assert.notNull(object, "Entity object must not be null"); + + Class userClass = ClassUtils.getUserClass(object); + RelationalPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(userClass); + + if (!entity.hasIdProperty()) { + return (row, rowMetadata) -> object; + } + + return (row, metadata) -> { + + if (metadata == null) { + metadata = row.getMetadata(); + } + + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(object); + RelationalPersistentProperty idProperty = entity.getRequiredIdProperty(); + + boolean idPropertyUpdateNeeded = false; + + Object id = propertyAccessor.getProperty(idProperty); + if (idProperty.getType().isPrimitive()) { + idPropertyUpdateNeeded = id instanceof Number number && number.longValue() == 0; + } else { + idPropertyUpdateNeeded = id == null; + } + + if (idPropertyUpdateNeeded) { + return potentiallySetId(row, metadata, propertyAccessor, idProperty) // + ? 
(T) propertyAccessor.getBean() // + : object; + } + + return object; + }; + } + + private boolean potentiallySetId(Row row, RowMetadata metadata, PersistentPropertyAccessor propertyAccessor, + RelationalPersistentProperty idProperty) { + + String idColumnName = idProperty.getColumnName().getReference(); + Object generatedIdValue = extractGeneratedIdentifier(row, metadata, idColumnName); + + if (generatedIdValue == null) { + return false; + } + + ConversionService conversionService = getConversionService(); + propertyAccessor.setProperty(idProperty, conversionService.convert(generatedIdValue, idProperty.getType())); + + return true; + } + + @Nullable + private Object extractGeneratedIdentifier(Row row, RowMetadata metadata, String idColumnName) { + + if (RowMetadataUtils.containsColumn(metadata, idColumnName)) { + return row.get(idColumnName); + } + + Iterable columns = RowMetadataUtils.getColumnMetadata(metadata); + Iterator it = columns.iterator(); + + if (it.hasNext()) { + ColumnMetadata column = it.next(); + return row.get(column.getName()); + } + + return null; + } + + private RelationalPersistentEntity getRequiredPersistentEntity(Class type) { + return (RelationalPersistentEntity) getMappingContext().getRequiredPersistentEntity(type); + } + + /** + * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a + * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element + * collection for everything else. + * + * @param source + * @return + */ + private static Collection asCollection(Object source) { + + if (source instanceof Collection) { + return (Collection) source; + } + + return source.getClass().isArray() ? 
CollectionUtils.arrayToList(source) : Collections.singleton(source); + } + +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverter.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverter.java new file mode 100644 index 0000000000..2702f946eb --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverter.java @@ -0,0 +1,111 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.convert; + +import io.r2dbc.spi.Readable; +import io.r2dbc.spi.ReadableMetadata; +import io.r2dbc.spi.Row; +import io.r2dbc.spi.RowMetadata; + +import java.util.function.BiFunction; + +import org.springframework.core.convert.ConversionService; +import org.springframework.data.convert.EntityReader; +import org.springframework.data.convert.EntityWriter; +import org.springframework.data.r2dbc.mapping.OutboundRow; +import org.springframework.data.relational.core.conversion.RelationalConverter; +import org.springframework.data.relational.core.dialect.ArrayColumns; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.domain.RowDocument; + +/** + * Central R2DBC specific converter interface. 
+ * + * @author Mark Paluch + * @see EntityReader + */ +public interface R2dbcConverter + extends EntityReader, EntityWriter, RelationalConverter { + + /** + * Returns the underlying {@link ConversionService} used by the converter. + * + * @return never {@literal null}. + */ + @Override + ConversionService getConversionService(); + + /** + * Convert a {@code value} into an array representation according to {@link ArrayColumns}. + * + * @param arrayColumns dialect-specific array handling configuration. + * @param property + * @param value + * @return + */ + Object getArrayValue(ArrayColumns arrayColumns, RelationalPersistentProperty property, Object value); + + /** + * Return the target type for a value considering registered converters. + * + * @param valueType must not be {@literal null}. + * @return + * @since 1.1 + */ + Class getTargetType(Class valueType); + + /** + * Return whether the {@code type} is a simple type. Simple types are database primitives or types with a custom + * mapping strategy. + * + * @param type the type to inspect, must not be {@literal null}. + * @return {@literal true} if the type is a simple one. + * @see org.springframework.data.mapping.model.SimpleTypeHolder + * @since 1.2 + */ + boolean isSimpleType(Class type); + + /** + * Returns a {@link java.util.function.Function} that populates the id property of the {@code object} from a + * {@link Row}. + * + * @param object must not be {@literal null}. + * @return + */ + BiFunction populateIdIfNecessary(T object); + + /** + * Reads the given source into the given type. + * + * @param type they type to convert the given source to. + * @param source the source to create an object of the given type from. + * @param metadata the {@link RowMetadata}. + * @return + */ + R read(Class type, Row source, RowMetadata metadata); + + /** + * Create a flat {@link RowDocument} from a single {@link Readable Row or Stored Procedure output}. + * + * @param type the underlying entity type. 
+ * @param row the row or stored procedure output to retrieve data from. + * @param metadata readable metadata. + * @return the {@link RowDocument} containing the data. + * @since 3.2 + */ + RowDocument toRowDocument(Class type, Readable row, Iterable metadata); + +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverters.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverters.java new file mode 100644 index 0000000000..7be1e328ee --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverters.java @@ -0,0 +1,231 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.r2dbc.convert; + +import io.r2dbc.spi.Row; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.UUID; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.convert.converter.ConverterFactory; +import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; + +/** + * Wrapper class to contain useful converters for the usage with R2DBC. 
+ * + * @author Hebert Coelho + * @author Mark Paluch + * @author Valeriy Vyrva + */ +abstract class R2dbcConverters { + + private R2dbcConverters() {} + + /** + * @return A list of the registered converters + */ + public static Collection getConvertersToRegister() { + + List converters = new ArrayList<>(); + + converters.add(RowToBooleanConverter.INSTANCE); + converters.add(RowToNumberConverterFactory.INSTANCE); + converters.add(RowToLocalDateConverter.INSTANCE); + converters.add(RowToLocalDateTimeConverter.INSTANCE); + converters.add(RowToLocalTimeConverter.INSTANCE); + converters.add(RowToOffsetDateTimeConverter.INSTANCE); + converters.add(RowToStringConverter.INSTANCE); + converters.add(RowToUuidConverter.INSTANCE); + converters.add(RowToZonedDateTimeConverter.INSTANCE); + + return converters; + } + + /** + * Simple singleton to convert {@link Row}s to their {@link Boolean} representation. + * + * @author Hebert Coelho + */ + public enum RowToBooleanConverter implements Converter { + + INSTANCE; + + @Override + public Boolean convert(Row row) { + return row.get(0, Boolean.class); + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link LocalDate} representation. + * + * @author Hebert Coelho + */ + public enum RowToLocalDateConverter implements Converter { + + INSTANCE; + + @Override + public LocalDate convert(Row row) { + return row.get(0, LocalDate.class); + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link LocalDateTime} representation. + * + * @author Hebert Coelho + */ + public enum RowToLocalDateTimeConverter implements Converter { + + INSTANCE; + + @Override + public LocalDateTime convert(Row row) { + return row.get(0, LocalDateTime.class); + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link LocalTime} representation. 
+ * + * @author Hebert Coelho + */ + public enum RowToLocalTimeConverter implements Converter { + + INSTANCE; + + @Override + public LocalTime convert(Row row) { + return row.get(0, LocalTime.class); + } + } + + /** + * Singleton converter factory to convert the first column of a {@link Row} to a {@link Number}. + *

+ * Support Number classes including Byte, Short, Integer, Float, Double, Long, BigInteger, BigDecimal. This class + * delegates to {@link NumberUtils#convertNumberToTargetClass(Number, Class)} to perform the conversion. + * + * @see Byte + * @see Short + * @see Integer + * @see Long + * @see java.math.BigInteger + * @see Float + * @see Double + * @see java.math.BigDecimal + * @author Hebert Coelho + */ + public enum RowToNumberConverterFactory implements ConverterFactory { + + INSTANCE; + + @Override + public Converter getConverter(Class targetType) { + Assert.notNull(targetType, "Target type must not be null"); + return new RowToNumber<>(targetType); + } + + static class RowToNumber implements Converter { + + private final Class targetType; + + RowToNumber(Class targetType) { + this.targetType = targetType; + } + + @Override + public T convert(Row source) { + + Object object = source.get(0); + + return (object != null ? NumberUtils.convertNumberToTargetClass((Number) object, this.targetType) : null); + } + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link OffsetDateTime} representation. + * + * @author Hebert Coelho + */ + public enum RowToOffsetDateTimeConverter implements Converter { + + INSTANCE; + + @Override + public OffsetDateTime convert(Row row) { + return row.get(0, OffsetDateTime.class); + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link String} representation. + * + * @author Hebert Coelho + */ + public enum RowToStringConverter implements Converter { + + INSTANCE; + + @Override + public String convert(Row row) { + return row.get(0, String.class); + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link UUID} representation. 
+ * + * @author Hebert Coelho + */ + public enum RowToUuidConverter implements Converter { + + INSTANCE; + + @Override + public UUID convert(Row row) { + return row.get(0, UUID.class); + } + } + + /** + * Simple singleton to convert {@link Row}s to their {@link ZonedDateTime} representation. + * + * @author Hebert Coelho + */ + public enum RowToZonedDateTimeConverter implements Converter { + + INSTANCE; + + @Override + public ZonedDateTime convert(Row row) { + return row.get(0, ZonedDateTime.class); + } + } + +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcCustomConversions.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcCustomConversions.java new file mode 100644 index 0000000000..0e0c1f9f78 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/R2dbcCustomConversions.java @@ -0,0 +1,95 @@ +package org.springframework.data.r2dbc.convert; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.List; + +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.r2dbc.dialect.R2dbcDialect; +import org.springframework.data.r2dbc.mapping.R2dbcSimpleTypeHolder; + +/** + * Value object to capture custom conversion. 
{@link R2dbcCustomConversions} also act as factory for + * {@link org.springframework.data.mapping.model.SimpleTypeHolder} + * + * @author Mark Paluch + * @author Jens Schauder + * @see CustomConversions + * @see org.springframework.data.mapping.model.SimpleTypeHolder + */ +public class R2dbcCustomConversions extends CustomConversions { + + public static final List STORE_CONVERTERS; + + public static final StoreConversions STORE_CONVERSIONS; + + static { + + List converters = new ArrayList<>(R2dbcConverters.getConvertersToRegister()); + + STORE_CONVERTERS = Collections.unmodifiableList(converters); + STORE_CONVERSIONS = StoreConversions.of(R2dbcSimpleTypeHolder.HOLDER, STORE_CONVERTERS); + } + + /** + * Create a new {@link R2dbcCustomConversions} instance registering the given converters. + * + * @param storeConversions must not be {@literal null}. + * @param converters must not be {@literal null}. + */ + public R2dbcCustomConversions(StoreConversions storeConversions, Collection converters) { + super(new R2dbcCustomConversionsConfiguration(storeConversions, + converters instanceof List ? (List) converters : new ArrayList<>(converters))); + } + + protected R2dbcCustomConversions(ConverterConfiguration converterConfiguration) { + super(converterConfiguration); + } + + /** + * Create a new {@link R2dbcCustomConversions} from the given {@link R2dbcDialect} and {@code converters}. + * + * @param dialect must not be {@literal null}. + * @param converters must not be {@literal null}. + * @return the custom conversions object. + * @since 1.2 + */ + public static R2dbcCustomConversions of(R2dbcDialect dialect, Object... converters) { + return of(dialect, Arrays.asList(converters)); + } + + /** + * Create a new {@link R2dbcCustomConversions} from the given {@link R2dbcDialect} and {@code converters}. + * + * @param dialect must not be {@literal null}. + * @param converters must not be {@literal null}. + * @return the custom conversions object. 
+ * @since 1.2 + */ + public static R2dbcCustomConversions of(R2dbcDialect dialect, Collection converters) { + + List storeConverters = new ArrayList<>(dialect.getConverters()); + storeConverters.addAll(R2dbcCustomConversions.STORE_CONVERTERS); + + return new R2dbcCustomConversions(StoreConversions.of(dialect.getSimpleTypeHolder(), storeConverters), converters); + } + + static class R2dbcCustomConversionsConfiguration extends ConverterConfiguration { + + public R2dbcCustomConversionsConfiguration(StoreConversions storeConversions, List userConverters) { + super(storeConversions, userConverters, convertiblePair -> { + + // Avoid JSR-310 temporal types conversion into java.util.Date + if (convertiblePair.getSourceType().getName().startsWith("java.time.") + && convertiblePair.getTargetType().equals(Date.class)) { + return false; + } + + return true; + }); + } + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/RowMetadataUtils.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/RowMetadataUtils.java new file mode 100644 index 0000000000..b57e5398ad --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/RowMetadataUtils.java @@ -0,0 +1,70 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.convert; + +import io.r2dbc.spi.ColumnMetadata; +import io.r2dbc.spi.ReadableMetadata; +import io.r2dbc.spi.RowMetadata; + +/** + * Utility methods for {@link io.r2dbc.spi.RowMetadata} + * + * @author Mark Paluch + * @since 1.3.7 + */ +class RowMetadataUtils { + + /** + * Check whether the column {@code name} is contained in {@link RowMetadata}. The check happens case-insensitive. + * + * @param metadata the metadata object to inspect. + * @param name column name. + * @return {@code true} if the metadata contains the column {@code name}. + */ + public static boolean containsColumn(RowMetadata metadata, String name) { + return containsColumn(getColumnMetadata(metadata), name); + } + + /** + * Check whether the column {@code name} is contained in {@link RowMetadata}. The check happens case-insensitive. + * + * @param columns the metadata to inspect. + * @param name column name. + * @return {@code true} if the metadata contains the column {@code name}. + */ + public static boolean containsColumn(Iterable columns, String name) { + + for (ReadableMetadata columnMetadata : columns) { + if (name.equalsIgnoreCase(columnMetadata.getName())) { + return true; + } + } + + return false; + } + + /** + * Return the {@link Iterable} of {@link ColumnMetadata} from {@link RowMetadata}. + * + * @param metadata the metadata object to inspect. 
+ * @return + * @since 1.4.1 + */ + @SuppressWarnings("unchecked") + public static Iterable getColumnMetadata(RowMetadata metadata) { + return metadata.getColumnMetadatas(); + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/RowPropertyAccessor.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/RowPropertyAccessor.java new file mode 100644 index 0000000000..520a8d9927 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/RowPropertyAccessor.java @@ -0,0 +1,75 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.convert; + +import io.r2dbc.spi.Row; +import io.r2dbc.spi.RowMetadata; + +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.PropertyAccessor; +import org.springframework.expression.TypedValue; +import org.springframework.lang.Nullable; + +/** + * {@link PropertyAccessor} to read values from a {@link Row}. 
+ * + * @author Mark Paluch + * @since 1.2 + */ +class RowPropertyAccessor implements PropertyAccessor { + + private final @Nullable RowMetadata rowMetadata; + + RowPropertyAccessor(@Nullable RowMetadata rowMetadata) { + this.rowMetadata = rowMetadata; + } + + @Override + public Class[] getSpecificTargetClasses() { + return new Class[] { Row.class }; + } + + @Override + public boolean canRead(EvaluationContext context, @Nullable Object target, String name) { + return rowMetadata != null && target != null && RowMetadataUtils.containsColumn(rowMetadata, name); + } + + @Override + public TypedValue read(EvaluationContext context, @Nullable Object target, String name) { + + if (target == null) { + return TypedValue.NULL; + } + + Object value = ((Row) target).get(name); + + if (value == null) { + return TypedValue.NULL; + } + + return new TypedValue(value); + } + + @Override + public boolean canWrite(EvaluationContext context, @Nullable Object target, String name) { + return false; + } + + @Override + public void write(EvaluationContext context, @Nullable Object target, String name, @Nullable Object newValue) { + throw new UnsupportedOperationException(); + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/SequenceEntityCallbackDelegate.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/SequenceEntityCallbackDelegate.java new file mode 100644 index 0000000000..5c3f452d87 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/SequenceEntityCallbackDelegate.java @@ -0,0 +1,108 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.convert; + +import reactor.core.publisher.Mono; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.r2dbc.mapping.OutboundRow; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.r2dbc.core.DatabaseClient; +import org.springframework.r2dbc.core.Parameter; +import org.springframework.util.ClassUtils; +import org.springframework.util.NumberUtils; + +/** + * Support class for generating identifier values through a database sequence. 
+ * + * @author Mikhail Polivakha + * @author Mark Paluch + * @since 3.5 + * @see org.springframework.data.relational.core.mapping.Sequence + */ +class SequenceEntityCallbackDelegate { + + private static final Log LOG = LogFactory.getLog(SequenceEntityCallbackDelegate.class); + + private final Dialect dialect; + private final DatabaseClient databaseClient; + + public SequenceEntityCallbackDelegate(Dialect dialect, DatabaseClient databaseClient) { + this.dialect = dialect; + this.databaseClient = databaseClient; + } + + @SuppressWarnings("unchecked") + protected Mono generateSequenceValue(RelationalPersistentProperty property, OutboundRow row, + PersistentPropertyAccessor accessor) { + + Class targetType = ClassUtils.resolvePrimitiveIfNecessary(property.getType()); + + return getSequenceValue(property).map(it -> { + + Object sequenceValue = it; + if (sequenceValue instanceof Number && Number.class.isAssignableFrom(targetType)) { + sequenceValue = NumberUtils.convertNumberToTargetClass((Number) sequenceValue, + (Class) targetType); + } + + row.append(property.getColumnName(), Parameter.from(sequenceValue)); + accessor.setProperty(property, sequenceValue); + + return accessor.getBean(); + }); + } + + protected boolean hasValue(PersistentProperty property, PersistentPropertyAccessor propertyAccessor) { + + Object identifier = propertyAccessor.getProperty(property); + + if (property.getType().isPrimitive()) { + + Object primitiveDefault = ReflectionUtils.getPrimitiveDefault(property.getType()); + return !primitiveDefault.equals(identifier); + } + + return identifier != null; + } + + private Mono getSequenceValue(RelationalPersistentProperty property) { + + SqlIdentifier sequence = property.getSequence(); + + if (sequence != null && !dialect.getIdGeneration().sequencesSupported()) { + LOG.warn(""" + Entity type '%s' is marked for sequence usage but configured dialect '%s' + does not support sequences. Falling back to identity columns. 
+ """.formatted(property.getOwner().getType(), ClassUtils.getQualifiedName(dialect.getClass()))); + return Mono.empty(); + } + + String sql = dialect.getIdGeneration().createSequenceQuery(sequence); + return databaseClient // + .sql(sql) // + .map((r, rowMetadata) -> r.get(0)) // + .one(); + } + +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/package-info.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/package-info.java new file mode 100644 index 0000000000..cb313f6a8a --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/convert/package-info.java @@ -0,0 +1,6 @@ +/** + * R2DBC-specific conversion and converter implementations. + */ +@org.springframework.lang.NonNullApi +@org.springframework.lang.NonNullFields +package org.springframework.data.r2dbc.convert; diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/BindParameterSource.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/BindParameterSource.java new file mode 100644 index 0000000000..3049eeb81f --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/BindParameterSource.java @@ -0,0 +1,71 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.core; + +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; + +/** + * Interface that defines common functionality for objects that can offer parameter values for named bind parameters, + * serving as argument for {@link NamedParameterExpander} operations. + *

+ * This interface allows for the specification of the type in addition to parameter values. All parameter values and + * types are identified by specifying the name of the parameter. + *

+ * Intended to wrap various implementations like a {@link java.util.Map} with a consistent interface. + * + * @author Mark Paluch + * @see MapBindParameterSource + * @deprecated since 1.2, without replacement. + */ +@Deprecated +interface BindParameterSource { + + /** + * Determine whether there is a value for the specified named parameter. + * + * @param paramName the name of the parameter. + * @return {@literal true} if there is a value defined; {@literal false} otherwise. + */ + boolean hasValue(String paramName); + + /** + * Return the parameter value for the requested named parameter. + * + * @param paramName the name of the parameter. + * @return the value of the specified parameter, can be {@literal null}. + * @throws IllegalArgumentException if there is no value for the requested parameter. + */ + @Nullable + Object getValue(String paramName) throws IllegalArgumentException; + + /** + * Determine the type for the specified named parameter. + * + * @param paramName the name of the parameter. + * @return the type of the specified parameter, or {@link Object#getClass()} if not known. + */ + default Class getType(String paramName) { + return Object.class; + } + + /** + * Returns parameter names of the underlying parameter source. + * + * @return parameter names of the underlying parameter source. + */ + Streamable getParameterNames(); +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/DefaultReactiveDataAccessStrategy.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/DefaultReactiveDataAccessStrategy.java new file mode 100644 index 0000000000..d655464e82 --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/DefaultReactiveDataAccessStrategy.java @@ -0,0 +1,330 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.core; + +import io.r2dbc.spi.Readable; +import io.r2dbc.spi.ReadableMetadata; +import io.r2dbc.spi.Row; +import io.r2dbc.spi.RowMetadata; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; + +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.InvalidDataAccessResourceUsageException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.r2dbc.convert.EntityRowMapper; +import org.springframework.data.r2dbc.convert.MappingR2dbcConverter; +import org.springframework.data.r2dbc.convert.R2dbcConverter; +import org.springframework.data.r2dbc.convert.R2dbcCustomConversions; +import org.springframework.data.r2dbc.dialect.R2dbcDialect; +import org.springframework.data.r2dbc.mapping.OutboundRow; +import org.springframework.data.r2dbc.mapping.R2dbcMappingContext; +import org.springframework.data.r2dbc.query.UpdateMapper; +import org.springframework.data.r2dbc.support.ArrayUtils; +import org.springframework.data.relational.core.dialect.ArrayColumns; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import 
org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.lang.Nullable; +import org.springframework.r2dbc.core.Parameter; +import org.springframework.r2dbc.core.PreparedOperation; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; + +/** + * Default {@link ReactiveDataAccessStrategy} implementation. + * + * @author Mark Paluch + * @author Louis Morgan + * @author Jens Schauder + */ +public class DefaultReactiveDataAccessStrategy implements ReactiveDataAccessStrategy { + + private final R2dbcDialect dialect; + private final R2dbcConverter converter; + private final UpdateMapper updateMapper; + private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; + private final StatementMapper statementMapper; + private final NamedParameterExpander expander = new NamedParameterExpander(); + + /** + * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link R2dbcDialect} and optional + * {@link org.springframework.core.convert.converter.Converter}s. + * + * @param dialect the {@link R2dbcDialect} to use. + */ + public DefaultReactiveDataAccessStrategy(R2dbcDialect dialect) { + this(dialect, Collections.emptyList()); + } + + /** + * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link R2dbcDialect} and optional + * {@link org.springframework.core.convert.converter.Converter}s. + * + * @param dialect the {@link R2dbcDialect} to use. + * @param converters custom converters to register, must not be {@literal null}. 
+ * @see R2dbcCustomConversions + * @see org.springframework.core.convert.converter.Converter + */ + public DefaultReactiveDataAccessStrategy(R2dbcDialect dialect, Collection converters) { + this(dialect, createConverter(dialect, converters)); + } + + /** + * Creates a new {@link R2dbcConverter} given {@link R2dbcDialect} and custom {@code converters}. + * + * @param dialect must not be {@literal null}. + * @param converters must not be {@literal null}. + * @return the {@link R2dbcConverter}. + */ + public static R2dbcConverter createConverter(R2dbcDialect dialect, Collection converters) { + + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(converters, "Converters must not be null"); + + R2dbcCustomConversions customConversions = R2dbcCustomConversions.of(dialect, converters); + + R2dbcMappingContext context = new R2dbcMappingContext(); + context.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); + + return new MappingR2dbcConverter(context, customConversions); + } + + /** + * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link R2dbcDialect} and {@link R2dbcConverter}. + * + * @param dialect the {@link R2dbcDialect} to use. + * @param converter must not be {@literal null}. + */ + @SuppressWarnings("unchecked") + public DefaultReactiveDataAccessStrategy(R2dbcDialect dialect, R2dbcConverter converter) { + + Assert.notNull(dialect, "Dialect must not be null"); + Assert.notNull(converter, "RelationalConverter must not be null"); + + this.converter = converter; + this.updateMapper = new UpdateMapper(dialect, converter); + this.mappingContext = (MappingContext, ? 
extends RelationalPersistentProperty>) this.converter + .getMappingContext(); + this.dialect = dialect; + + RenderContextFactory factory = new RenderContextFactory(dialect); + this.statementMapper = new DefaultStatementMapper(dialect, factory.createRenderContext(), this.updateMapper, + this.mappingContext); + } + + @Override + public List getAllColumns(Class entityType) { + + RelationalPersistentEntity persistentEntity = getPersistentEntity(entityType); + + if (persistentEntity == null) { + return Collections.singletonList(SqlIdentifier.unquoted("*")); + } + + List columnNames = new ArrayList<>(); + for (RelationalPersistentProperty property : persistentEntity) { + columnNames.add(property.getColumnName()); + } + + return columnNames; + } + + @Override + public List getIdentifierColumns(Class entityType) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(entityType); + + List columnNames = new ArrayList<>(); + for (RelationalPersistentProperty property : persistentEntity) { + + if (property.isIdProperty()) { + columnNames.add(property.getColumnName()); + } + } + + return columnNames; + } + + public OutboundRow getOutboundRow(Object object) { + + Assert.notNull(object, "Entity object must not be null"); + + OutboundRow row = new OutboundRow(); + + this.converter.write(object, row); + + RelationalPersistentEntity entity = getRequiredPersistentEntity(ClassUtils.getUserClass(object)); + + for (RelationalPersistentProperty property : entity) { + + Parameter value = row.get(property.getColumnName()); + if (value != null && shouldConvertArrayValue(property, value)) { + + Parameter writeValue = getArrayValue(value, property); + row.put(property.getColumnName(), writeValue); + } + } + + return row; + } + + private boolean shouldConvertArrayValue(RelationalPersistentProperty property, Parameter value) { + + if (!property.isCollectionLike()) { + return false; + } + + if (value.hasValue() && (value.getValue() instanceof Collection || 
value.getValue().getClass().isArray())) { + return true; + } + + if (Collection.class.isAssignableFrom(value.getType()) || value.getType().isArray()) { + return true; + } + + return false; + } + + private Parameter getArrayValue(Parameter value, RelationalPersistentProperty property) { + + if (value.getType().equals(byte[].class)) { + return value; + } + + ArrayColumns arrayColumns = this.dialect.getArraySupport(); + + if (!arrayColumns.isSupported()) { + throw new InvalidDataAccessResourceUsageException( + "Dialect " + this.dialect.getClass().getName() + " does not support array columns"); + } + + Class actualType = null; + if (value.getValue() instanceof Collection) { + actualType = CollectionUtils.findCommonElementType((Collection) value.getValue()); + } else if (!value.isEmpty() && value.getValue().getClass().isArray()) { + actualType = value.getValue().getClass().getComponentType(); + } + + if (actualType == null) { + actualType = property.getActualType(); + } + + actualType = converter.getTargetType(actualType); + + if (value.isEmpty()) { + + Class targetType = arrayColumns.getArrayType(actualType); + int depth = actualType.isArray() ? 
ArrayUtils.getDimensionDepth(actualType) : 1; + Class targetArrayType = ArrayUtils.getArrayClass(targetType, depth); + return Parameter.empty(targetArrayType); + } + + return Parameter.fromOrEmpty(this.converter.getArrayValue(arrayColumns, property, value.getValue()), actualType); + } + + @Override + public Parameter getBindValue(Parameter value) { + return this.updateMapper.getBindValue(value); + } + + @Override + public BiFunction getRowMapper(Class typeToRead) { + return new EntityRowMapper<>(typeToRead, this.converter); + } + + @Override + public RowDocument toRowDocument(Class type, Readable row, Iterable metadata) { + return this.converter.toRowDocument(type, row, metadata); + } + + @Override + public PreparedOperation processNamedParameters(String query, NamedParameterProvider parameterProvider) { + + List parameterNames = this.expander.getParameterNames(query); + + Map namedBindings = new LinkedHashMap<>(parameterNames.size()); + for (String parameterName : parameterNames) { + + Parameter value = parameterProvider.getParameter(parameterNames.indexOf(parameterName), parameterName); + + if (value == null) { + throw new InvalidDataAccessApiUsageException( + String.format("No parameter specified for [%s] in query [%s]", parameterName, query)); + } + + namedBindings.put(parameterName, value); + } + + return this.expander.expand(query, this.dialect.getBindMarkersFactory(), new MapBindParameterSource(namedBindings)); + } + + @Override + public SqlIdentifier getTableName(Class type) { + return getRequiredPersistentEntity(type).getQualifiedTableName(); + } + + @Override + public String toSql(SqlIdentifier identifier) { + return this.updateMapper.toSql(identifier); + } + + @Override + public StatementMapper getStatementMapper() { + return this.statementMapper; + } + + @Override + public R2dbcConverter getConverter() { + return this.converter; + } + + public MappingContext, ? 
extends RelationalPersistentProperty> getMappingContext() { + return this.mappingContext; + } + + @Override + public String renderForGeneratedValues(SqlIdentifier identifier) { + return dialect.renderForGeneratedValues(identifier); + } + + /** + * @since 3.4 + */ + @Override + public Dialect getDialect() { + return dialect; + } + + private RelationalPersistentEntity getRequiredPersistentEntity(Class typeToRead) { + return this.mappingContext.getRequiredPersistentEntity(typeToRead); + } + + @Nullable + private RelationalPersistentEntity getPersistentEntity(Class typeToRead) { + return this.mappingContext.getPersistentEntity(typeToRead); + } +} diff --git a/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/DefaultStatementMapper.java b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/DefaultStatementMapper.java new file mode 100644 index 0000000000..a7fcf2a13e --- /dev/null +++ b/spring-data-r2dbc/src/main/java/org/springframework/data/r2dbc/core/DefaultStatementMapper.java @@ -0,0 +1,361 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.core; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.r2dbc.convert.R2dbcConverter; +import org.springframework.data.r2dbc.dialect.R2dbcDialect; +import org.springframework.data.r2dbc.query.BoundAssignments; +import org.springframework.data.r2dbc.query.BoundCondition; +import org.springframework.data.r2dbc.query.UpdateMapper; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.CriteriaDefinition; +import org.springframework.data.relational.core.sql.*; +import org.springframework.data.relational.core.sql.InsertBuilder.InsertValuesWithBuild; +import org.springframework.data.relational.core.sql.render.RenderContext; +import org.springframework.data.relational.core.sql.render.SqlRenderer; +import org.springframework.lang.Nullable; +import org.springframework.r2dbc.core.PreparedOperation; +import org.springframework.r2dbc.core.binding.BindMarkers; +import org.springframework.r2dbc.core.binding.BindTarget; +import org.springframework.r2dbc.core.binding.Bindings; +import org.springframework.util.Assert; + +/** + * Default {@link StatementMapper} implementation. + * + * @author Mark Paluch + * @author Roman Chigvintsev + * @author Mingyuan Wu + * @author Diego Krupitza + */ +class DefaultStatementMapper implements StatementMapper { + + private final R2dbcDialect dialect; + private final RenderContext renderContext; + private final UpdateMapper updateMapper; + private final MappingContext, ? 
extends RelationalPersistentProperty> mappingContext; + + DefaultStatementMapper(R2dbcDialect dialect, R2dbcConverter converter) { + + RenderContextFactory factory = new RenderContextFactory(dialect); + + this.dialect = dialect; + this.renderContext = factory.createRenderContext(); + this.updateMapper = new UpdateMapper(dialect, converter); + this.mappingContext = converter.getMappingContext(); + } + + DefaultStatementMapper(R2dbcDialect dialect, RenderContext renderContext, UpdateMapper updateMapper, + MappingContext, ? extends RelationalPersistentProperty> mappingContext) { + this.dialect = dialect; + this.renderContext = renderContext; + this.updateMapper = updateMapper; + this.mappingContext = mappingContext; + } + + @Override + @SuppressWarnings("unchecked") + public TypedStatementMapper forType(Class type) { + + Assert.notNull(type, "Type must not be null"); + + return new DefaultTypedStatementMapper<>( + (RelationalPersistentEntity) this.mappingContext.getRequiredPersistentEntity(type)); + } + + @Override + public PreparedOperation getMappedObject(SelectSpec selectSpec) { + return getMappedObject(selectSpec, null); + } + + private PreparedOperation

requiredByWhere = new HashSet<>(); + Set
from = new HashSet<>(); + @Nullable Visitable parent; + + @Override + public void enter(Visitable segment) { + + if (segment instanceof Table && parent instanceof From) { + from.add((Table) segment); + } + + if (segment instanceof Where) { + segment.visit(new SubselectFilteringWhereVisitor()); + } + + if (segment instanceof Join || segment instanceof OrderByField || segment instanceof From + || segment instanceof Select || segment instanceof Where || segment instanceof SimpleFunction) { + parent = segment; + } + } + + @Override + public void leave(Visitable segment) {} + + /** + * {@link Visitor} that skips sub-{@link Select} and collects columns within a {@link Where} clause. + */ + class SubselectFilteringWhereVisitor implements Visitor { + + private @Nullable Select selectFilter; + + @Override + public void enter(Visitable segment) { + + if (selectFilter != null) { + return; + } + + if (segment instanceof Select) { + this.selectFilter = (Select) segment; + return; + } + + if (segment instanceof Table) { + requiredByWhere.add((Table) segment); + } + } + + @Override + public void leave(Visitable segment) { + + if (this.selectFilter == segment) { + this.selectFilter = null; + } + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AbstractSegment.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AbstractSegment.java new file mode 100644 index 0000000000..19846935c0 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AbstractSegment.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Abstract implementation to support {@link Segment} implementations. + * + * @author Mark Paluch + * @since 1.1 + */ +abstract class AbstractSegment implements Segment { + + private final Segment[] children; + + protected AbstractSegment(Segment... children) { + this.children = children; + } + + @Override + public void visit(Visitor visitor) { + + Assert.notNull(visitor, "Visitor must not be null"); + + visitor.enter(this); + for (Segment child : children) { + child.visit(visitor); + } + visitor.leave(this); + } + + @Override + public int hashCode() { + return toString().hashCode(); + } + + @Override + public boolean equals(@Nullable Object obj) { + return obj instanceof Segment && toString().equals(obj.toString()); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(" + StringUtils.arrayToDelimitedString(children, ", ") + ")"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Aliased.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Aliased.java new file mode 100644 index 0000000000..31e7750431 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Aliased.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Aliased element exposing an {@link #getAlias() alias}. + * + * @author Mark Paluch + * @since 1.1 + */ +public interface Aliased { + + /** + * @return the alias name. + */ + SqlIdentifier getAlias(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AliasedExpression.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AliasedExpression.java new file mode 100644 index 0000000000..713ec0d5ec --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AliasedExpression.java @@ -0,0 +1,54 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * An expression with an alias. 
+ * + * @author Jens Schauder + * @since 1.1 + */ +public class AliasedExpression extends AbstractSegment implements Aliased, Expression { + + private final Expression expression; + private final SqlIdentifier alias; + + public AliasedExpression(Expression expression, String alias) { + + super(expression); + + this.expression = expression; + this.alias = SqlIdentifier.unquoted(alias); + } + + public AliasedExpression(Expression expression, SqlIdentifier alias) { + + super(expression); + + this.expression = expression; + this.alias = alias; + } + + @Override + public SqlIdentifier getAlias() { + return alias; + } + + @Override + public String toString() { + return expression.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AnalyticFunction.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AnalyticFunction.java new file mode 100644 index 0000000000..b3af3e1e86 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AnalyticFunction.java @@ -0,0 +1,94 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import java.util.Arrays; + +/** + * Represents an analytic function, also known as windowing function + * + * @author Jens Schauder + * @since 2.7 + */ +public class AnalyticFunction extends AbstractSegment implements Expression { + + private final SimpleFunction function; + private final Partition partition; + private final OrderBy orderBy; + + public static AnalyticFunction create(String function, Expression... arguments) { + + return new AnalyticFunction(SimpleFunction.create(function, Arrays.asList(arguments)), new Partition(), + new OrderBy()); + } + + private AnalyticFunction(SimpleFunction function, Partition partition, OrderBy orderBy) { + + super(function, partition, orderBy); + + this.function = function; + this.partition = partition; + this.orderBy = orderBy; + } + + public AnalyticFunction partitionBy(Expression... partitionBy) { + + return new AnalyticFunction(function, new Partition(partitionBy), orderBy); + } + + public AnalyticFunction orderBy(OrderByField... orderBy) { + return new AnalyticFunction(function, partition, new OrderBy(orderBy)); + } + + public AnalyticFunction orderBy(Expression... orderByExpression) { + + final OrderByField[] orderByFields = Arrays.stream(orderByExpression) // + .map(OrderByField::from) // + .toArray(OrderByField[]::new); + + return new AnalyticFunction(function, partition, new OrderBy(orderByFields)); + } + + public AliasedAnalyticFunction as(String alias) { + return new AliasedAnalyticFunction(this, SqlIdentifier.unquoted(alias)); + } + + public AliasedAnalyticFunction as(SqlIdentifier alias) { + return new AliasedAnalyticFunction(this, alias); + } + + public static class Partition extends SegmentList { + Partition(Expression... 
expressions) { + super(expressions); + } + } + + private static class AliasedAnalyticFunction extends AnalyticFunction implements Aliased { + + private final SqlIdentifier alias; + + AliasedAnalyticFunction(AnalyticFunction analyticFunction, SqlIdentifier alias) { + + super(analyticFunction.function, analyticFunction.partition, analyticFunction.orderBy); + this.alias = alias; + } + + @Override + public SqlIdentifier getAlias() { + return alias; + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AndCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AndCondition.java new file mode 100644 index 0000000000..7d43577bd4 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AndCondition.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * {@link Condition} representing an {@code AND} relation between two {@link Condition}s. + * + * @author Mark Paluch + * @since 1.1 + * @see Condition#and(Condition) + */ +public class AndCondition extends MultipleCondition { + + AndCondition(Condition... 
conditions) { + super(" AND ", conditions); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AssignValue.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AssignValue.java new file mode 100644 index 0000000000..8816c8660d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AssignValue.java @@ -0,0 +1,73 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * Assign a {@link Expression} to a {@link Column}. + *

+ * Results in a rendered assignment: {@code = } (e.g. {@code col = 'foo'}. + *

+ * @author Mark Paluch + * @since 1.1 + */ +public class AssignValue extends AbstractSegment implements Assignment { + + private final Column column; + private final Expression value; + + private AssignValue(Column column, Expression value) { + super(column, value); + this.column = column; + this.value = value; + } + + /** + * Creates a {@link AssignValue value} assignment to a {@link Column} given an {@link Expression}. + * + * @param target target column, must not be {@literal null}. + * @param value assignment value, must not be {@literal null}. + * @return the {@link AssignValue}. + */ + public static AssignValue create(Column target, Expression value) { + + Assert.notNull(target, "Target column must not be null"); + Assert.notNull(value, "Value must not be null"); + + return new AssignValue(target, value); + } + + /** + * @return the target {@link Column}. + */ + public Column getColumn() { + return column; + } + + /** + * @return the value to assign. + */ + public Expression getValue() { + return value; + } + + @Override + public String toString() { + + return this.column + " = " + this.value; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Assignment.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Assignment.java new file mode 100644 index 0000000000..33d66dda73 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Assignment.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Update assignment to a {@link Column}. + * + * @author Mark Paluch + */ +public interface Assignment extends Segment {} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Assignments.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Assignments.java new file mode 100644 index 0000000000..29f5b94ef8 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Assignments.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Factory for common {@link Assignment}s. + * + * @author Mark Paluch + * @since 1.1 + * @see SQL + * @see Expressions + * @see Functions + */ +public abstract class Assignments { + + /** + * Creates a {@link AssignValue value} assignment to a {@link Column} given an {@link Expression}. 
+ * + * @param target target column, must not be {@literal null}. + * @param value assignment value, must not be {@literal null}. + * @return the {@link AssignValue}. + */ + public static AssignValue value(Column target, Expression value) { + return AssignValue.create(target, value); + } + + // Utility constructor. + private Assignments() {} +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AsteriskFromTable.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AsteriskFromTable.java new file mode 100644 index 0000000000..7636c5ee77 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/AsteriskFromTable.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * {@link Segment} to select all columns from a {@link Table}. + *

+ * Renders to: {@code + * +

+ * .*} as in {@code SELECT + * +
+ * .* FROM …}. + *

+ * + * @author Mark Paluch + * @since 1.1 + * @see Table#asterisk() + */ +public class AsteriskFromTable extends AbstractSegment implements Expression { + + private final TableLike table; + + AsteriskFromTable(TableLike table) { + super(table); + this.table = table; + } + + public static AsteriskFromTable create(Table table) { + return new AsteriskFromTable(table); + } + + /** + * @return the associated {@link Table}. + */ + public TableLike getTable() { + return table; + } + + @Override + public String toString() { + + if (table instanceof Aliased) { + return ((Aliased) table).getAlias() + ".*"; + } + + return table + ".*"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Between.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Between.java new file mode 100644 index 0000000000..051ce7be0a --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Between.java @@ -0,0 +1,98 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * BETWEEN {@link Condition} comparing between {@link Expression}s. + *

+ * Results in a rendered condition: {@code BETWEEN AND }. + *

+ * + * @author Mark Paluch + * @author Meng Zuozhu + * @since 2.2 + */ +public class Between extends AbstractSegment implements Condition { + + private final Expression column; + private final Expression begin; + private final Expression end; + private final boolean negated; + + private Between(Expression column, Expression begin, Expression end, boolean negated) { + + super(column, begin, end); + + this.column = column; + this.begin = begin; + this.end = end; + this.negated = negated; + } + + /** + * Creates a new {@link Between} {@link Condition} given two {@link Expression}s. + * + * @param columnOrExpression left side of the comparison. + * @param begin begin value of the comparison. + * @param end end value of the comparison. + * @return the {@link Between} condition. + */ + public static Between create(Expression columnOrExpression, Expression begin, Expression end) { + + Assert.notNull(columnOrExpression, "Column or expression must not be null"); + Assert.notNull(begin, "Begin value must not be null"); + Assert.notNull(end, "end value must not be null"); + + return new Between(columnOrExpression, begin, end, false); + } + + /** + * @return the column {@link Expression}. + */ + public Expression getColumn() { + return column; + } + + /** + * @return the begin {@link Expression}. + */ + public Expression getBegin() { + return begin; + } + + /** + * @return the end {@link Expression}. + */ + public Expression getEnd() { + return end; + } + + public boolean isNegated() { + return negated; + } + + @Override + public Between not() { + return new Between(this.column, this.begin, this.end, !negated); + } + + @Override + public String toString() { + return column + (negated ? 
" NOT" : "") + " BETWEEN " + begin + " AND " + end; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/BindMarker.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/BindMarker.java new file mode 100644 index 0000000000..185453e4b3 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/BindMarker.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Bind marker/parameter placeholder used to construct prepared statements with parameter substitution. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +public class BindMarker extends AbstractSegment implements Expression { + + @Override + public String toString() { + return "?"; + } + + static class NamedBindMarker extends BindMarker implements Named { + + private final String name; + + NamedBindMarker(String name) { + this.name = name; + } + + @Override + public SqlIdentifier getName() { + return SqlIdentifier.unquoted(name); + } + + @Override + public String toString() { + return "?[" + name + "]"; + } + } +} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/Unset.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/BooleanLiteral.java similarity index 51% rename from src/main/java/org/springframework/data/jdbc/mapping/event/Unset.java rename to spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/BooleanLiteral.java index d4287fba7f..626f1c5213 100644 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/Unset.java +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/BooleanLiteral.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,27 +13,27 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; +package org.springframework.data.relational.core.sql; /** - * An unset identifier. 
Always returns {@link Optional#empty()} as value. + * Represents a {@link Boolean} literal. * - * @author Jens Schaude - * @author Oliver Gierke + * @author Mark Paluch * @since 2.0 */ -enum Unset implements Identifier { +public class BooleanLiteral extends Literal { + + BooleanLiteral(boolean content) { + super(content); + } - UNSET; + @Override + public Boolean getContent() { + return super.getContent(); + } - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.event.Identifier#getOptionalValue() - */ @Override - public Optional getOptionalValue() { - return Optional.empty(); + public String toString() { + return getContent() ? "TRUE" : "FALSE"; } } diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/CaseExpression.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/CaseExpression.java new file mode 100644 index 0000000000..fb7b13374e --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/CaseExpression.java @@ -0,0 +1,86 @@ +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.Nullable; + +import java.util.ArrayList; +import java.util.List; + +import static java.util.stream.Collectors.*; + +/** + * Case with one or more conditions expression. + *

+ * Results in a rendered condition: + *

+ *   CASE
+ *     WHEN condition1 THEN result1
+ *     WHEN condition2 THEN result2
+ *     ELSE result
+ *   END
+ * 
+ * + * @author Sven Rienstra + * @since 3.4 + */ +public class CaseExpression extends AbstractSegment implements Expression { + + private final List whenList; + @Nullable + private final Expression elseExpression; + + private static Segment[] children(List whenList, @Nullable Expression elseExpression) { + + List segments = new ArrayList<>(whenList); + + if (elseExpression != null) { + segments.add(elseExpression); + } + + return segments.toArray(new Segment[0]); + } + + private CaseExpression(List whenList, @Nullable Expression elseExpression) { + + super(children(whenList, elseExpression)); + + this.whenList = whenList; + this.elseExpression = elseExpression; + } + + /** + * Create CASE {@link Expression} with initial {@link When} condition. + * + * @param condition initial {@link When} condition + * @return the {@link CaseExpression} + */ + public static CaseExpression create(When condition) { + return new CaseExpression(List.of(condition), null); + } + + /** + * Add additional {@link When} condition + * + * @param condition the {@link When} condition + * @return the {@link CaseExpression} + */ + public CaseExpression when(When condition) { + List conditions = new ArrayList<>(this.whenList); + conditions.add(condition); + return new CaseExpression(conditions, elseExpression); + } + + /** + * Add ELSE clause + * + * @param elseExpression the {@link Expression} else value + * @return the {@link CaseExpression} + */ + public CaseExpression elseExpression(Expression elseExpression) { + return new CaseExpression(whenList, elseExpression); + } + + @Override + public String toString() { + return "CASE " + whenList.stream().map(When::toString).collect(joining(" ")) + (elseExpression != null ? 
" ELSE " + elseExpression : "") + " END"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Cast.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Cast.java new file mode 100644 index 0000000000..b34a0a5907 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Cast.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * Represents a {@code CAST} expression like {@code CAST(something AS JSON}. + * + * @author Jens Schauder + * @since 2.3 + */ +public class Cast extends AbstractSegment implements Expression { + + private final String targetType; + private final Expression expression; + + private Cast(Expression expression, String targetType) { + + super(expression); + + Assert.notNull(targetType, "Cast target must not be null"); + + this.expression = expression; + this.targetType = targetType; + } + + /** + * Creates a new {@code CAST} expression. + * + * @param expression the expression to cast. Must not be {@literal null}. + * @param targetType the type to cast to. Must not be {@literal null}. + * @return the {@code CAST} for {@code expression} into {@code targetType}. 
+ */ + public static Expression create(Expression expression, String targetType) { + return new Cast(expression, targetType); + } + + public String getTargetType() { + return targetType; + } + + @Override + public String toString() { + return "CAST(" + expression + " AS " + targetType + ")"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Column.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Column.java new file mode 100644 index 0000000000..5f1660ff1b --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Column.java @@ -0,0 +1,444 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Objects; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Column name within a {@code SELECT … FROM} clause. + *

+ * Renders to: {@code } or {@code .}. + *

+ * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +public class Column extends AbstractSegment implements Expression, Named { + + private final SqlIdentifier name; + private final TableLike table; + + Column(String name, TableLike table) { + + super(table); + Assert.notNull(name, "Name must not be null"); + + this.name = SqlIdentifier.unquoted(name); + this.table = table; + } + + Column(SqlIdentifier name, TableLike table) { + + super(table); + Assert.notNull(name, "Name must not be null"); + + this.name = name; + this.table = table; + } + + /** + * Creates a new {@link Column} associated with a {@link Table}. + * + * @param name column name, must not {@literal null} or empty. + * @param table the table, must not be {@literal null}. + * @return the new {@link Column}. + * @since 2.3 + */ + public static Column create(String name, TableLike table) { + + Assert.hasText(name, "Name must not be null or empty"); + Assert.notNull(table, "Table must not be null"); + + return new Column(SqlIdentifier.unquoted(name), table); + } + + /** + * Creates a new {@link Column} associated with a {@link Table}. + * + * @param name column name, must not {@literal null}. + * @param table the table, must not be {@literal null}. + * @return the new {@link Column}. + * @since 2.0 + */ + public static Column create(SqlIdentifier name, Table table) { + + Assert.notNull(name, "Name must not be null"); + Assert.notNull(table, "Table must not be null"); + + return new Column(name, table); + } + + /** + * Creates a new aliased {@link Column} associated with a {@link Table}. + * + * @param name column name, must not {@literal null} or empty. + * @param table the table, must not be {@literal null}. + * @param alias column alias name, must not {@literal null} or empty. + * @return the new {@link Column}. 
+ */ + public static Column aliased(String name, Table table, String alias) { + + Assert.hasText(name, "Name must not be null or empty"); + Assert.notNull(table, "Table must not be null"); + Assert.hasText(alias, "Alias must not be null or empty"); + + return new AliasedColumn(name, table, alias); + } + + /** + * Creates a new aliased {@link Column}. + * + * @param alias column alias name, must not {@literal null} or empty. + * @return the aliased {@link Column}. + */ + public Column as(String alias) { + + Assert.hasText(alias, "Alias must not be null or empty"); + + return new AliasedColumn(name, table, SqlIdentifier.unquoted(alias)); + } + + /** + * Creates a new aliased {@link Column}. + * + * @param alias column alias name, must not {@literal null}. + * @return the aliased {@link Column}. + * @since 2.0 + */ + public Column as(SqlIdentifier alias) { + + Assert.notNull(alias, "Alias must not be null"); + + return new AliasedColumn(name, table, alias); + } + + /** + * Creates a new {@link Column} associated with a {@link Table}. + * + * @param table the table, must not be {@literal null}. + * @return a new {@link Column} associated with {@link Table}. + */ + public Column from(Table table) { + + Assert.notNull(table, "Table must not be null"); + + return new Column(name, table); + } + + // ------------------------------------------------------------------------- + // Methods for Condition creation. + // ------------------------------------------------------------------------- + + /** + * Creates a {@code =} (equals) {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public Comparison isEqualTo(Expression expression) { + return Conditions.isEqual(this, expression); + } + + /** + * Creates a {@code !=} (not equals) {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Comparison} condition. 
+ */ + public Comparison isNotEqualTo(Expression expression) { + return Conditions.isNotEqual(this, expression); + } + + /** + * Creates a {@code BETWEEN} {@link Condition}. + * + * @param begin begin value for the comparison. + * @param end end value for the comparison. + * @return the {@link Between} condition. + * @since 2.0 + */ + public Between between(Expression begin, Expression end) { + return Conditions.between(this, begin, end); + } + + /** + * Creates a {@code NOT BETWEEN} {@link Condition}. + * + * @param begin begin value for the comparison. + * @param end end value for the comparison. + * @return the {@link Between} condition. + * @since 2.0 + */ + public Between notBetween(Expression begin, Expression end) { + return Conditions.notBetween(this, begin, end); + } + + /** + * Creates a {@code <} (less) {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public Comparison isLess(Expression expression) { + return Conditions.isLess(this, expression); + } + + /** + * CCreates a {@code <=} (greater) {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public Comparison isLessOrEqualTo(Expression expression) { + return Conditions.isLessOrEqualTo(this, expression); + } + + /** + * Creates a {@code !=} (not equals) {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public Comparison isGreater(Expression expression) { + return Conditions.isGreater(this, expression); + } + + /** + * Creates a {@code <=} (greater or equal to) {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public Comparison isGreaterOrEqualTo(Expression expression) { + return Conditions.isGreaterOrEqualTo(this, expression); + } + + /** + * Creates a {@code LIKE} {@link Condition}. 
+ * + * @param expression right side of the comparison. + * @return the {@link Like} condition. + */ + public Like like(Expression expression) { + return Conditions.like(this, expression); + } + + /** + * Creates a {@code NOT LIKE} {@link Condition}. + * + * @param expression right side of the comparison. + * @return the {@link Like} condition. + * @since 2.0 + */ + public Like notLike(Expression expression) { + return Conditions.notLike(this, expression); + } + + /** + * Creates a new {@link In} {@link Condition} given right {@link Expression}s. + * + * @param expression right side of the comparison. + * @return the {@link In} condition. + */ + public In in(Expression... expression) { + return Conditions.in(this, expression); + } + + /** + * Creates a new {@link In} {@link Condition} given a subselects. + * + * @param subselect right side of the comparison. + * @return the {@link In} condition. + */ + public In in(Select subselect) { + return Conditions.in(this, subselect); + } + + /** + * Creates a new {@code not} {@link In} {@link Condition} given right {@link Expression}s. + * + * @param expression right side of the comparison. + * @return the {@link In} condition. + */ + public In notIn(Expression... expression) { + return Conditions.notIn(this, expression); + } + + /** + * Creates a new {@code not} {@link In} {@link Condition} given a subselects. + * + * @param subselect right side of the comparison. + * @return the {@link In} condition. + */ + public In notIn(Select subselect) { + return Conditions.notIn(this, subselect); + } + + /** + * Creates a {@code IS NULL} condition. + * + * @return the {@link IsNull} condition. + */ + public IsNull isNull() { + return Conditions.isNull(this); + } + + /** + * Creates a {@code IS NOT NULL} condition. + * + * @return the {@link Condition} condition. 
+ */ + public Condition isNotNull() { + return isNull().not(); + } + + // ------------------------------------------------------------------------- + // Methods for Assignment creation. + // ------------------------------------------------------------------------- + + /** + * Creates a value {@link AssignValue assignment}. + * + * @param value the value to assign. + * @return the {@link AssignValue} assignment. + */ + public AssignValue set(Expression value) { + return Assignments.value(this, value); + } + + @Override + public SqlIdentifier getName() { + return name; + } + + /** + * @return the column name as it is used in references. This can be the actual {@link #getName() name} or an + * {@link Aliased#getAlias() alias}. + */ + public SqlIdentifier getReferenceName() { + return getName(); + } + + /** + * @return the {@link Table}. Can be {@literal null} if the column was not referenced in the context of a + * {@link Table}. + */ + @Nullable + public TableLike getTable() { + return table; + } + + @Override + public String toString() { + + return getPrefix() + name; + } + + String getPrefix() { + String prefix = ""; + if (table != null) { + prefix = (table instanceof Aliased ? ((Aliased) table).getAlias() : table.getName()) + "."; + } + return prefix; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + Column column = (Column) o; + return name.equals(column.name) && table.equals(column.table); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), name, table); + } + + /** + * {@link Aliased} {@link Column} implementation. 
+ */ + static class AliasedColumn extends Column implements Aliased { + + private final SqlIdentifier alias; + + private AliasedColumn(String name, TableLike table, String alias) { + super(name, table); + this.alias = SqlIdentifier.unquoted(alias); + } + + private AliasedColumn(SqlIdentifier name, TableLike table, SqlIdentifier alias) { + super(name, table); + this.alias = alias; + } + + @Override + public SqlIdentifier getAlias() { + return alias; + } + + @Override + public SqlIdentifier getReferenceName() { + return getAlias(); + } + + @Override + public Column from(Table table) { + + Assert.notNull(table, "Table must not be null"); + + return new AliasedColumn(getName(), table, getAlias()); + } + + @Override + public String toString() { + return getPrefix() + getName() + " AS " + getAlias(); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + AliasedColumn that = (AliasedColumn) o; + return alias.equals(that.alias); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), alias); + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Comparison.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Comparison.java new file mode 100644 index 0000000000..d7bec78feb --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Comparison.java @@ -0,0 +1,122 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * Comparing {@link Condition} comparing two {@link Expression}s. + *

+ * Results in a rendered condition: {@code } (e.g. {@code col = 'predicate'}. + *

+ * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +public class Comparison extends AbstractSegment implements Condition { + + private final Expression left; + private final String comparator; + private final Expression right; + + private Comparison(Expression left, String comparator, Expression right) { + + super(left, right); + + this.left = left; + this.comparator = comparator; + this.right = right; + } + + /** + * Creates a new {@link Comparison} {@link Condition} given two {@link Expression}s. + * + * @param leftColumnOrExpression the left {@link Expression}. + * @param comparator the comparator. + * @param rightColumnOrExpression the right {@link Expression}. + * @return the {@link Comparison} condition. + */ + public static Comparison create(Expression leftColumnOrExpression, String comparator, + Expression rightColumnOrExpression) { + + Assert.notNull(leftColumnOrExpression, "Left expression must not be null"); + Assert.notNull(comparator, "Comparator must not be null"); + Assert.notNull(rightColumnOrExpression, "Right expression must not be null"); + + return new Comparison(leftColumnOrExpression, comparator, rightColumnOrExpression); + } + + /** + * Creates a new {@link Comparison} from simple {@literal StringP} arguments + * + * @param unqualifiedColumnName gets turned in a {@link Expressions#just(String)} and is expected to be an unqualified + * unique column name but also could be an verbatim expression. Must not be {@literal null}. + * @param comparator must not be {@literal null}. + * @param rightValue is considered a {@link Literal}. Must not be {@literal null}. + * @return a new {@literal Comparison} of the first with the third argument using the second argument as comparison + * operator. Guaranteed to be not {@literal null}. 
+ * @since 2.3 + */ + public static Comparison create(String unqualifiedColumnName, String comparator, Object rightValue) { + + Assert.notNull(unqualifiedColumnName, "UnqualifiedColumnName must not be null"); + Assert.notNull(comparator, "Comparator must not be null"); + Assert.notNull(rightValue, "RightValue must not be null"); + + return new Comparison(Expressions.just(unqualifiedColumnName), comparator, SQL.literalOf(rightValue)); + } + + @Override + public Condition not() { + + if ("=".equals(comparator)) { + return new Comparison(left, "!=", right); + } + + if ("!=".equals(comparator)) { + return new Comparison(left, "=", right); + } + + return new Not(this); + } + + /** + * @return the left {@link Expression}. + */ + public Expression getLeft() { + return left; + } + + /** + * @return the comparator. + */ + public String getComparator() { + return comparator; + } + + /** + * @return the right {@link Expression}. + */ + public Expression getRight() { + return right; + } + + @Override + public String toString() { + return left + " " + comparator + " " + right; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/CompositeSqlIdentifier.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/CompositeSqlIdentifier.java new file mode 100644 index 0000000000..0b232b256c --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/CompositeSqlIdentifier.java @@ -0,0 +1,91 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Arrays; +import java.util.Iterator; +import java.util.StringJoiner; +import java.util.function.UnaryOperator; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Composite {@link SqlIdentifier}. + * + * @author Jens Schauder + * @author Mark Paluch + * @since 2.0 + */ +class CompositeSqlIdentifier implements SqlIdentifier { + + private final SqlIdentifier[] parts; + + CompositeSqlIdentifier(SqlIdentifier... parts) { + + Assert.notNull(parts, "SqlIdentifier parts must not be null"); + Assert.noNullElements(parts, "SqlIdentifier parts must not contain null elements"); + Assert.isTrue(parts.length > 0, "SqlIdentifier parts must not be empty"); + + this.parts = parts; + } + + @Override + public Iterator iterator() { + return Arrays.asList(parts).iterator(); + } + + @Override + public SqlIdentifier transform(UnaryOperator transformationFunction) { + throw new UnsupportedOperationException("Composite SQL Identifiers cannot be transformed"); + } + + @Override + public String toSql(IdentifierProcessing processing) { + + StringJoiner stringJoiner = new StringJoiner("."); + + for (SqlIdentifier namePart : parts) { + stringJoiner.add(namePart.toSql(processing)); + } + + return stringJoiner.toString(); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + + if (o instanceof SqlIdentifier) { + return toString().equals(o.toString()); + } + + return false; + } + + @Override + public int 
hashCode() { + return toString().hashCode(); + } + + @Override + public String toString() { + return toSql(IdentifierProcessing.ANSI); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Condition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Condition.java new file mode 100644 index 0000000000..59ad5f2711 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Condition.java @@ -0,0 +1,56 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * AST {@link Segment} for a condition. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see Conditions + */ +public interface Condition extends Segment, Expression { + + /** + * Combine another {@link Condition} using {@code AND}. + * + * @param other the other {@link Condition}. + * @return the combined {@link Condition}. + */ + default Condition and(Condition other) { + return new AndCondition(this, other); + } + + /** + * Combine another {@link Condition} using {@code OR}. + * + * @param other the other {@link Condition}. + * @return the combined {@link Condition}. + */ + default Condition or(Condition other) { + return new OrCondition(this, other); + } + + /** + * Creates a {@link Condition} that negates this {@link Condition}. 
+ * + * @return the negated {@link Condition}. + */ + default Condition not() { + return new Not(this); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Conditions.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Conditions.java new file mode 100644 index 0000000000..aa7f4e70e7 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Conditions.java @@ -0,0 +1,320 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; + +import org.springframework.util.Assert; + +/** + * Factory for common {@link Condition}s. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Meng Zuozhu + * @author Daniele Canteri + * @since 1.1 + * @see SQL + * @see Expressions + * @see Functions + */ +public abstract class Conditions { + + /** + * Creates a plain {@code sql} {@link Condition}. + * + * @param sql the SQL, must not be {@literal null} or empty. + * @return a SQL {@link Expression}. + */ + public static Condition just(String sql) { + return new ConstantCondition(sql); + } + + /** + * Creates a nested {@link Condition} that is enclosed with parentheses. Useful to combine {@code AND} and {@code OR} + * statements. 
+ * + * @param condition the nested condition. + * @return a {@link NestedCondition}. + * @since 2.0 + */ + public static Condition nest(Condition condition) { + return new NestedCondition(condition); + } + + /** + * Creates a NOT {@link Condition} that reverses the condition. + * + * @param condition the condition to {@code NOT}. + * @return a NOT {@link Condition}. + * @since 3.1.6 + */ + public static Condition not(Condition condition) { + return new Not(condition); + } + + /** + * Creates a {@code IS NULL} condition. + * + * @param expression the expression to check for nullability, must not be {@literal null}. + * @return the {@code IS NULL} condition. + */ + public static IsNull isNull(Expression expression) { + return IsNull.create(expression); + } + + /** + * Creates a {@code =} (equals) {@link Condition}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public static Comparison isEqual(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Comparison.create(leftColumnOrExpression, "=", rightColumnOrExpression); + } + + /** + * Creates a {@code !=} (not equals) {@link Condition}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public static Comparison isNotEqual(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Comparison.create(leftColumnOrExpression, "!=", rightColumnOrExpression); + } + + /** + * Creates a {@code BETWEEN} {@link Condition}. + * + * @param columnOrExpression left side of the comparison. + * @param begin begin value of the comparison. + * @param end end value of the comparison. + * @return the {@link Comparison} condition. 
+ * @since 2.0 + */ + public static Between between(Expression columnOrExpression, Expression begin, Expression end) { + return Between.create(columnOrExpression, begin, end); + } + + /** + * Creates a {@code NOT BETWEEN} {@link Condition}. + * + * @param columnOrExpression left side of the comparison. + * @param begin begin value of the comparison. + * @param end end value of the comparison. + * @return the {@link Comparison} condition. + * @since 2.0 + */ + public static Between notBetween(Expression columnOrExpression, Expression begin, Expression end) { + return between(columnOrExpression, begin, end).not(); + } + + /** + * Creates a {@code <} (less) {@link Condition} comparing {@code left} is less than {@code right}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public static Comparison isLess(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Comparison.create(leftColumnOrExpression, "<", rightColumnOrExpression); + } + + /** + * Creates a {@code <=} (less or equal to) {@link Condition} comparing {@code left} is less than or equal to + * {@code right}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public static Comparison isLessOrEqualTo(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Comparison.create(leftColumnOrExpression, "<=", rightColumnOrExpression); + } + + /** + * Creates a {@code <=} (greater ) {@link Condition} comparing {@code left} is greater than {@code right}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. 
+ */ + public static Comparison isGreater(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Comparison.create(leftColumnOrExpression, ">", rightColumnOrExpression); + } + + /** + * Creates a {@code <=} (greater or equal to) {@link Condition} comparing {@code left} is greater than or equal to + * {@code right}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public static Comparison isGreaterOrEqualTo(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Comparison.create(leftColumnOrExpression, ">=", rightColumnOrExpression); + } + + /** + * Creates a {@code LIKE} {@link Condition}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + */ + public static Like like(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Like.create(leftColumnOrExpression, rightColumnOrExpression); + } + + /** + * Creates a {@code NOT LIKE} {@link Condition}. + * + * @param leftColumnOrExpression left side of the comparison. + * @param rightColumnOrExpression right side of the comparison. + * @return the {@link Comparison} condition. + * @since 2.0 + */ + public static Like notLike(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + return Like.create(leftColumnOrExpression, rightColumnOrExpression).not(); + } + + /** + * Creates a {@code IN} {@link Condition clause}. + * + * @param columnOrExpression left side of the comparison. + * @param arg IN argument. + * @return the {@link In} condition. 
+ */ + public static In in(Expression columnOrExpression, Expression arg) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(arg, "Expression argument must not be null"); + + return In.create(columnOrExpression, arg); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static Condition in(Expression columnOrExpression, Collection expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return In.create(columnOrExpression, new ArrayList<>(expressions)); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static In in(Expression columnOrExpression, Expression... expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return In.create(columnOrExpression, Arrays.asList(expressions)); + } + + /** + * Creates a {@code IN} {@link Condition clause} for a {@link Select subselect}. + * + * @param column the column to compare. + * @param subselect the subselect. + * @return the {@link In} condition. 
+ */ + public static In in(Column column, Select subselect) { + + Assert.notNull(column, "Column must not be null"); + Assert.notNull(subselect, "Subselect must not be null"); + + return in(column, new SubselectExpression(subselect)); + } + + /** + * Creates a {@code NOT IN} {@link Condition clause}. + * + * @param columnOrExpression left side of the comparison. + * @param arg IN argument. + * @return the {@link In} condition. + */ + public static In notIn(Expression columnOrExpression, Expression arg) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(arg, "Expression argument must not be null"); + + return In.createNotIn(columnOrExpression, arg); + } + + /** + * Creates a new {@code NOT IN} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static Condition notIn(Expression columnOrExpression, Collection expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return In.createNotIn(columnOrExpression, new ArrayList<>(expressions)); + } + + /** + * Creates a new {@code NOT IN} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In NOT IN} {@link Condition}. + */ + public static In notIn(Expression columnOrExpression, Expression... 
expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return In.createNotIn(columnOrExpression, Arrays.asList(expressions)); + } + + /** + * Creates a {@code NOT IN} {@link Condition clause} for a {@link Select subselect}. + * + * @param column the column to compare. + * @param subselect the subselect. + * @return the {@link In NOT IN} condition. + */ + public static In notIn(Column column, Select subselect) { + + Assert.notNull(column, "Column must not be null"); + Assert.notNull(subselect, "Subselect must not be null"); + + return notIn(column, new SubselectExpression(subselect)); + } + + // Utility constructor. + private Conditions() {} +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/ConstantCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/ConstantCondition.java new file mode 100644 index 0000000000..8d8b6a1229 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/ConstantCondition.java @@ -0,0 +1,36 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * {@link Condition} representing fixed sql predicate. 
+ * + * @author Daniele Canteri + * @since 2.3 + */ +public class ConstantCondition extends AbstractSegment implements Condition { + + private final String condition; + + ConstantCondition(String condition) { + this.condition = condition; + } + + @Override + public String toString() { + return condition; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultDelete.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultDelete.java new file mode 100644 index 0000000000..a475d71fe5 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultDelete.java @@ -0,0 +1,67 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Default {@link Delete} implementation. + * + * @author Mark Paluch + * @since 1.1 + */ +class DefaultDelete implements Delete { + + private final From from; + private final @Nullable Where where; + + DefaultDelete(Table table, @Nullable Condition where) { + + this.from = new From(table); + this.where = where != null ? 
new Where(where) : null; + } + + @Override + public void visit(Visitor visitor) { + + Assert.notNull(visitor, "Visitor must not be null"); + + visitor.enter(this); + + from.visit(visitor); + + if (where != null) { + where.visit(visitor); + } + + visitor.leave(this); + } + + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + + builder.append("DELETE ").append(this.from); + + if (this.where != null) { + builder.append(' ').append(this.where); + } + + return builder.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultDeleteBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultDeleteBuilder.java new file mode 100644 index 0000000000..3d2d3169c9 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultDeleteBuilder.java @@ -0,0 +1,74 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Default {@link SelectBuilder} implementation. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +class DefaultDeleteBuilder implements DeleteBuilder, DeleteBuilder.DeleteWhereAndOr, DeleteBuilder.DeleteWhere { + + private @Nullable Table from; + private @Nullable Condition where; + + @Override + public DeleteWhere from(Table table) { + + Assert.notNull(table, "Table must not be null"); + + this.from = table; + return this; + } + + @Override + public DeleteWhereAndOr where(Condition condition) { + + Assert.notNull(condition, "Where Condition must not be null"); + this.where = condition; + return this; + } + + @Override + public DeleteWhereAndOr and(Condition condition) { + + Assert.notNull(condition, "Condition must not be null"); + this.where = this.where.and(condition); + return this; + } + + @Override + public DeleteWhereAndOr or(Condition condition) { + + Assert.notNull(condition, "Condition must not be null"); + this.where = this.where.or(condition); + return this; + } + + @Override + public Delete build() { + + DefaultDelete delete = new DefaultDelete(this.from, this.where); + + DeleteValidator.validate(delete); + + return delete; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultIdentifierProcessing.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultIdentifierProcessing.java new file mode 100644 index 0000000000..222093df32 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultIdentifierProcessing.java @@ -0,0 +1,44 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * An {@link IdentifierProcessing} implementation based on two implementations for the quoting and for the letter case + * standardization. + * + * @author Jens Schauder + * @since 2.0 + */ +class DefaultIdentifierProcessing implements IdentifierProcessing { + + private final Quoting quoting; + private final LetterCasing letterCasing; + + DefaultIdentifierProcessing(Quoting quoting, LetterCasing letterCasing) { + this.quoting = quoting; + this.letterCasing = letterCasing; + } + + @Override + public String quote(String identifier) { + return quoting.apply(identifier); + } + + @Override + public String standardizeLetterCase(String identifier) { + return letterCasing.apply(identifier); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultInsert.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultInsert.java new file mode 100644 index 0000000000..2a5d4540bd --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultInsert.java @@ -0,0 +1,72 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Default {@link Insert} implementation. + * + * @author Mark Paluch + * @since 1.1 + */ +class DefaultInsert implements Insert { + + private final Into into; + private final List columns; + private final Values values; + + DefaultInsert(@Nullable Table into, List columns, List values) { + this.into = new Into(into); + this.columns = new ArrayList<>(columns); + this.values = new Values(new ArrayList<>(values)); + } + + @Override + public void visit(Visitor visitor) { + + Assert.notNull(visitor, "Visitor must not be null"); + + visitor.enter(this); + + into.visit(visitor); + columns.forEach(it -> it.visit(visitor)); + values.visit(visitor); + + visitor.leave(this); + } + + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + + builder.append("INSERT ").append(this.into); + + if (!this.columns.isEmpty()) { + builder.append(" (").append(StringUtils.collectionToDelimitedString(this.columns, ", ")).append(")"); + } + + builder.append(" ").append(this.values); + + return builder.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultInsertBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultInsertBuilder.java new file mode 100644 index 
0000000000..91d2942df5 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultInsertBuilder.java @@ -0,0 +1,108 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Default {@link InsertBuilder} implementation. + * + * @author Mark Paluch + * @since 1.1 + */ +class DefaultInsertBuilder + implements InsertBuilder, InsertBuilder.InsertIntoColumnsAndValuesWithBuild, InsertBuilder.InsertValuesWithBuild { + + private @Nullable Table into; + private List columns = new ArrayList<>(); + private List values = new ArrayList<>(); + + @Override + public InsertIntoColumnsAndValuesWithBuild into(Table table) { + + Assert.notNull(table, "Insert Into Table must not be null"); + + this.into = table; + return this; + } + + @Override + public InsertIntoColumnsAndValuesWithBuild column(Column column) { + + Assert.notNull(column, "Column must not be null"); + + this.columns.add(column); + + return this; + } + + @Override + public InsertIntoColumnsAndValuesWithBuild columns(Column... 
columns) { + + Assert.notNull(columns, "Columns must not be null"); + + return columns(Arrays.asList(columns)); + } + + @Override + public InsertIntoColumnsAndValuesWithBuild columns(Collection columns) { + + Assert.notNull(columns, "Columns must not be null"); + + this.columns.addAll(columns); + + return this; + } + + @Override + public InsertValuesWithBuild value(Expression value) { + + Assert.notNull(value, "Value must not be null"); + + this.values.add(value); + + return this; + } + + @Override + public InsertValuesWithBuild values(Expression... values) { + + Assert.notNull(values, "Values must not be null"); + + return values(Arrays.asList(values)); + } + + @Override + public InsertValuesWithBuild values(Collection values) { + + Assert.notNull(values, "Values must not be null"); + + this.values.addAll(values); + + return this; + } + + @Override + public Insert build() { + return new DefaultInsert(this.into, this.columns, this.values); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSelect.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSelect.java new file mode 100644 index 0000000000..9aa5975dd1 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSelect.java @@ -0,0 +1,117 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.OptionalLong; +import java.util.function.Consumer; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Default {@link Select} implementation. + * + * @author Mark Paluch + * @author Myeonghyeon Lee + * @since 1.1 + */ +class DefaultSelect implements Select { + + private final boolean distinct; + private final SelectList selectList; + private final From from; + private final long limit; + private final long offset; + private final List joins; + private final @Nullable Where where; + private final List orderBy; + private final @Nullable LockMode lockMode; + + DefaultSelect(boolean distinct, List selectList, List from, long limit, long offset, + List joins, @Nullable Condition where, List orderBy, @Nullable LockMode lockMode) { + + this.distinct = distinct; + this.selectList = new SelectList(new ArrayList<>(selectList)); + this.from = new From(new ArrayList<>(from)); + this.limit = limit; + this.offset = offset; + this.joins = new ArrayList<>(joins); + this.orderBy = Collections.unmodifiableList(new ArrayList<>(orderBy)); + this.where = where != null ? new Where(where) : null; + this.lockMode = lockMode; + } + + @Override + public From getFrom() { + return this.from; + } + + @Override + public List getOrderBy() { + return this.orderBy; + } + + @Override + public OptionalLong getLimit() { + return limit == -1 ? OptionalLong.empty() : OptionalLong.of(limit); + } + + @Override + public OptionalLong getOffset() { + return offset == -1 ? 
OptionalLong.empty() : OptionalLong.of(offset); + } + + @Override + public boolean isDistinct() { + return distinct; + } + + @Nullable + @Override + public LockMode getLockMode() { + return lockMode; + } + + @Override + public void visit(Visitor visitor) { + + Assert.notNull(visitor, "Visitor must not be null"); + + Consumer action = it -> it.visit(visitor); + + visitor.enter(this); + + selectList.visit(visitor); + from.visit(visitor); + joins.forEach(action); + + visitIfNotNull(where, visitor); + + orderBy.forEach(action); + + visitor.leave(this); + } + + private void visitIfNotNull(@Nullable Visitable visitable, Visitor visitor) { + + if (visitable != null) { + visitable.visit(visitor); + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSelectBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSelectBuilder.java new file mode 100644 index 0000000000..0bc6fe2d36 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSelectBuilder.java @@ -0,0 +1,370 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import org.springframework.data.relational.core.sql.Join.JoinType; +import org.springframework.data.relational.core.sql.SelectBuilder.SelectAndFrom; +import org.springframework.data.relational.core.sql.SelectBuilder.SelectFromAndJoin; +import org.springframework.data.relational.core.sql.SelectBuilder.SelectWhereAndOr; +import org.springframework.lang.Nullable; + +/** + * Default {@link SelectBuilder} implementation. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @since 1.1 + */ +class DefaultSelectBuilder implements SelectBuilder, SelectAndFrom, SelectFromAndJoin, SelectWhereAndOr { + + private boolean distinct = false; + private final List selectList = new ArrayList<>(); + private final List from = new ArrayList<>(); + private long limit = -1; + private long offset = -1; + private final List joins = new ArrayList<>(); + private @Nullable Condition where; + private final List orderBy = new ArrayList<>(); + private @Nullable LockMode lockMode; + + @Override + public SelectBuilder top(int count) { + + limit = count; + return this; + } + + @Override + public DefaultSelectBuilder select(Expression expression) { + selectList.add(expression); + return this; + } + + @Override + public DefaultSelectBuilder select(Expression... 
expressions) { + selectList.addAll(Arrays.asList(expressions)); + return this; + } + + @Override + public DefaultSelectBuilder select(Collection expressions) { + selectList.addAll(expressions); + return this; + } + + @Override + public DefaultSelectBuilder distinct() { + distinct = true; + return this; + } + + @Override + public SelectFromAndJoin from(String table) { + return from(Table.create(table)); + } + + @Override + public SelectFromAndJoin from(TableLike table) { + from.add(table); + return this; + } + + @Override + public SelectFromAndJoin from(TableLike... tables) { + from.addAll(Arrays.asList(tables)); + return this; + } + + @Override + public SelectFromAndJoin from(Collection tables) { + from.addAll(tables); + return this; + } + + @Override + public SelectFromAndJoin limitOffset(long limit, long offset) { + this.limit = limit; + this.offset = offset; + return this; + } + + @Override + public SelectFromAndJoin limit(long limit) { + this.limit = limit; + return this; + } + + @Override + public SelectFromAndJoin offset(long offset) { + this.offset = offset; + return this; + } + + @Override + public DefaultSelectBuilder orderBy(OrderByField... orderByFields) { + + this.orderBy.addAll(Arrays.asList(orderByFields)); + + return this; + } + + @Override + public DefaultSelectBuilder orderBy(Collection orderByFields) { + + this.orderBy.addAll(orderByFields); + + return this; + } + + @Override + public DefaultSelectBuilder orderBy(Expression... 
columns) { + + for (Expression column : columns) { + this.orderBy.add(OrderByField.from(column)); + } + + return this; + } + + @Override + public SelectWhereAndOr where(Condition condition) { + + where = condition; + return this; + } + + @Override + public SelectWhereAndOr and(Condition condition) { + + where = where.and(condition); + return this; + } + + @Override + public SelectWhereAndOr or(Condition condition) { + + where = where.or(condition); + return this; + } + + @Override + public SelectOn join(String table) { + return join(Table.create(table)); + } + + @Override + public SelectOn join(TableLike table) { + return new JoinBuilder(table, this); + } + + @Override + public SelectOn leftOuterJoin(TableLike table) { + return new JoinBuilder(table, this, JoinType.LEFT_OUTER_JOIN); + } + + @Override + public SelectOn join(TableLike table, JoinType joinType) { + return new JoinBuilder(table, this, joinType); + } + + public DefaultSelectBuilder join(Join join) { + this.joins.add(join); + + return this; + } + + @Override + public SelectLock lock(LockMode lockMode) { + + this.lockMode = lockMode; + return this; + } + + @Override + public Select build(boolean validate) { + + DefaultSelect select = new DefaultSelect(distinct, selectList, from, limit, offset, joins, where, orderBy, + lockMode); + + if (validate) { + SelectValidator.validate(select); + } + return select; + } + + /** + * Delegation builder to construct JOINs. 
+ */ + static class JoinBuilder implements SelectOn, SelectOnConditionComparison, SelectFromAndJoinCondition { + + private final TableLike table; + private final DefaultSelectBuilder selectBuilder; + private final JoinType joinType; + private @Nullable Expression from; + private @Nullable Expression to; + private @Nullable Condition condition; + + JoinBuilder(TableLike table, DefaultSelectBuilder selectBuilder, JoinType joinType) { + + this.table = table; + this.selectBuilder = selectBuilder; + this.joinType = joinType; + } + + JoinBuilder(TableLike table, DefaultSelectBuilder selectBuilder) { + this(table, selectBuilder, JoinType.JOIN); + } + + @Override + public SelectOnConditionComparison on(Expression column) { + + this.from = column; + return this; + } + + @Override + public SelectFromAndJoinCondition on(Condition condition) { + + if (this.condition == null) { + this.condition = condition; + } else { + this.condition = this.condition.and(condition); + } + + return this; + } + + @Override + public JoinBuilder equals(Expression column) { + this.to = column; + return this; + } + + @Override + public SelectOnConditionComparison and(Expression column) { + + finishCondition(); + this.from = column; + return this; + } + + private void finishCondition() { + + // Nothing to do if a complete join condition was used. + if (from == null && to == null) { + return; + } + + Comparison comparison = Comparison.create(from, "=", to); + + if (condition == null) { + condition = comparison; + } else { + condition = condition.and(comparison); + } + + } + + private Join finishJoin() { + finishCondition(); + return new Join(joinType, table, condition); + } + + @Override + public SelectOrdered orderBy(OrderByField... 
orderByFields) { + selectBuilder.join(finishJoin()); + return selectBuilder.orderBy(orderByFields); + } + + @Override + public SelectOrdered orderBy(Collection orderByFields) { + selectBuilder.join(finishJoin()); + return selectBuilder.orderBy(orderByFields); + } + + @Override + public SelectOrdered orderBy(Expression... columns) { + selectBuilder.join(finishJoin()); + return selectBuilder.orderBy(columns); + } + + @Override + public SelectWhereAndOr where(Condition condition) { + selectBuilder.join(finishJoin()); + return selectBuilder.where(condition); + } + + @Override + public SelectOn join(String table) { + selectBuilder.join(finishJoin()); + return selectBuilder.join(table); + } + + @Override + public SelectOn join(TableLike table) { + selectBuilder.join(finishJoin()); + return selectBuilder.join(table); + } + + @Override + public SelectOn leftOuterJoin(TableLike table) { + selectBuilder.join(finishJoin()); + return selectBuilder.leftOuterJoin(table); + } + + @Override + public SelectOn join(TableLike table, JoinType joinType) { + selectBuilder.join(finishJoin()); + return selectBuilder.join(table, joinType); + } + + @Override + public SelectFromAndJoin limitOffset(long limit, long offset) { + selectBuilder.join(finishJoin()); + return selectBuilder.limitOffset(limit, offset); + } + + @Override + public SelectFromAndJoin limit(long limit) { + selectBuilder.join(finishJoin()); + return selectBuilder.limit(limit); + } + + @Override + public SelectFromAndJoin offset(long offset) { + selectBuilder.join(finishJoin()); + return selectBuilder.offset(offset); + } + + @Override + public SelectLock lock(LockMode lockMode) { + selectBuilder.join(finishJoin()); + return selectBuilder.lock(lockMode); + } + + @Override + public Select build(boolean validate) { + selectBuilder.join(finishJoin()); + return selectBuilder.build(validate); + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSqlIdentifier.java 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSqlIdentifier.java new file mode 100644 index 0000000000..f8de07d72a --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultSqlIdentifier.java @@ -0,0 +1,102 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Collections; +import java.util.Iterator; +import java.util.function.UnaryOperator; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Default {@link SqlIdentifier} implementation using a {@code name} and whether the identifier is quoted. + * + * @author Jens Schauder + * @author Mark Paluch + * @author Kurt Niemi + * @since 2.0 + */ +class DefaultSqlIdentifier implements SqlIdentifier { + + private final String name; + private final boolean quoted; + private final String toString; + private volatile @Nullable CachedSqlName sqlName; + + DefaultSqlIdentifier(String name, boolean quoted) { + + Assert.hasText(name, "A database object name must not be null or empty"); + + this.name = name; + this.quoted = quoted; + this.toString = quoted ? toSql(IdentifierProcessing.ANSI) : this.name; + } + + @Override + public Iterator iterator() { + return Collections. 
singleton(this).iterator(); + } + + @Override + public SqlIdentifier transform(UnaryOperator transformationFunction) { + + Assert.notNull(transformationFunction, "Transformation function must not be null"); + + return new DefaultSqlIdentifier(transformationFunction.apply(name), quoted); + } + + @Override + public String toSql(IdentifierProcessing processing) { + + // using a local copy of volatile this.sqlName to ensure thread safety. + CachedSqlName sqlName = this.sqlName; + if (sqlName == null || sqlName.processing != processing) { + + this.sqlName = sqlName = new CachedSqlName(processing, quoted ? processing.quote(name) : name); + return sqlName.sqlName(); + } + + return sqlName.sqlName(); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + + if (o instanceof SqlIdentifier) { + return toString().equals(o.toString()); + } + + return false; + } + + @Override + public int hashCode() { + return toString().hashCode(); + } + + @Override + public String toString() { + return toString; + } + + record CachedSqlName(IdentifierProcessing processing, String sqlName) { + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultUpdate.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultUpdate.java new file mode 100644 index 0000000000..f6c4c0c377 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultUpdate.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Default {@link Update} implementation. + * + * @author Mark Paluch + * @since 1.1 + */ +class DefaultUpdate implements Update { + + private final Table table; + private final List assignments; + private final @Nullable Where where; + + DefaultUpdate(Table table, List assignments, @Nullable Condition where) { + this.table = table; + this.assignments = new ArrayList<>(assignments); + this.where = where != null ? 
new Where(where) : null; + } + + @Override + public void visit(Visitor visitor) { + + Assert.notNull(visitor, "Visitor must not be null"); + + visitor.enter(this); + + this.table.visit(visitor); + this.assignments.forEach(it -> it.visit(visitor)); + + if (this.where != null) { + this.where.visit(visitor); + } + + visitor.leave(this); + } + + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("UPDATE ").append(table); + + if (!assignments.isEmpty()) { + builder.append(" SET ").append(StringUtils.collectionToDelimitedString(this.assignments, ", ")); + } + + if (this.where != null) { + builder.append(" ").append(this.where); + } + + return builder.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultUpdateBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultUpdateBuilder.java new file mode 100644 index 0000000000..d0a745d783 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DefaultUpdateBuilder.java @@ -0,0 +1,113 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import org.springframework.data.relational.core.sql.UpdateBuilder.UpdateAssign; +import org.springframework.data.relational.core.sql.UpdateBuilder.UpdateWhere; +import org.springframework.data.relational.core.sql.UpdateBuilder.UpdateWhereAndOr; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Default {@link UpdateBuilder} implementation. + * + * @author Mark Paluch + * @since 1.1 + */ +class DefaultUpdateBuilder implements UpdateBuilder, UpdateWhere, UpdateWhereAndOr, UpdateAssign { + + private @Nullable Table table; + private List assignments = new ArrayList<>(); + private @Nullable Condition where; + + @Override + public UpdateAssign table(Table table) { + + Assert.notNull(table, "Table must not be null"); + + this.table = table; + + return this; + } + + @Override + public DefaultUpdateBuilder set(Assignment assignment) { + + Assert.notNull(assignment, "Assignment must not be null"); + + this.assignments.add(assignment); + + return this; + } + + @Override + public UpdateWhere set(Assignment... 
assignments) { + + Assert.notNull(assignments, "Assignment must not be null"); + + return set(Arrays.asList(assignments)); + } + + @Override + public UpdateWhere set(Collection assignments) { + + Assert.notNull(assignments, "Assignment must not be null"); + + this.assignments.addAll(assignments); + + return this; + } + + @Override + public UpdateWhereAndOr where(Condition condition) { + + Assert.notNull(condition, "Condition must not be null"); + + this.where = condition; + + return this; + } + + @Override + public UpdateWhereAndOr and(Condition condition) { + + Assert.notNull(condition, "Condition must not be null"); + + this.where = this.where.and(condition); + + return this; + } + + @Override + public UpdateWhereAndOr or(Condition condition) { + + Assert.notNull(condition, "Condition must not be null"); + + this.where = this.where.and(condition); + + return this; + } + + @Override + public Update build() { + return new DefaultUpdate(this.table, this.assignments, this.where); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Delete.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Delete.java new file mode 100644 index 0000000000..dc3ab5b6c4 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Delete.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * AST for a {@code DELETE} statement. Visiting order: + *
<ol> + * <li>Self</li> + * <li>{@link Table FROM tables} clause</li> + * <li>{@link Where WHERE} condition</li> + * </ol>
+ * + * @author Mark Paluch + * @since 1.1 + * @see StatementBuilder + * @see DeleteBuilder + * @see SQL + */ +public interface Delete extends Segment, Visitable { + + /** + * Creates a new {@link DeleteBuilder}. + * + * @return a new {@link DeleteBuilder}. + */ + static DeleteBuilder builder() { + return new DefaultDeleteBuilder(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DeleteBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DeleteBuilder.java new file mode 100644 index 0000000000..1a94aeac4c --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DeleteBuilder.java @@ -0,0 +1,90 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Entry point to construct a {@link Delete} statement. + * + * @author Mark Paluch + * @since 1.1 + * @see StatementBuilder + */ +public interface DeleteBuilder { + + /** + * Declare a {@link Table} for {@code DELETE FROM}. + * + * @param table the table to {@code DELETE FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + DeleteWhere from(Table table); + + /** + * Interface exposing {@code WHERE} methods. 
+ */ + interface DeleteWhere extends BuildDelete { + + /** + * Apply a {@code WHERE} clause. + * + * @param condition the {@code WHERE} condition. + * @return {@code this} builder. + * @see Where + * @see Condition + */ + DeleteWhereAndOr where(Condition condition); + } + + /** + * Interface exposing {@code AND}/{@code OR} combinator methods for {@code WHERE} {@link Condition}s. + */ + interface DeleteWhereAndOr extends BuildDelete { + + /** + * Combine the previous {@code WHERE} {@link Condition} using {@code AND}. + * + * @param condition the condition, must not be {@literal null}. + * @return {@code this} builder. + * @see Condition#and(Condition) + */ + DeleteWhereAndOr and(Condition condition); + + /** + * Combine the previous {@code WHERE} {@link Condition} using {@code OR}. + * + * @param condition the condition, must not be {@literal null}. + * @return {@code this} builder. + * @see Condition#or(Condition) + */ + DeleteWhereAndOr or(Condition condition); + } + + /** + * Interface exposing the {@link Delete} build method. + */ + interface BuildDelete { + + /** + * Build the {@link Delete} statement and verify basic relationship constraints such as all referenced columns have + * a {@code FROM} table import. + * + * @return the build and immutable {@link Delete} statement. + */ + Delete build(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DeleteValidator.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DeleteValidator.java new file mode 100644 index 0000000000..166a28a601 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/DeleteValidator.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Validator for {@link Delete} statements. + *

+ * Validates that all {@link Column}s using a table qualifier have a table import from the {@code FROM} clause. + *

+ * + * @author Mark Paluch + * @since 1.1 + */ +class DeleteValidator extends AbstractImportValidator { + + /** + * Validates a {@link Delete} statement. + * + * @param delete the {@link Delete} statement. + * @throws IllegalStateException if the statement is not valid. + */ + public static void validate(Delete delete) { + new DeleteValidator().doValidate(delete); + } + + private void doValidate(Delete select) { + + select.visit(this); + + for (Table table : requiredByWhere) { + if (!from.contains(table)) { + throw new IllegalStateException( + String.format("Required table [%s] by a WHERE predicate not imported by FROM %s", table, from)); + } + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Expression.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Expression.java new file mode 100644 index 0000000000..00750d3527 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Expression.java @@ -0,0 +1,26 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Expression that can be used in select lists. 
+ * + * @author Mark Paluch + * @since 1.1 + * @see SQL + * @see Expressions + */ +public interface Expression extends Segment {} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Expressions.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Expressions.java new file mode 100644 index 0000000000..328c37218a --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Expressions.java @@ -0,0 +1,80 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Factory for common {@link Expression}s. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see SQL + * @see Conditions + * @see Functions + */ +public abstract class Expressions { + + private static Expression ASTERISK = new SimpleExpression("*"); + + /** + * @return a new asterisk {@code *} expression. + */ + public static Expression asterisk() { + return ASTERISK; + } + + /** + * Creates a plain {@code sql} {@link Expression}. + * + * @param sql the SQL, must not be {@literal null} or empty. + * @return a SQL {@link Expression}. + */ + public static Expression just(String sql) { + return new SimpleExpression(sql); + } + + /** + * @return a new {@link Table}.scoped asterisk {@code
.*} expression. + */ + public static Expression asterisk(Table table) { + return table.asterisk(); + } + + /** + * @return a new {@link Cast} expression. + * @since 2.3 + */ + public static Expression cast(Expression expression, String targetType) { + return Cast.create(expression, targetType); + } + + // Utility constructor. + private Expressions() {} + + static public class SimpleExpression extends AbstractSegment implements Expression { + + private final String expression; + + SimpleExpression(String expression) { + this.expression = expression; + } + + @Override + public String toString() { + return expression; + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/FalseCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/FalseCondition.java new file mode 100644 index 0000000000..d8208476a1 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/FalseCondition.java @@ -0,0 +1,34 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Simple condition that evaluates to SQL {@code FALSE}. 
+ * + * @author Mark Paluch + * @since 2.1 + */ +public class FalseCondition implements Condition { + + public static final FalseCondition INSTANCE = new FalseCondition(); + + private FalseCondition() {} + + @Override + public String toString() { + return "1 = 0"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/From.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/From.java new file mode 100644 index 0000000000..bdff49fcd0 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/From.java @@ -0,0 +1,53 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.util.StringUtils; + +/** + * {@code FROM} clause. + * + * @author Mark Paluch + * @since 1.1 + */ +public class From extends AbstractSegment { + + private final List tables; + + From(TableLike... 
tables) { + this(Arrays.asList(tables)); + } + + From(List tables) { + + super(tables.toArray(new TableLike[] {})); + + this.tables = Collections.unmodifiableList(tables); + } + + public List getTables() { + return this.tables; + } + + @Override + public String toString() { + return "FROM " + StringUtils.collectionToDelimitedString(tables, ", "); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Functions.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Functions.java new file mode 100644 index 0000000000..b07339591f --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Functions.java @@ -0,0 +1,140 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.springframework.util.Assert; + +/** + * Factory for common {@link Expression function expressions}. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see SQL + * @see Expressions + * @see Functions + */ +public class Functions { + + // Utility constructor. + private Functions() {} + + /** + * Creates a new {@code COALESCE} function. 
+ * + * @param expressions expressions to apply {@code COALESCE}, must not be {@literal null}. + * @return the new {@link SimpleFunction COALESCE function} for {@code expression}. + * @since 3.2 + */ + public static SimpleFunction coalesce(Expression... expressions) { + return SimpleFunction.create("COALESCE", Arrays.asList(expressions)); + } + + /** + * Creates a new {@code COUNT} function. + * + * @param columns columns to apply {@code COUNT}, must not be {@literal null}. + * @return the new {@link SimpleFunction COUNT function} for {@code columns}. + */ + public static SimpleFunction count(Expression... columns) { + + Assert.notNull(columns, "Columns must not be null"); + Assert.notEmpty(columns, "Columns must contains at least one column"); + + return SimpleFunction.create("COUNT", Arrays.asList(columns)); + } + + /** + * Creates a new {@code COUNT} function. + * + * @param columns columns to apply {@code COUNT}, must not be {@literal null}. + * @return the new {@link SimpleFunction COUNT function} for {@code columns}. + */ + public static SimpleFunction count(Collection columns) { + + Assert.notNull(columns, "Columns must not be null"); + + return SimpleFunction.create("COUNT", new ArrayList<>(columns)); + } + + /** + * Creates a new {@code GREATEST} function. + * + * @param expressions expressions to apply {@code GREATEST}, must not be {@literal null}. + * @return the new {@link SimpleFunction GREATEST function} for {@code expression}. + * @since 3.2 + */ + public static SimpleFunction greatest(Expression... expressions) { + return greatest(Arrays.asList(expressions)); + } + + /** + * Creates a new {@code GREATEST} function. + * + * @param expressions expressions to apply {@code GREATEST}, must not be {@literal null}. + * @return the new {@link SimpleFunction GREATEST function} for {@code expression}. 
+ * @since 3.2 + */ + public static SimpleFunction greatest(List expressions) { + return SimpleFunction.create("GREATEST", expressions); + } + + /** + * Creates a new {@code LEAST} function. + * + * @param expressions expressions to apply {@code LEAST}, must not be {@literal null}. + * @return the new {@link SimpleFunction LEAST function} for {@code expression}. + * @since 3.2 + */ + public static SimpleFunction least(Expression... expressions) { + return SimpleFunction.create("LEAST", Arrays.asList(expressions)); + } + + /** + * Creates a new {@code LOWER} function. + * + * @param expression expression to apply {@code LOWER}, must not be {@literal null}. + * @return the new {@link SimpleFunction LOWER function} for {@code expression}. + * @since 2.0 + */ + public static SimpleFunction lower(Expression expression) { + + Assert.notNull(expression, "Columns must not be null"); + + return SimpleFunction.create("LOWER", Collections.singletonList(expression)); + } + + /** + * Creates a new {@code UPPER} function. + * + * @param expression expression to apply {@code UPPER}, must not be {@literal null}. + * @return the new {@link SimpleFunction UPPER function} for {@code expression}. + * @since 2.0 + */ + public static SimpleFunction upper(Expression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return SimpleFunction.create("UPPER", Collections.singletonList(expression)); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/IdentifierProcessing.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/IdentifierProcessing.java new file mode 100644 index 0000000000..f5f93e0fbf --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/IdentifierProcessing.java @@ -0,0 +1,144 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * An interface describing the processing steps for the conversion of {@link SqlIdentifier} to SQL snippets or column + * names. + * + * @author Jens Schauder + * @since 2.0 + */ +public interface IdentifierProcessing { + + /** + * An {@link IdentifierProcessing} that can be used for databases adhering to the SQL standard which uses double + * quotes ({@literal "}) for quoting and makes unquoted literals equivalent to upper case. + */ + IdentifierProcessing ANSI = create(Quoting.ANSI, LetterCasing.UPPER_CASE); + + /** + * An {@link IdentifierProcessing} without applying transformations. + */ + IdentifierProcessing NONE = create(Quoting.NONE, LetterCasing.AS_IS); + + /** + * Create a {@link IdentifierProcessing} rule given {@link Quoting} and {@link LetterCasing} rules. + * + * @param quoting quoting rules. + * @param letterCasing {@link LetterCasing} rules for identifier normalization. + * @return a new {@link IdentifierProcessing} object. + */ + static DefaultIdentifierProcessing create(Quoting quoting, LetterCasing letterCasing) { + return new DefaultIdentifierProcessing(quoting, letterCasing); + } + + /** + * Converts a {@link String} representing a bare name of an identifier to a {@link String} with proper quoting + * applied. + * + * @param identifier the name of an identifier. Must not be {@literal null}. 
+ * @return a quoted name of an identifier. Guaranteed to be not {@literal null}. + */ + String quote(String identifier); + + /** + * Standardizes the use of upper and lower case letters in an identifier in such a way that semantically the same + * identifier results from the quoted and the unquoted version. If this is not possible use of + * {@link LetterCasing#AS_IS} is recommended. + * + * @param identifier an identifier with arbitrary upper and lower cases. must not be {@literal null}. + * @return an identifier with standardized use of upper and lower case letter. Guaranteed to be not {@literal null}. + */ + String standardizeLetterCase(String identifier); + + /** + * A conversion from unquoted identifiers to quoted identifiers. + * + * @author Jens Schauder + * @since 2.0 + */ + class Quoting { + + public static final Quoting ANSI = new Quoting("\""); + + public static final Quoting NONE = new Quoting(""); + + private final String prefix; + private final String suffix; + + /** + * Constructs a {@literal Quoting} with potential different prefix and suffix used for quoting. + * + * @param prefix a {@literal String} prefixed before the name for quoting it. + * @param suffix a {@literal String} suffixed at the end of the name for quoting it. + */ + public Quoting(String prefix, String suffix) { + + this.prefix = prefix; + this.suffix = suffix; + } + + /** + * Constructs a {@literal Quoting} with the same {@literal String} appended in front and end of an identifier. + * + * @param quoteCharacter the value appended at the beginning and the end of a name in order to quote it. + */ + public Quoting(String quoteCharacter) { + this(quoteCharacter, quoteCharacter); + } + + public String apply(String identifier) { + return prefix + identifier + suffix; + } + } + + /** + * Encapsulates the three kinds of letter casing supported. 
+ * + * @author Jens Schauder + * @since 2.0 + */ + enum LetterCasing { + + UPPER_CASE { + + @Override + public String apply(String identifier) { + return identifier.toUpperCase(); + } + }, + + LOWER_CASE { + + @Override + public String apply(String identifier) { + return identifier.toLowerCase(); + } + }, + + AS_IS { + + @Override + public String apply(String identifier) { + return identifier; + } + }; + + abstract String apply(String identifier); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/In.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/In.java new file mode 100644 index 0000000000..2d66d0796d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/In.java @@ -0,0 +1,178 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * {@code IN} {@link Condition} clause. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @since 1.1 + */ +public class In extends AbstractSegment implements Condition { + + private final Expression left; + private final Collection expressions; + private final boolean notIn; + + private In(Expression left, Collection expressions, boolean notIn) { + + super(toArray(left, expressions)); + + this.left = left; + this.expressions = expressions; + this.notIn = notIn; + } + + private static Segment[] toArray(Expression expression, Collection expressions) { + + Segment[] segments = new Segment[1 + expressions.size()]; + segments[0] = expression; + + int index = 1; + + for (Expression e : expressions) { + segments[index++] = e; + } + + return segments; + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param arg right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static In create(Expression columnOrExpression, Expression arg) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(arg, "Expression argument must not be null"); + + return new In(columnOrExpression, Collections.singletonList(arg), false); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. 
+ */ + public static In create(Expression columnOrExpression, Collection expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return new In(columnOrExpression, new ArrayList<>(expressions), false); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static In create(Expression columnOrExpression, Expression... expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return new In(columnOrExpression, Arrays.asList(expressions), false); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param arg right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static In createNotIn(Expression columnOrExpression, Expression arg) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(arg, "Expression argument must not be null"); + + return new In(columnOrExpression, Collections.singletonList(arg), true); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. 
+ * @return the {@link In} {@link Condition}. + */ + public static In createNotIn(Expression columnOrExpression, Collection expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return new In(columnOrExpression, new ArrayList<>(expressions), true); + } + + /** + * Creates a new {@link In} {@link Condition} given left and right {@link Expression}s. + * + * @param columnOrExpression left hand side of the {@link Condition} must not be {@literal null}. + * @param expressions right hand side (collection {@link Expression}) must not be {@literal null}. + * @return the {@link In} {@link Condition}. + */ + public static In createNotIn(Expression columnOrExpression, Expression... expressions) { + + Assert.notNull(columnOrExpression, "Comparison column or expression must not be null"); + Assert.notNull(expressions, "Expression argument must not be null"); + + return new In(columnOrExpression, Arrays.asList(expressions), true); + } + + @Override + public Condition not() { + return new In(left, expressions, !notIn); + } + + /** + * @return {@code true} if this condition has at least one expression. + * @since 2.1 + */ + public boolean hasExpressions() { + return !expressions.isEmpty(); + } + + @Override + public String toString() { + + if (hasExpressions()) { + return left + (notIn ? " NOT" : "") + " IN (" + StringUtils.collectionToDelimitedString(expressions, ", ") + ")"; + } + + return notIn ? 
TrueCondition.INSTANCE.toString() : FalseCondition.INSTANCE.toString(); + } + + public boolean isNotIn() { + return notIn; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/InlineQuery.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/InlineQuery.java new file mode 100644 index 0000000000..80046aee40 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/InlineQuery.java @@ -0,0 +1,82 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * Represents a inline query within a SQL statement. Typically, used in {@code FROM} or {@code JOIN} clauses. + *

+ * Renders to: {@code (>SELECT<) AS >ALIAS<} in a from or join clause, and to {@code >ALIAS<} when + * used in an expression. + *

+ * Note that this does not implement {@link Aliased} because the Alias is not optional but required and therefore more + * like a name although the SQL term is "alias". + * + * @author Jens Schauder + * @since 2.3 + */ +public class InlineQuery extends Subselect implements TableLike { + + private final SqlIdentifier alias; + + InlineQuery(Select select, SqlIdentifier alias) { + + super(select); + + this.alias = alias; + } + + /** + * Creates a new {@link InlineQuery} using an {@code alias}. + * + * @param select must not be {@literal null}. + * @param alias must not be {@literal null} or empty. + * @return the new {@link InlineQuery} using the {@code alias}. + */ + public static InlineQuery create(Select select, SqlIdentifier alias) { + + Assert.notNull(select, "Select must not be null"); + Assert.notNull(alias, "Alias must not be null or empty"); + + return new InlineQuery(select, alias); + } + + /** + * Creates a new {@link InlineQuery} using an {@code alias}. + * + * @param select must not be {@literal null} or empty. + * @param alias must not be {@literal null} or empty. + * @return the new {@link InlineQuery} using the {@code alias}. + */ + public static InlineQuery create(Select select, String alias) { + return create(select, SqlIdentifier.unquoted(alias)); + } + + @Override + public SqlIdentifier getName() { + return alias; + } + + /** + * @return the table name as it is used in references. This can be the actual {@link #getName() name} or an + * {@link Aliased#getAlias() alias}. 
+ */ + @Override + public SqlIdentifier getReferenceName() { + return alias; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Insert.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Insert.java new file mode 100644 index 0000000000..32703e1442 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Insert.java @@ -0,0 +1,43 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * AST for a {@code INSERT} statement. Visiting order: + *

+ * <ol>
+ * <li>Self</li>
+ * <li>{@link Into INTO table} clause</li>
+ * <li>{@link Column columns}</li>
+ * <li>{@link Values VALUEs}</li>
+ * </ol>
+ * + * @author Mark Paluch + * @since 1.1 + * @see StatementBuilder + * @see InsertBuilder + * @see SQL + */ +public interface Insert extends Segment, Visitable { + + /** + * Creates a new {@link InsertBuilder}. + * + * @return a new {@link InsertBuilder}. + */ + static InsertBuilder builder() { + return new DefaultInsertBuilder(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/InsertBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/InsertBuilder.java new file mode 100644 index 0000000000..af30bf1077 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/InsertBuilder.java @@ -0,0 +1,202 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Collection; + +/** + * Entry point to construct an {@link Insert} statement. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see StatementBuilder + */ +public interface InsertBuilder { + + /** + * Declare a {@link Table} to {@code INSERT INTO}. + * + * @param table the table to {@code INSERT INTO} must not be {@literal null}. + * @return {@code this} builder. + * @see Into + * @see SQL#table(String) + */ + InsertIntoColumnsAndValuesWithBuild into(Table table); + + /** + * Interface exposing {@code WHERE} methods. 
+ */ + interface InsertIntoColumnsAndValues extends InsertValues { + + /** + * Add a {@link Column} to the {@code INTO} column list. Calling this method multiple times will add the + * {@link Column} multiple times. + * + * @param column the column. + * @return {@code this} builder. + * @see Column + */ + InsertIntoColumnsAndValuesWithBuild column(Column column); + + /** + * Add a one or more {@link Column} to the {@code INTO} column list. Calling this method multiple times will add the + * {@link Column} multiple times. + * + * @param columns the columns. + * @return {@code this} builder. + * @see Column + */ + InsertIntoColumnsAndValuesWithBuild columns(Column... columns); + + /** + * Add a one or more {@link Column} to the {@code INTO} column list. Calling this method multiple times will add the + * {@link Column} multiple times. + * + * @param columns the columns. + * @return {@code this} builder. + * @see Column + */ + InsertIntoColumnsAndValuesWithBuild columns(Collection columns); + } + + /** + * Interface exposing {@code value} methods to add values to the {@code INSERT} statement and the build method. + */ + interface InsertIntoColumnsAndValuesWithBuild extends InsertIntoColumnsAndValues, InsertValues, BuildInsert { + + /** + * Add a {@link Expression value} to the {@code VALUES} list. Calling this method multiple times will add a + * {@link Expression value} multiple times. + * + * @param value the value to use. + * @return {@code this} builder. + * @see Column + */ + @Override + InsertValuesWithBuild value(Expression value); + + /** + * Add one or more {@link Expression values} to the {@code VALUES} list. Calling this method multiple times will add + * a {@link Expression values} multiple times. + * + * @param values the values. + * @return {@code this} builder. + * @see Column + */ + @Override + InsertValuesWithBuild values(Expression... values); + + /** + * Add one or more {@link Expression values} to the {@code VALUES} list. 
Calling this method multiple times will add + * a {@link Expression values} multiple times. + * + * @param values the values. + * @return {@code this} builder. + * @see Column + */ + @Override + InsertValuesWithBuild values(Collection values); + } + + /** + * Interface exposing {@code value} methods to add values to the {@code INSERT} statement and the build method. + */ + interface InsertValuesWithBuild extends InsertValues, BuildInsert { + + /** + * Add a {@link Expression value} to the {@code VALUES} list. Calling this method multiple times will add a + * {@link Expression value} multiple times. + * + * @param value the value to use. + * @return {@code this} builder. + * @see Column + */ + @Override + InsertValuesWithBuild value(Expression value); + + /** + * Add one or more {@link Expression values} to the {@code VALUES} list. Calling this method multiple times will add + * a {@link Expression values} multiple times. + * + * @param values the values. + * @return {@code this} builder. + * @see Column + */ + @Override + InsertValuesWithBuild values(Expression... values); + + /** + * Add one or more {@link Expression values} to the {@code VALUES} list. Calling this method multiple times will add + * a {@link Expression values} multiple times. + * + * @param values the values. + * @return {@code this} builder. + * @see Column + */ + @Override + InsertValuesWithBuild values(Collection values); + } + + /** + * Interface exposing {@code value} methods to add values to the {@code INSERT} statement. + */ + interface InsertValues { + + /** + * Add a {@link Expression value} to the {@code VALUES} list. Calling this method multiple times will add a + * {@link Expression value} multiple times. + * + * @param value the value to use. + * @return {@code this} builder. + * @see Column + */ + InsertValuesWithBuild value(Expression value); + + /** + * Add one or more {@link Expression values} to the {@code VALUES} list. 
Calling this method multiple times will add + * a {@link Expression values} multiple times. + * + * @param values the values. + * @return {@code this} builder. + * @see Column + */ + InsertValuesWithBuild values(Expression... values); + + /** + * Add one or more {@link Expression values} to the {@code VALUES} list. Calling this method multiple times will add + * a {@link Expression values} multiple times. + * + * @param values the values. + * @return {@code this} builder. + * @see Column + */ + InsertValuesWithBuild values(Collection values); + } + + /** + * Interface exposing the {@link Insert} build method. + */ + interface BuildInsert { + + /** + * Build the {@link Insert} statement. + * + * @return the build and immutable {@link Insert} statement. + */ + Insert build(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Into.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Into.java new file mode 100644 index 0000000000..c0f76456a7 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Into.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.util.StringUtils; + +/** + * {@code INTO} clause. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +public class Into extends AbstractSegment { + + private final List
<Table> tables; + + Into(Table... tables) { + this(Arrays.asList(tables)); + } + + Into(List<Table>
tables) { + + super(tables.toArray(new Table[] {})); + + this.tables = tables; + } + + @Override + public String toString() { + return "INTO " + StringUtils.collectionToDelimitedString(tables, ", "); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/IsNull.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/IsNull.java new file mode 100644 index 0000000000..885144440e --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/IsNull.java @@ -0,0 +1,69 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * {@code IS NULL} {@link Condition}. + * + * @author Jens Schauder + * @since 1.1 + */ +public class IsNull extends AbstractSegment implements Condition { + + private final Expression expression; + private final boolean negated; + + private IsNull(Expression expression) { + this(expression, false); + } + + private IsNull(Expression expression, boolean negated) { + + super(expression); + + this.expression = expression; + this.negated = negated; + } + + /** + * Creates a new {@link IsNull} expression. + * + * @param expression must not be {@literal null}. 
+ * @return + */ + public static IsNull create(Expression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new IsNull(expression); + } + + @Override + public Condition not() { + return new IsNull(expression, !negated); + } + + public boolean isNegated() { + return negated; + } + + @Override + public String toString() { + return expression + (negated ? " IS NOT NULL" : " IS NULL"); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Join.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Join.java new file mode 100644 index 0000000000..12720b3670 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Join.java @@ -0,0 +1,113 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * {@link Segment} for a {@code JOIN} declaration. + *

+ * <p>
+ * Renders to: {@code JOIN <table> ON <condition>}.
+ *

+ * + * @author Mark Paluch + * @since 1.1 + */ +public class Join extends AbstractSegment { + + private final JoinType type; + private final TableLike joinTable; + private final Condition on; + + Join(JoinType type, TableLike joinTable, Condition on) { + + super(joinTable, on); + + this.joinTable = joinTable; + this.type = type; + this.on = on; + } + + /** + * @return join type. + */ + public JoinType getType() { + return type; + } + + /** + * @return the joined {@link Table}. + */ + public TableLike getJoinTable() { + return joinTable; + } + + /** + * @return join condition (the ON or USING part). + */ + public Condition getOn() { + return on; + } + + @Override + public String toString() { + return type + " " + joinTable + " ON " + on; + } + + public enum JoinType { + + /** + * {@code INNER JOIN} for two tables. + */ + + JOIN("JOIN"), + + /** + * {@code CROSS JOIN} for two tables. + */ + + CROSS_JOIN("CROSS JOIN"), + + /** + * {@code LEFT OUTER JOIN} two tables. + */ + + LEFT_OUTER_JOIN("LEFT OUTER JOIN"), + + /** + * {@code RIGHT OUTER JOIN} two tables. + */ + + RIGHT_OUTER_JOIN("RIGHT OUTER JOIN"), + + /** + * {@code FULL OUTER JOIN} two tables. + */ + + FULL_OUTER_JOIN("FULL OUTER JOIN"); + + private final String sql; + + JoinType(String sql) { + this.sql = sql; + } + + public String getSql() { + return sql; + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Like.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Like.java new file mode 100644 index 0000000000..425121dba5 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Like.java @@ -0,0 +1,86 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * LIKE {@link Condition} comparing two {@link Expression}s. + *

+ * Results in a rendered condition: {@code <left> LIKE <right>}. + *

+ * @author Mark Paluch + * @author Meng Zuozhu + * @since 1.1 + */ +public class Like extends AbstractSegment implements Condition { + + private final Expression left; + private final Expression right; + private final boolean negated; + + private Like(Expression left, Expression right, boolean negated) { + + super(left, right); + + this.left = left; + this.right = right; + this.negated = negated; + } + + /** + * Creates a new {@link Like} {@link Condition} given two {@link Expression}s. + * + * @param leftColumnOrExpression the left {@link Expression}. + * @param rightColumnOrExpression the right {@link Expression}. + * @return the {@link Like} condition. + */ + public static Like create(Expression leftColumnOrExpression, Expression rightColumnOrExpression) { + + Assert.notNull(leftColumnOrExpression, "Left expression must not be null"); + Assert.notNull(rightColumnOrExpression, "Right expression must not be null"); + + return new Like(leftColumnOrExpression, rightColumnOrExpression, false); + } + + /** + * @return the left {@link Expression}. + */ + public Expression getLeft() { + return left; + } + + /** + * @return the right {@link Expression}. + */ + public Expression getRight() { + return right; + } + + public boolean isNegated() { + return negated; + } + + @Override + public Like not() { + return new Like(this.left, this.right, !negated); + } + + @Override + public String toString() { + return left + (negated ? " NOT" : "") + " LIKE " + right; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Literal.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Literal.java new file mode 100644 index 0000000000..d9c46e6cfc --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Literal.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.Nullable; + +/** + * Represents a literal. + * + * @author Mark Paluch + * @since 1.1 + */ +public class Literal extends AbstractSegment implements Expression { + + private @Nullable T content; + + Literal(@Nullable T content) { + this.content = content; + } + + /** + * @return the content of the literal. + */ + @Nullable + public T getContent() { + return content; + } + + @Override + public String toString() { + + if (this.content == null) { + return "NULL"; + } + + return content.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/LockMode.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/LockMode.java new file mode 100644 index 0000000000..1086412065 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/LockMode.java @@ -0,0 +1,27 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Lock Mode Types of SELECT statements. + * + * @author Myeonghyeon Lee + * @since 2.0 + */ +public enum LockMode { + + PESSIMISTIC_READ, PESSIMISTIC_WRITE +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/LockOptions.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/LockOptions.java new file mode 100644 index 0000000000..6179667189 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/LockOptions.java @@ -0,0 +1,47 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * Value object providing lock options to apply to a {@link Select} statement. 
+ * + * @author Myeonghyeon Lee + * @since 2.0 + */ +public class LockOptions { + + private final LockMode lockMode; + private final From from; + + public LockOptions(LockMode lockMode, From from) { + + Assert.notNull(lockMode, "LockMode must not be null"); + Assert.notNull(from, "From must not be null"); + + this.lockMode = lockMode; + this.from = from; + } + + public LockMode getLockMode() { + return this.lockMode; + } + + public From getFrom() { + return this.from; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/MultipleCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/MultipleCondition.java new file mode 100644 index 0000000000..41b8b02caf --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/MultipleCondition.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Arrays; +import java.util.List; +import java.util.StringJoiner; + +/** + * Wrapper for multiple {@link Condition}s. + * + * @author Jens Schauder + * @since 1.1 + */ +public abstract class MultipleCondition extends AbstractSegment implements Condition { + + private final List conditions; + private final String delimiter; + + MultipleCondition(String delimiter, Condition... 
conditions) { + + super(conditions); + + this.delimiter = delimiter; + this.conditions = Arrays.asList(conditions); + } + + public List getConditions() { + return conditions; + } + + @Override + public String toString() { + + StringJoiner joiner = new StringJoiner(delimiter); + conditions.forEach(c -> joiner.add(c.toString())); + return joiner.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Named.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Named.java new file mode 100644 index 0000000000..0324bd8978 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Named.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Named element exposing a {@link #getName() name}. + * + * @author Mark Paluch + * @since 1.1 + */ +public interface Named { + + /** + * @return the name of the underlying element. 
+ */ + SqlIdentifier getName(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/NestedCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/NestedCondition.java new file mode 100644 index 0000000000..9c3711bc10 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/NestedCondition.java @@ -0,0 +1,34 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Condition group wrapping a nested {@link Condition} with parentheses. + * + * @author Mark Paluch + * @since 2.0 + */ +public class NestedCondition extends MultipleCondition implements Condition { + + NestedCondition(Condition condition) { + super("", condition); + } + + @Override + public String toString() { + return "(" + super.toString() + ")"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Not.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Not.java new file mode 100644 index 0000000000..f6d2f99d92 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Not.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * @author Jens Schauder + * @since 1.1 + */ +public class Not extends AbstractSegment implements Condition { + + private final Condition condition; + + Not(Condition condition) { + + super(condition); + + this.condition = condition; + } + + @Override + public Condition not() { + return condition; + } + + @Override + public String toString() { + return "NOT " + condition; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/NumericLiteral.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/NumericLiteral.java new file mode 100644 index 0000000000..743201e5e1 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/NumericLiteral.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.Nullable; + +/** + * Represents a {@link Number} literal. + * + * @author Mark Paluch + * @since 1.1 + */ +public class NumericLiteral extends Literal { + + NumericLiteral(@Nullable Number content) { + super(content); + } + + @Override + @Nullable + public Number getContent() { + return super.getContent(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrCondition.java new file mode 100644 index 0000000000..aef90388c8 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrCondition.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * {@link Condition} representing an {@code OR} relation between two {@link Condition}s. + * + * @author Mark Paluch + * @since 1.1 + * @see Condition#or(Condition) + */ +public class OrCondition extends MultipleCondition { + + OrCondition(Condition... 
conditions) { + super(" OR ", conditions); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrderBy.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrderBy.java new file mode 100644 index 0000000000..b8ba6e0251 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrderBy.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Represents an `ORDER BY` clause. Currently, only used in {@link AnalyticFunction}. + * + * @author Jens Schauder + * @since 2.7 + */ +public class OrderBy extends SegmentList { + + OrderBy(OrderByField... fields) { + super(fields); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrderByField.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrderByField.java new file mode 100644 index 0000000000..2044366440 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/OrderByField.java @@ -0,0 +1,116 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.NullHandling; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Represents a field in the {@code ORDER BY} clause. + * + * @author Mark Paluch + * @author Milan Milanov + * @since 1.1 + */ +public class OrderByField extends AbstractSegment { + + private final Expression expression; + private final @Nullable Sort.Direction direction; + private final Sort.NullHandling nullHandling; + + private OrderByField(Expression expression, @Nullable Direction direction, NullHandling nullHandling) { + + super(expression); + Assert.notNull(expression, "Order by expression must not be null"); + Assert.notNull(nullHandling, "NullHandling by expression must not be null"); + + this.expression = expression; + this.direction = direction; + this.nullHandling = nullHandling; + } + + /** + * Creates a new {@link OrderByField} from an {@link Expression} applying default ordering. + * + * @param expression must not be {@literal null}. + * @return the {@link OrderByField}. + */ + public static OrderByField from(Expression expression) { + return new OrderByField(expression, null, NullHandling.NATIVE); + } + + /** + * Creates a new {@link OrderByField} from an {@link Expression} applying a given ordering. + * + * @param expression must not be {@literal null}. 
+ * @param direction order direction + * @return the {@link OrderByField}. + */ + public static OrderByField from(Expression expression, Direction direction) { + return new OrderByField(expression, direction, NullHandling.NATIVE); + } + + /** + * Creates a new {@link OrderByField} from a the current one using ascending sorting. + * + * @return the new {@link OrderByField} with ascending sorting. + * @see #desc() + */ + public OrderByField asc() { + return new OrderByField(expression, Direction.ASC, nullHandling); + } + + /** + * Creates a new {@link OrderByField} from a the current one using descending sorting. + * + * @return the new {@link OrderByField} with descending sorting. + * @see #asc() + */ + public OrderByField desc() { + return new OrderByField(expression, Direction.DESC, nullHandling); + } + + /** + * Creates a new {@link OrderByField} with {@link NullHandling} applied. + * + * @param nullHandling must not be {@literal null}. + * @return the new {@link OrderByField} with {@link NullHandling} applied. + */ + public OrderByField withNullHandling(NullHandling nullHandling) { + return new OrderByField(expression, direction, nullHandling); + } + + public Expression getExpression() { + return expression; + } + + @Nullable + public Direction getDirection() { + return direction; + } + + public NullHandling getNullHandling() { + return nullHandling; + } + + @Override + public String toString() { + return direction != null ? expression + " " + direction : expression.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SQL.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SQL.java new file mode 100644 index 0000000000..22dabaf31f --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SQL.java @@ -0,0 +1,135 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.data.relational.core.sql.BindMarker.NamedBindMarker; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Utility to create SQL {@link Segment}s. Typically used as entry point to the Statement Builder. Objects and dependent + * objects created by the Query AST are immutable except for builders. + *

+ * The Statement Builder API is intended for framework usage to produce SQL required for framework operations. + *

+ * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see Expressions + * @see Conditions + * @see Functions + * @see StatementBuilder + */ +public abstract class SQL { + + /** + * Creates a new {@link Column} associated with a source {@link Table}. + * + * @param name column name, must not be {@literal null} or empty. + * @param table table name, must not be {@literal null}. + * @return the column with {@code name} associated with {@link Table}. + */ + public static Column column(String name, Table table) { + return Column.create(name, table); + } + + /** + * Creates a new {@link Table}. + * + * @param name table name, must not be {@literal null} or empty. + * @return the column with {@code name}. + */ + public static Table table(String name) { + return Table.create(name); + } + + /** + * Creates a new parameter bind marker. + * + * @return a new {@link BindMarker}. + */ + public static BindMarker bindMarker() { + return new BindMarker(); + } + + /** + * Creates a new parameter bind marker associated with a {@code name} hint. + * + * @param name name hint, must not be {@literal null} or empty. + * @return a new {@link BindMarker}. + */ + public static BindMarker bindMarker(String name) { + + Assert.hasText(name, "Name must not be null or empty"); + + return new NamedBindMarker(name); + } + + /** + * Creates a new {@link BooleanLiteral} rendering either {@code TRUE} or {@literal FALSE} depending on the given + * {@code value}. + * + * @param value the literal content. + * @return a new {@link BooleanLiteral}. + * @since 2.0 + */ + public static BooleanLiteral literalOf(boolean value) { + return new BooleanLiteral(value); + } + + /** + * Creates a new {@link StringLiteral} from the {@code content}. + * + * @param content the literal content. + * @return a new {@link StringLiteral}. 
+ */ + public static StringLiteral literalOf(@Nullable CharSequence content) { + return new StringLiteral(content); + } + + /** + * Creates a new {@link NumericLiteral} from the {@code content}. + * + * @param content the literal content. + * @return a new {@link NumericLiteral}. + */ + public static NumericLiteral literalOf(@Nullable Number content) { + return new NumericLiteral(content); + } + + /** + * Creates a new {@link Literal} from the {@code content}. + * + * @param content the literal content. + * @return a new {@link Literal}. + */ + public static Literal literalOf(@Nullable T content) { + return new Literal<>(content); + } + + /** + * Creates a new {@code NULL} {@link Literal}. + * + * @return a new {@link Literal}. + */ + public static Literal nullLiteral() { + return new Literal<>(null); + } + + // Utility constructor. + private SQL() {} +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Segment.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Segment.java new file mode 100644 index 0000000000..ddc9b7210c --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Segment.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +/** + * Supertype of all Abstract Syntax Tree (AST) segments. Segments are typically immutable and mutator methods return new + * instances instead of changing the called instance. + * + * @author Mark Paluch + * @since 1.1 + */ +public interface Segment extends Visitable { + + /** + * Check whether this {@link Segment} is equal to another {@link Segment}. + *

+ * Equality is typically given if the {@link #toString()} representation matches. + *

+ * + * @param other the reference object with which to compare. + * @return {@literal true} if this object is the same as the {@code other} argument; {@literal false} otherwise. + */ + @Override + boolean equals(Object other); + + /** + * Generate a hash code from this{@link Segment}. + *

+ * Hashcode typically derives from the {@link #toString()} representation so two {@link Segment}s yield the same + * {@link #hashCode()} if their {@link #toString()} representation matches. + *

+ * + * @return a hash code value for this object. + */ + @Override + int hashCode(); + + /** + * Return a SQL string representation of this {@link Segment}. + *

+ * The representation is intended for debugging purposes and an approximation to the generated SQL. While it might + * work in the context of a specific dialect, you should not assume that the {@link #toString()} representation works across + * multiple databases. + *

+ * + * @return a SQL string representation of this {@link Segment}. + */ + @Override + String toString(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SegmentList.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SegmentList.java new file mode 100644 index 0000000000..1a2b55c95f --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SegmentList.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * A list of {@link Segment} instances. Normally used by inheritance to derive a more specific list. + * + * @see org.springframework.data.relational.core.sql.AnalyticFunction.Partition + * @see OrderBy + * @param the type of the elements. + */ +public class SegmentList extends AbstractSegment { + SegmentList(T... 
segments) { + super(segments); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Select.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Select.java new file mode 100644 index 0000000000..b04da5cf82 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Select.java @@ -0,0 +1,82 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.List; +import java.util.OptionalLong; + +import org.springframework.lang.Nullable; + +/** + * AST for a {@code SELECT} statement. Visiting order: + *
    + *
  1. Self
  2. + *
  3. {@link Column SELECT columns}
  4. + *
  5. {@link Table FROM tables} clause
  6. + *
  7. {@link Join JOINs}
  8. + *
  9. {@link Condition WHERE} condition
  10. + *
  11. {@link OrderByField ORDER BY fields}
  12. + *
+ * + * @author Mark Paluch + * @author Myeonghyeon Lee + * @since 1.1 + * @see StatementBuilder + * @see SelectBuilder + * @see SQL + */ +public interface Select extends Segment, Visitable { + + /** + * Creates a new {@link SelectBuilder}. + * + * @return a new {@link SelectBuilder}. + */ + static SelectBuilder builder() { + return new DefaultSelectBuilder(); + } + + From getFrom(); + + /** + * @return the {@link List} of {@link OrderByField ORDER BY} fields. + */ + List getOrderBy(); + + /** + * Optional limit. Used for limit/offset paging. + * + * @return + */ + OptionalLong getLimit(); + + /** + * Optional offset. Used for limit/offset paging. + * + * @return + */ + OptionalLong getOffset(); + + /** + * Flag if this select is to return distinct rows. + * + * @return + */ + boolean isDistinct(); + + @Nullable + LockMode getLockMode(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectBuilder.java new file mode 100644 index 0000000000..8552d09a8b --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectBuilder.java @@ -0,0 +1,602 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import java.util.Collection; + +/** + * Entry point to construct a {@link Select} statement. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @since 1.1 + * @see StatementBuilder + */ +public interface SelectBuilder { + + /** + * Apply a {@code TOP} clause given {@code count}. + * + * @param count the top count. + * @return {@code this} {@link SelectBuilder}. + */ + SelectBuilder top(int count); + + /** + * Include a {@link Expression} in the select list. + * + * @param expression the expression to include. + * @return {@code this} builder. + * @see Table#column(String) + */ + SelectAndFrom select(Expression expression); + + /** + * Include one or more {@link Expression}s in the select list. + * + * @param expressions the expressions to include. + * @return {@code this} builder. + * @see Table#columns(String...) + */ + SelectAndFrom select(Expression... expressions); + + /** + * Include one or more {@link Expression}s in the select list. + * + * @param expressions the expressions to include. + * @return {@code this} builder. + * @see Table#columns(String...) + */ + SelectAndFrom select(Collection expressions); + + /** + * Makes the select statement distinct + * + * @return {@code this} builder. + */ + SelectAndFrom distinct(); + + /** + * Builder exposing {@code SELECT} and {@code FROM} methods. + */ + interface SelectAndFrom extends SelectFrom { + + /** + * Include a {@link Expression} in the select list. Multiple calls to this or other {@code select} methods keep + * adding items to the select list and do not replace previously contained items. + * + * @param expression the expression to include. + * @return {@code this} builder. + * @see Table#column(String) + */ + SelectFrom select(Expression expression); + + /** + * Include one or more {@link Expression}s in the select list. 
Multiple calls to this or other {@code select} + * methods keep adding items to the select list and do not replace previously contained items. + * + * @param expressions the expressions to include. + * @return {@code this} builder. + * @see Table#columns(String...) + */ + SelectFrom select(Expression... expressions); + + /** + * Include one or more {@link Expression}s in the select list. Multiple calls to this or other {@code select} + * methods keep adding items to the select list and do not replace previously contained items. + * + * @param expressions the expressions to include. + * @return {@code this} builder. + * @see Table#columns(String...) + */ + SelectFrom select(Collection expressions); + + /** + * Makes the select statement distinct + * + * @return {@code this} builder. + */ + SelectAndFrom distinct(); + + /** + * Declare a {@link Table} to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods keep + * adding items to the select list and do not replace previously contained items. + * + * @param table the table name to {@code SELECT … FROM} must not be {@literal null} or empty. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(String table); + + /** + * Declare a {@link Table} to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods keep + * adding items to the select list and do not replace previously contained items. + * + * @param table the table to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(TableLike table); + + /** + * Declare one or more {@link Table}s to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods + * keep adding items to the select list and do not replace previously contained items. + * + * @param tables the tables to {@code SELECT … FROM} must not be {@literal null}. 
+ * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(TableLike... tables); + + /** + * Declare one or more {@link Table}s to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods + * keep adding items to the select list and do not replace previously contained items. + * + * @param tables the tables to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(Collection tables); + } + + /** + * Builder exposing {@code FROM} methods. + */ + interface SelectFrom extends BuildSelect { + + /** + * Declare a {@link Table} to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods keep + * adding items to the select list and do not replace previously contained items. + * + * @param table the table name to {@code SELECT … FROM} must not be {@literal null} or empty. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + SelectFromAndOrderBy from(String table); + + /** + * Declare a {@link Table} to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods keep + * adding items to the select list and do not replace previously contained items. + * + * @param table the table to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + SelectFromAndOrderBy from(TableLike table); + + /** + * Declare one or more {@link Table}s to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods + * keep adding items to the select list and do not replace previously contained items. + * + * @param tables the tables to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + SelectFromAndOrderBy from(TableLike... 
tables); + + /** + * Declare one or more {@link Table}s to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods + * keep adding items to the select list and do not replace previously contained items. + * + * @param tables the tables to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + SelectFromAndOrderBy from(Collection tables); + } + + /** + * Builder exposing {@code FROM}, {@code JOIN}, {@code WHERE}, {@code LIMIT/OFFSET} and {@code LOCK} methods. + */ + interface SelectFromAndOrderBy extends SelectFrom, SelectOrdered, SelectLimitOffset, SelectLock, BuildSelect { + + @Override + SelectFromAndOrderBy limitOffset(long limit, long offset); + + @Override + SelectFromAndOrderBy limit(long limit); + + @Override + SelectFromAndOrderBy offset(long offset); + + @Override + SelectFromAndOrderBy from(String table); + + @Override + SelectFromAndOrderBy from(TableLike table); + + @Override + SelectFromAndOrderBy from(TableLike... tables); + + @Override + SelectFromAndOrderBy from(Collection tables); + + @Override + SelectFromAndOrderBy orderBy(Expression... columns); + + @Override + SelectFromAndOrderBy orderBy(OrderByField... orderByFields); + + @Override + SelectFromAndOrderBy orderBy(Collection orderByFields); + } + + /** + * Builder exposing {@code FROM}, {@code JOIN}, {@code WHERE}, {@code LIMIT/OFFSET} and {@code LOCK} methods. + */ + interface SelectFromAndJoin + extends SelectFromAndOrderBy, BuildSelect, SelectJoin, SelectWhere, SelectLimitOffset, SelectLock { + + /** + * Declare a {@link Table} to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods keep + * adding items to the select list and do not replace previously contained items. + * + * @param table the table to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. 
+ * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(TableLike table); + + /** + * Declare one or more {@link Table}s to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods + * keep adding items to the select list and do not replace previously contained items. + * + * @param tables the tables to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(TableLike... tables); + + /** + * Declare one or more {@link Table}s to {@code SELECT … FROM}. Multiple calls to this or other {@code from} methods + * keep adding items to the select list and do not replace previously contained items. + * + * @param tables the tables to {@code SELECT … FROM} must not be {@literal null}. + * @return {@code this} builder. + * @see From + * @see SQL#table(String) + */ + @Override + SelectFromAndJoin from(Collection tables); + + /** + * Apply {@code limit} and {@code offset} parameters to the select statement. To read the first 20 rows from start + * use {@code limitOffset(20, 0)}. to read the next 20 use {@code limitOffset(20, 20)}. + * + * @param limit rows to read. + * @param offset row offset, zero-based. + * @return {@code this} builder. + */ + @Override + SelectFromAndJoin limitOffset(long limit, long offset); + + /** + * Apply a limit of rows to read. + * + * @param limit rows to read. + * @return {@code this} builder. + */ + @Override + SelectFromAndJoin limit(long limit); + + /** + * Apply an offset where to start reading rows. + * + * @param offset start offset. + * @return {@code this} builder. + */ + @Override + SelectFromAndJoin offset(long offset); + } + + /** + * Builder exposing {@code FROM}, {@code WHERE}, {@code LIMIT/OFFSET}, JOIN {@code AND} and {@code LOCK} continuation + * methods. 
+ */ + interface SelectFromAndJoinCondition + extends BuildSelect, SelectJoin, SelectWhere, SelectOnCondition, SelectLimitOffset, SelectLock { + + /** + * Apply {@code limit} and {@code offset} parameters to the select statement. To read the first 20 rows from start + * use {@code limitOffset(20, 0)}. to read the next 20 use {@code limitOffset(20, 20)}. + * + * @param limit rows to read. + * @param offset row offset, zero-based. + * @return {@code this} builder. + */ + @Override + SelectFromAndJoin limitOffset(long limit, long offset); + + /** + * Apply a limit of rows to read. + * + * @param limit rows to read. + * @return {@code this} builder. + */ + @Override + SelectFromAndJoin limit(long limit); + + /** + * Apply an offset where to start reading rows. + * + * @param offset start offset. + * @return {@code this} builder. + */ + @Override + SelectFromAndJoin offset(long offset); + } + + /** + * Limit/offset methods. + */ + interface SelectLimitOffset { + + /** + * Apply {@code limit} and {@code offset} parameters to the select statement. To read the first 20 rows from start + * use {@code limitOffset(20, 0)}. to read the next 20 use {@code limitOffset(20, 20)}. + * + * @param limit rows to read. + * @param offset row offset, zero-based. + * @return {@code this} builder. + */ + SelectLimitOffset limitOffset(long limit, long offset); + + /** + * Apply a limit of rows to read. + * + * @param limit rows to read. + * @return {@code this} builder. + */ + SelectLimitOffset limit(long limit); + + /** + * Apply an offset where to start reading rows. + * + * @param offset start offset. + * @return {@code this} builder. + */ + SelectLimitOffset offset(long offset); + } + + /** + * Builder exposing {@code ORDER BY} and {@code LOCK} methods. + */ + interface SelectOrdered extends SelectLock, BuildSelect { + + /** + * Add one or more {@link Column columns} to order by. + * + * @param columns the columns to order by. + * @return {@code this} builder. 
+ */ + SelectOrdered orderBy(Expression... columns); + + /** + * Add one or more {@link OrderByField order by fields}. + * + * @param orderByFields the fields to order by. + * @return {@code this} builder. + */ + SelectOrdered orderBy(OrderByField... orderByFields); + + /** + * Add one or more {@link OrderByField order by fields}. + * + * @param orderByFields the fields to order by. + * @return {@code this} builder. + */ + SelectOrdered orderBy(Collection orderByFields); + } + + /** + * Interface exposing {@code WHERE}, {@code LOCK} methods. + */ + interface SelectWhere extends SelectOrdered, SelectLock, BuildSelect { + + /** + * Apply a {@code WHERE} clause. + * + * @param condition the {@code WHERE} condition. + * @return {@code this} builder. + * @see Where + * @see Condition + */ + SelectWhereAndOr where(Condition condition); + } + + /** + * Interface exposing {@code AND}/{@code OR} combinator methods for {@code WHERE} {@link Condition}s. + */ + interface SelectWhereAndOr extends SelectOrdered, SelectLock, BuildSelect { + + /** + * Combine the previous {@code WHERE} {@link Condition} using {@code AND}. + * + * @param condition the condition, must not be {@literal null}. + * @return {@code this} builder. + * @see Condition#and(Condition) + */ + SelectWhereAndOr and(Condition condition); + + /** + * Combine the previous {@code WHERE} {@link Condition} using {@code OR}. + * + * @param condition the condition, must not be {@literal null}. + * @return {@code this} builder. + * @see Condition#or(Condition) + */ + SelectWhereAndOr or(Condition condition); + } + + /** + * Interface exposing {@code JOIN} methods. + */ + interface SelectJoin extends SelectLock, BuildSelect { + + /** + * Declare a {@code JOIN} {@code table}. + * + * @param table name of the table, must not be {@literal null} or empty. + * @return {@code this} builder. + * @see Join + * @see SQL#table(String) + */ + SelectOn join(String table); + + /** + * Declare a {@code JOIN} {@link Table}. 
+ * + * @param table name of the table, must not be {@literal null}. + * @return {@code this} builder. + * @see Join + * @see SQL#table(String) + */ + SelectOn join(TableLike table); + + /** + * Declare a {@code LEFT OUTER JOIN} {@link Table}. + * + * @param table must not be {@literal null}. + * @return {@code this} builder. + * @see Join + * @see SQL#table(String) + */ + SelectOn leftOuterJoin(TableLike table); + + /** + * Declare a join, where the join type ({@code INNER}, {@code LEFT OUTER}, {@code RIGHT OUTER}, {@code FULL OUTER}) + * is specified by an extra argument. + * + * @param table the table to join. Must not be {@literal null}. + * @param joinType the type of join. Must not be {@literal null}. + * @return {@code this} builder. + */ + SelectOn join(TableLike table, Join.JoinType joinType); + } + + /** + * Interface exposing {@code ON} methods to declare {@code JOIN} relationships. + */ + interface SelectOn { + + /** + * Declare the source column in the {@code JOIN}. + * + * @param column the source column, must not be {@literal null} or empty. + * @return {@code this} builder. + * @see Table#column(String) + */ + SelectOnConditionComparison on(Expression column); + + /** + * Declare a join {@link Condition condition} in one step. Using conditions allows more flexibility in comparison to + * {@link #on(Expression)} which only allows for equality comparisons chained together with {@code AND}. + * + * @param condition must not be {@literal null}. + * @return {@code this} builder. + * @see Conditions + * @since 2.3 + */ + SelectFromAndJoinCondition on(Condition condition); + } + + /** + * Interface declaring the target column comparison relationship. + */ + interface SelectOnConditionComparison { + + /** + * Declare an equals {@link Condition} between the source column and the target {@link Column}. + * + * @param column the target column, must not be {@literal null}. + * @return {@code this} builder. 
+ * @see Table#column(String) + */ + SelectFromAndJoinCondition equals(Expression column); + } + + /** + * Builder exposing JOIN and {@code JOIN … ON} continuation methods. + */ + interface SelectOnCondition extends SelectJoin, SelectLock, BuildSelect { + + /** + * Declare an additional source column in the {@code JOIN}. + * + * @param column the column, must not be {@literal null}. + * @return {@code this} builder. + * @see Table#column(String) + */ + SelectOnConditionComparison and(Expression column); + } + + /** + * Lock methods. + */ + interface SelectLock extends BuildSelect { + + /** + * Apply lock to read. + * + * @param lockMode lockMode to read. + * @return {@code this} builder. + */ + SelectLock lock(LockMode lockMode); + } + + /** + * Interface exposing the {@link Select} build method. + */ + interface BuildSelect { + + /** + * Build the {@link Select} statement and verify basic relationship constraints such as all referenced columns have + * a {@code FROM} or {@code JOIN} table import. + * + * @return the built and immutable {@link Select} statement. + */ + default Select build() { + return build(true); + } + + /** + * Build the {@link Select} statement. + * + * @param validate whether to validate the generated select by checking basic relationship constraints such as all + * referenced columns have a {@code FROM} or {@code JOIN} table import. + * @return the built and immutable {@link Select} statement. + * @since 3.2 + */ + Select build(boolean validate); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectList.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectList.java new file mode 100644 index 0000000000..ac985c5b18 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectList.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.List; + +import org.springframework.util.StringUtils; + +/** + * Value object representing the select list (selected columns, functions). + * + * @author Mark Paluch + * @since 1.1 + */ +public class SelectList extends AbstractSegment { + + private final List selectList; + + SelectList(List selectList) { + super(selectList.toArray(new Expression[0])); + this.selectList = selectList; + } + + @Override + public String toString() { + return StringUtils.collectionToDelimitedString(selectList, ", "); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectValidator.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectValidator.java new file mode 100644 index 0000000000..60fddb8459 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/SelectValidator.java @@ -0,0 +1,140 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.HashSet; +import java.util.Set; +import java.util.Stack; + +/** + * Validator for {@link Select} statements. + *

+ * Validates that all {@link Column}s using a table qualifier have a table import from either the {@code FROM} or + * {@code JOIN} clause. + *

+ * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class SelectValidator extends AbstractImportValidator { + + private final Stack
+ * .*}. + * + * @return the select all marker for this {@link Table}. + */ + default AsteriskFromTable asterisk() { + return new AsteriskFromTable(this); + } + + /** + * @return the table name. + */ + SqlIdentifier getName(); + + /** + * @return the table name as it is used in references. This can be the actual {@link #getName() name} or an + * {@link Aliased#getAlias() alias}. + */ + SqlIdentifier getReferenceName(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/TrueCondition.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/TrueCondition.java new file mode 100644 index 0000000000..f73e9a2f41 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/TrueCondition.java @@ -0,0 +1,34 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Simple condition that evaluates to SQL {@code TRUE}. 
+ * + * @author Mark Paluch + * @since 2.1 + */ +public class TrueCondition implements Condition { + + public static final TrueCondition INSTANCE = new TrueCondition(); + + private TrueCondition() {} + + @Override + public String toString() { + return "1 = 1"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Update.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Update.java new file mode 100644 index 0000000000..e876661fb7 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Update.java @@ -0,0 +1,43 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * AST for aa {@code UPDATE} statement. Visiting order: + *
+ * <ol>
+ * <li>Self</li>
+ * <li>{@link Table table}</li>
+ * <li>{@link Assignments assignments}</li>
+ * <li>{@link Where WHERE} condition</li>
+ * </ol>
+ * + * @author Mark Paluch + * @since 1.1 + * @see StatementBuilder + * @see SelectBuilder + * @see SQL + */ +public interface Update extends Segment, Visitable { + + /** + * Creates a new {@link UpdateBuilder}. + * + * @return a new {@link UpdateBuilder}. + */ + static UpdateBuilder builder() { + return new DefaultUpdateBuilder(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/UpdateBuilder.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/UpdateBuilder.java new file mode 100644 index 0000000000..61f5dd0ba7 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/UpdateBuilder.java @@ -0,0 +1,122 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Collection; + +/** + * Entry point to construct an {@link Update} statement. + * + * @author Mark Paluch + * @since 1.1 + * @see StatementBuilder + */ +public interface UpdateBuilder { + + /** + * Configure the {@link Table} to which the update is applied. + * + * @param table the table to update. + * @return {@code this} {@link SelectBuilder}. + */ + UpdateAssign table(Table table); + + /** + * Interface exposing {@code SET} methods. + */ + interface UpdateAssign { + + /** + * Apply a {@link Assignment SET assignment}. 
+ * + * @param assignment a single {@link Assignment column assignment}. + * @return {@code this} builder. + * @see Assignment + */ + UpdateWhere set(Assignment assignment); + + /** + * Apply one or more {@link Assignment SET assignments}. + * + * @param assignments the {@link Assignment column assignments}. + * @return {@code this} builder. + * @see Assignment + */ + UpdateWhere set(Assignment... assignments); + + /** + * Apply one or more {@link Assignment SET assignments}. + * + * @param assignments the {@link Assignment column assignments}. + * @return {@code this} builder. + * @see Assignment + */ + UpdateWhere set(Collection assignments); + } + + /** + * Interface exposing {@code WHERE} methods. + */ + interface UpdateWhere extends BuildUpdate { + + /** + * Apply a {@code WHERE} clause. + * + * @param condition the {@code WHERE} condition. + * @return {@code this} builder. + * @see Where + * @see Condition + */ + UpdateWhereAndOr where(Condition condition); + } + + /** + * Interface exposing {@code AND}/{@code OR} combinator methods for {@code WHERE} {@link Condition}s. + */ + interface UpdateWhereAndOr extends BuildUpdate { + + /** + * Combine the previous {@code WHERE} {@link Condition} using {@code AND}. + * + * @param condition the condition, must not be {@literal null}. + * @return {@code this} builder. + * @see Condition#and(Condition) + */ + UpdateWhereAndOr and(Condition condition); + + /** + * Combine the previous {@code WHERE} {@link Condition} using {@code OR}. + * + * @param condition the condition, must not be {@literal null}. + * @return {@code this} builder. + * @see Condition#or(Condition) + */ + UpdateWhereAndOr or(Condition condition); + } + + /** + * Interface exposing the {@link Update} build method. + */ + interface BuildUpdate { + + /** + * Build the {@link Update}. + * + * @return the build and immutable {@link Update} statement. 
+ */ + Update build(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Values.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Values.java new file mode 100644 index 0000000000..20bd773c87 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Values.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.util.StringUtils; + +/** + * {@code VALUES} clause. + * + * @author Mark Paluch + * @since 1.1 + */ +public class Values extends AbstractSegment { + + private final List tables; + + Values(Expression... 
tables) { + this(Arrays.asList(tables)); + } + + Values(List expressions) { + + super(expressions.toArray(new Expression[0])); + + this.tables = expressions; + } + + @Override + public String toString() { + return "VALUES(" + StringUtils.collectionToDelimitedString(tables, ", ") + ")"; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Visitable.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Visitable.java new file mode 100644 index 0000000000..ac31f38937 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Visitable.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.springframework.util.Assert; + +/** + * Interface for implementations that wish to be visited by a {@link Visitor}. + * + * @author Mark Paluch + * @since 1.1 + * @see Visitor + */ +public interface Visitable { + + /** + * Accept a {@link Visitor} visiting this {@link Visitable} and its nested {@link Visitable}s if applicable. + * + * @param visitor the visitor to notify, must not be {@literal null}. 
+ */ + default void visit(Visitor visitor) { + + Assert.notNull(visitor, "Visitor must not be null"); + + visitor.enter(this); + visitor.leave(this); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Visitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Visitor.java new file mode 100644 index 0000000000..5c12a99d51 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Visitor.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * AST {@link Segment} visitor. Visitor methods get called by segments on entering a {@link Visitable}, their child + * {@link Visitable}s and on leaving the {@link Visitable}. + * + * @author Mark Paluch + * @since 1.1 + */ +@FunctionalInterface +public interface Visitor { + + /** + * Enter a {@link Visitable}. + * + * @param segment the segment to visit. + */ + void enter(Visitable segment); + + /** + * Leave a {@link Visitable}. + * + * @param segment the visited segment. 
+ */ + default void leave(Visitable segment) {} +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/When.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/When.java new file mode 100644 index 0000000000..43ea343162 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/When.java @@ -0,0 +1,54 @@ +package org.springframework.data.relational.core.sql; + +/** + * When segment for Case statement. + *

+ * Results in a rendered condition: {@code WHEN <condition> THEN <value>}. + *

+ * + * @author Sven Rienstra + * @since 3.4 + */ +public class When extends AbstractSegment { + + private final Condition condition; + private final Expression value; + + private When(Condition condition, Expression value) { + + super(condition, value); + + this.condition = condition; + this.value = value; + } + + /** + * Creates a new {@link When} given two {@link Expression} condition and {@link Literal} value. + * + * @param condition the condition {@link Expression}. + * @param value the {@link Literal} value. + * @return the {@link When}. + */ + public static When when(Condition condition, Expression value) { + return new When(condition, value); + } + + /** + * @return the condition + */ + public Condition getCondition() { + return condition; + } + + /** + * @return the value + */ + public Expression getValue() { + return value; + } + + @Override + public String toString() { + return "WHEN " + condition + " THEN " + value; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Where.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Where.java new file mode 100644 index 0000000000..c9860b62e2 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/Where.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +/** + * {@code Where} clause. + * + * @author Mark Paluch + * @since 1.1 + */ +public class Where extends AbstractSegment { + + private final Condition condition; + + Where(Condition condition) { + + super(condition); + + this.condition = condition; + } + + @Override + public String toString() { + return "WHERE " + condition; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/package-info.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/package-info.java new file mode 100644 index 0000000000..7332ca9a79 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/package-info.java @@ -0,0 +1,17 @@ + +/** + * Statement Builder implementation. Use {@link org.springframework.data.relational.core.sql.StatementBuilder} to create + * statements and {@link org.springframework.data.relational.core.sql.SQL} to create SQL objects. Objects and dependent + * objects created by the Statement Builder are immutable except for builders. + *

+ * The Statement Builder API is intended for framework usage to produce SQL required for framework operations. + *

+ * + * @since 1.1 + */ +@NonNullApi +@NonNullFields +package org.springframework.data.relational.core.sql; + +import org.springframework.lang.NonNullApi; +import org.springframework.lang.NonNullFields; diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/AnalyticFunctionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/AnalyticFunctionVisitor.java new file mode 100644 index 0000000000..c14e04b277 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/AnalyticFunctionVisitor.java @@ -0,0 +1,102 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.AnalyticFunction; +import org.springframework.data.relational.core.sql.OrderBy; +import org.springframework.data.relational.core.sql.SimpleFunction; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * Renderer for {@link AnalyticFunction}. Uses a {@link RenderTarget} to call back for render results. 
+ * + * @author Jens Schauder + * @since 2.7 + */ +class AnalyticFunctionVisitor extends TypedSingleConditionRenderSupport implements PartRenderer { + + private final StringBuilder part = new StringBuilder(); + private final RenderContext context; + @Nullable private PartRenderer delegate; + private boolean addSpace = false; + + AnalyticFunctionVisitor(RenderContext context) { + super(context); + this.context = context; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof SimpleFunction) { + + delegate = new SimpleFunctionVisitor(context); + return Delegation.delegateTo((DelegatingVisitor) delegate); + } + + if (segment instanceof AnalyticFunction.Partition) { + + delegate = new SegmentListVisitor("PARTITION BY ", ", ", new ExpressionVisitor(context)); + return Delegation.delegateTo((DelegatingVisitor) delegate); + } + + if (segment instanceof OrderBy) { + + delegate = new SegmentListVisitor("ORDER BY ", ", ", new OrderByClauseVisitor(context)); + return Delegation.delegateTo((DelegatingVisitor) delegate); + } + return super.enterNested(segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (delegate instanceof SimpleFunctionVisitor) { + + part.append(delegate.getRenderedPart()); + part.append(" OVER("); + } + + if (delegate instanceof SegmentListVisitor) { + + final CharSequence renderedPart = delegate.getRenderedPart(); + if (renderedPart.length() != 0) { + + if (addSpace) { + part.append(' '); + } + part.append(renderedPart); + addSpace = true; + } + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(AnalyticFunction segment) { + + part.append(")"); + + return super.leaveMatched(segment); + } + + @Override + public CharSequence getRenderedPart() { + return part; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/AssignmentVisitor.java 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/AssignmentVisitor.java new file mode 100644 index 0000000000..167de15662 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/AssignmentVisitor.java @@ -0,0 +1,83 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Assignment; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * {@link org.springframework.data.relational.core.sql.Visitor} rendering {@link Assignment}. Uses a + * {@link RenderTarget} to call back for render results. 
+ * + * @author Mark Paluch + * @since 1.1 + * @see Assignment + */ +class AssignmentVisitor extends TypedSubtreeVisitor { + + private final ColumnVisitor columnVisitor; + private final ExpressionVisitor expressionVisitor; + private final RenderTarget target; + private final StringBuilder part = new StringBuilder(); + + AssignmentVisitor(RenderContext context, RenderTarget target) { + this.columnVisitor = new ColumnVisitor(context, false, part::append); + this.expressionVisitor = new ExpressionVisitor(context); + this.target = target; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Column) { + return Delegation.delegateTo(columnVisitor); + } + + if (segment instanceof Expression) { + return Delegation.delegateTo(expressionVisitor); + } + + throw new IllegalStateException("Cannot provide visitor for " + segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (segment instanceof Column) { + if (part.length() != 0) { + part.append(" = "); + } + return super.leaveNested(segment); + } + + if (segment instanceof Expression) { + part.append(expressionVisitor.getRenderedPart()); + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Assignment segment) { + + target.onRendered(new StringBuilder(part)); + part.setLength(0); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/BetweenVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/BetweenVisitor.java new file mode 100644 index 0000000000..c43eefad1c --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/BetweenVisitor.java @@ -0,0 +1,111 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Between; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * {@link org.springframework.data.relational.core.sql.Visitor} rendering comparison {@link Condition}. Uses a + * {@link RenderTarget} to call back for render results. 
+ * + * @author Mark Paluch + * @see Between + * @since 2.0 + */ +class BetweenVisitor extends FilteredSubtreeVisitor { + + private final Between between; + private final RenderContext context; + private final RenderTarget target; + private final StringBuilder part = new StringBuilder(); + private boolean renderedTestExpression = false; + private boolean renderedPreamble = false; + private boolean done = false; + private @Nullable PartRenderer current; + + BetweenVisitor(Between condition, RenderContext context, RenderTarget target) { + super(it -> it == condition); + this.between = condition; + this.context = context; + this.target = target; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Expression) { + ExpressionVisitor visitor = new ExpressionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof Condition) { + ConditionVisitor visitor = new ConditionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + throw new IllegalStateException("Cannot provide visitor for " + segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (current != null && !done) { + + if (renderedPreamble) { + + part.append(" AND "); + part.append(current.getRenderedPart()); + done = true; + } + + if (renderedTestExpression && !renderedPreamble) { + + part.append(' '); + + if (between.isNegated()) { + part.append("NOT "); + } + + part.append("BETWEEN "); + renderedPreamble = true; + part.append(current.getRenderedPart()); + } + + if (!renderedTestExpression) { + part.append(current.getRenderedPart()); + renderedTestExpression = true; + } + + current = null; + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Visitable segment) { + + target.onRendered(part); + + return super.leaveMatched(segment); + } +} diff --git 
a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/CaseExpressionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/CaseExpressionVisitor.java new file mode 100644 index 0000000000..8e66ccb7ba --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/CaseExpressionVisitor.java @@ -0,0 +1,59 @@ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.CaseExpression; +import org.springframework.data.relational.core.sql.Literal; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.When; + +/** + * Renderer for {@link CaseExpression}. + * + * @author Sven Rienstra + * @since 3.4 + */ +public class CaseExpressionVisitor extends TypedSingleConditionRenderSupport implements PartRenderer { + private final StringBuilder part = new StringBuilder(); + + CaseExpressionVisitor(RenderContext context) { + super(context); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (hasDelegatedRendering()) { + CharSequence renderedPart = consumeRenderedPart(); + + if (segment instanceof When) { + part.append(" "); + part.append(renderedPart); + } else if (segment instanceof Literal) { + part.append(" ELSE "); + part.append(renderedPart); + } + } + + return super.leaveNested(segment); + } + + @Override + Delegation enterMatched(CaseExpression segment) { + + part.append("CASE"); + + return super.enterMatched(segment); + } + + @Override + Delegation leaveMatched(CaseExpression segment) { + + part.append(" END"); + + return super.leaveMatched(segment); + } + + @Override + public CharSequence getRenderedPart() { + return part; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/CastVisitor.java 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/CastVisitor.java new file mode 100644 index 0000000000..3bdee8df97 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/CastVisitor.java @@ -0,0 +1,77 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.StringJoiner; + +import org.springframework.data.relational.core.sql.Cast; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Renders a CAST expression, by delegating to an {@link ExpressionVisitor} and building the expression out of the + * rendered parts. 
+ * + * @author Jens Schauder + * @since 2.3 + */ +class CastVisitor extends TypedSubtreeVisitor implements PartRenderer { + + private final RenderContext context; + @Nullable private StringJoiner joiner; + @Nullable private ExpressionVisitor expressionVisitor; + + CastVisitor(RenderContext context) { + + this.context = context; + } + + @Override + Delegation enterMatched(Cast cast) { + + joiner = new StringJoiner(", ", "CAST(", " AS " + cast.getTargetType() + ")"); + + return super.enterMatched(cast); + } + + @Override + Delegation enterNested(Visitable segment) { + + expressionVisitor = new ExpressionVisitor(context, ExpressionVisitor.AliasHandling.IGNORE); + return Delegation.delegateTo(expressionVisitor); + } + + @Override + Delegation leaveNested(Visitable segment) { + + Assert.state(joiner != null, "Joiner must not be null"); + Assert.state(expressionVisitor != null, "ExpressionVisitor must not be null"); + + joiner.add(expressionVisitor.getRenderedPart()); + return super.leaveNested(segment); + } + + @Override + public CharSequence getRenderedPart() { + + if (joiner == null) { + throw new IllegalStateException("Joiner must not be null"); + } + + return joiner.toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ColumnVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ColumnVisitor.java new file mode 100644 index 0000000000..4b5ad003bb --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ColumnVisitor.java @@ -0,0 +1,67 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.TableLike; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * Renderer for {@link Column}s. Renders a column as {@literal >table<.>column<} or + * {@literal >column<}. + * + * @author Mark Paluch + * @since 1.1 + */ +class ColumnVisitor extends TypedSubtreeVisitor { + + private final RenderContext context; + private final RenderTarget target; + private final boolean considerTablePrefix; + + private @Nullable SqlIdentifier tableName; + + ColumnVisitor(RenderContext context, boolean considerTablePrefix, RenderTarget target) { + this.context = context; + this.target = target; + this.considerTablePrefix = considerTablePrefix; + } + + @Override + Delegation leaveMatched(Column segment) { + + SqlIdentifier column = context.getNamingStrategy().getName(segment); + + CharSequence name = considerTablePrefix && tableName != null + ? 
NameRenderer.render(context, SqlIdentifier.from(tableName, column)) + : NameRenderer.render(context, segment); + + target.onRendered(name); + return super.leaveMatched(segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (segment instanceof TableLike) { + tableName = context.getNamingStrategy().getReferenceName((TableLike) segment); + } + + return super.leaveNested(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ComparisonVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ComparisonVisitor.java new file mode 100644 index 0000000000..f1e5a12e57 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ComparisonVisitor.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Comparison; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * {@link org.springframework.data.relational.core.sql.Visitor} rendering comparison {@link Condition}. 
Uses a + * {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see Comparison + */ +class ComparisonVisitor extends FilteredSubtreeVisitor { + + private final RenderContext context; + private final Comparison condition; + private final RenderTarget target; + private final StringBuilder part = new StringBuilder(); + private @Nullable PartRenderer current; + + ComparisonVisitor(RenderContext context, Comparison condition, RenderTarget target) { + + super(it -> it == condition); + + this.condition = condition; + this.target = target; + this.context = context; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Expression) { + ExpressionVisitor visitor = new ExpressionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + throw new IllegalStateException("Cannot provide visitor for " + segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (current != null) { + if (part.length() != 0) { + part.append(' ').append(condition.getComparator()).append(' '); + } + + part.append(current.getRenderedPart()); + current = null; + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Visitable segment) { + + target.onRendered(part); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ConditionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ConditionVisitor.java new file mode 100644 index 0000000000..925915b430 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ConditionVisitor.java @@ -0,0 +1,108 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.*; +import org.springframework.lang.Nullable; + +/** + * {@link org.springframework.data.relational.core.sql.Visitor} delegating {@link Condition} rendering to condition + * {@link org.springframework.data.relational.core.sql.Visitor}s. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Daniele Canteri + * @since 1.1 + * @see AndCondition + * @see OrCondition + * @see IsNull + * @see Comparison + * @see Like + * @see In + */ +class ConditionVisitor extends TypedSubtreeVisitor implements PartRenderer { + + private final RenderContext context; + private final StringBuilder builder = new StringBuilder(); + + ConditionVisitor(RenderContext context) { + this.context = context; + } + + @Override + Delegation enterMatched(Condition segment) { + + DelegatingVisitor visitor = getDelegation(segment); + + return visitor != null ? 
Delegation.delegateTo(visitor) : Delegation.retain(); + } + + @Nullable + private DelegatingVisitor getDelegation(Condition segment) { + + if (segment instanceof AndCondition) { + return new MultiConcatConditionVisitor(context, (AndCondition) segment, builder::append); + } + + if (segment instanceof OrCondition) { + return new MultiConcatConditionVisitor(context, (OrCondition) segment, builder::append); + } + + if (segment instanceof IsNull) { + return new IsNullVisitor(context, builder::append); + } + + if (segment instanceof Between) { + return new BetweenVisitor((Between) segment, context, builder::append); + } + + if (segment instanceof Comparison) { + return new ComparisonVisitor(context, (Comparison) segment, builder::append); + } + + if (segment instanceof Like) { + return new LikeVisitor((Like) segment, context, builder::append); + } + + if (segment instanceof In) { + + if (((In) segment).hasExpressions()) { + return new InVisitor(context, builder::append); + } else { + return new EmptyInVisitor(context, builder::append); + } + } + + if (segment instanceof NestedCondition) { + return new NestedConditionVisitor(context, builder::append); + } + + if (segment instanceof ConstantCondition) { + return new ConstantConditionVisitor(context, builder::append); + } + + if (segment instanceof Not) { + return new NotConditionVisitor(context, builder::append); + } + + return null; + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ConstantConditionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ConstantConditionVisitor.java new file mode 100644 index 0000000000..535d2a06b5 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ConstantConditionVisitor.java @@ -0,0 +1,43 @@ +/* + * Copyright 2021-2025 the original author or 
authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.ConstantCondition; + +/** + * Renderer for {@link ConstantCondition}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Daniele Canteri + * @since 2.3 + */ +class ConstantConditionVisitor extends TypedSingleConditionRenderSupport { + + private final RenderTarget target; + + ConstantConditionVisitor(RenderContext context, RenderTarget target) { + super(context); + this.target = target; + } + + @Override + Delegation leaveMatched(ConstantCondition segment) { + + target.onRendered(segment.toString()); + + return super.leaveMatched(segment); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/DelegatingVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/DelegatingVisitor.java new file mode 100644 index 0000000000..95da9908b2 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/DelegatingVisitor.java @@ -0,0 +1,195 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.Stack; + +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Visitor; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Abstract base class for delegating {@link Visitor} implementations. This class implements a delegation pattern using + * visitors. A delegating {@link Visitor} can implement {@link #doEnter(Visitable)} and {@link #doLeave(Visitable)} + * methods to provide its functionality. + *

+ *

Delegation

Typically, a {@link Visitor} is scoped to a single responsibility. If a {@link Visitor segment} + * requires {@link #doEnter(Visitable) processing} that is not directly implemented by the visitor itself, the current + * {@link Visitor} can delegate processing to a {@link DelegatingVisitor delegate}. Once a delegation is installed, the + * {@link DelegatingVisitor delegate} is used as {@link Visitor} for the current and all subsequent items until it + * {@link #doLeave(Visitable) signals} that it is no longer responsible. + *

+ *

+ * Nested visitors are required to properly signal once they are no longer responsible for a {@link Visitor segment} to + * step back from the delegation. Otherwise, parents are no longer involved in the visitation. + *

+ *

+ * Delegation is recursive and limited by the stack size. + *

+ * + * @author Mark Paluch + * @since 1.1 + * @see FilteredSubtreeVisitor + * @see TypedSubtreeVisitor + */ +abstract class DelegatingVisitor implements Visitor { + + private Stack delegation = new Stack<>(); + + /** + * Invoked for a {@link Visitable segment} when entering the segment. + *

+ * This method can signal whether it is responsible for handling the {@link Visitor segment} or whether the segment + * requires delegation to a sub-{@link Visitor}. When delegating to a sub-{@link Visitor}, {@link #doEnter(Visitable)} + * is called on the {@link DelegatingVisitor delegate}. + *

+ * + * @param segment must not be {@literal null}. + * @return + */ + @Nullable + public abstract Delegation doEnter(Visitable segment); + + @Override + public final void enter(Visitable segment) { + + if (delegation.isEmpty()) { + + Delegation visitor = doEnter(segment); + Assert.notNull(visitor, + () -> String.format("Visitor must not be null Caused by %s.doEnter(…)", getClass().getName())); + Assert.state(!visitor.isLeave(), + () -> String.format("Delegation indicates leave. Caused by %s.doEnter(…)", getClass().getName())); + + if (visitor.isDelegate()) { + delegation.push(visitor.getDelegate()); + visitor.getDelegate().enter(segment); + } + } else { + delegation.peek().enter(segment); + } + } + + /** + * Invoked for a {@link Visitable segment} when leaving the segment. + *

+ * This method can signal whether this {@link Visitor} should remain responsible for handling subsequent + * {@link Visitor segments} or whether it should step back from delegation. When stepping back from delegation, + * {@link #doLeave(Visitable)} is called on the {@link DelegatingVisitor parent delegate}. + *

+ * + * @param segment must not be {@literal null}. + * @return + */ + public abstract Delegation doLeave(Visitable segment); + + public final void leave(Visitable segment) { + doLeave0(segment); + } + + private Delegation doLeave0(Visitable segment) { + + if (delegation.isEmpty()) { + return doLeave(segment); + } else { + + DelegatingVisitor visitor = delegation.peek(); + while (visitor != null) { + + Delegation result = visitor.doLeave0(segment); + Assert.notNull(visitor, + () -> String.format("Visitor must not be null Caused by %s.doLeave(…)", getClass().getName())); + + if (visitor == this) { + if (result.isLeave()) { + return delegation.isEmpty() ? Delegation.leave() : Delegation.retain(); + } + return Delegation.retain(); + } + + if (result.isRetain()) { + return result; + } + + if (result.isLeave()) { + + if (!delegation.isEmpty()) { + delegation.pop(); + } + + if (!delegation.isEmpty()) { + visitor = delegation.peek(); + } else { + visitor = this; + } + } + } + } + + return Delegation.leave(); + } + + /** + * Value object to control delegation. 
+ */ + static class Delegation { + + private static Delegation RETAIN = new Delegation(true, false, null); + private static Delegation LEAVE = new Delegation(false, true, null); + + private final boolean retain; + private final boolean leave; + + private final @Nullable DelegatingVisitor delegate; + + private Delegation(boolean retain, boolean leave, @Nullable DelegatingVisitor delegate) { + this.retain = retain; + this.leave = leave; + this.delegate = delegate; + } + + public static Delegation retain() { + return RETAIN; + } + + public static Delegation leave() { + return LEAVE; + } + + public static Delegation delegateTo(DelegatingVisitor visitor) { + return new Delegation(false, false, visitor); + } + + boolean isDelegate() { + return delegate != null; + } + + boolean isRetain() { + return retain; + } + + boolean isLeave() { + return leave; + } + + DelegatingVisitor getDelegate() { + + Assert.state(isDelegate(), "No delegate available"); + return delegate; + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/DeleteStatementVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/DeleteStatementVisitor.java new file mode 100644 index 0000000000..9bb694ccbe --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/DeleteStatementVisitor.java @@ -0,0 +1,91 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Delete; +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Where; + +/** + * {@link PartRenderer} for {@link Delete} statements. + * + * @author Mark Paluch + * @since 1.1 + */ +class DeleteStatementVisitor extends DelegatingVisitor implements PartRenderer { + + private StringBuilder builder = new StringBuilder(); + private StringBuilder from = new StringBuilder(); + private StringBuilder where = new StringBuilder(); + + private FromClauseVisitor fromClauseVisitor; + private WhereClauseVisitor whereClauseVisitor; + + DeleteStatementVisitor(RenderContext context) { + + this.fromClauseVisitor = new FromClauseVisitor(context, it -> { + + if (from.length() != 0) { + from.append(", "); + } + + from.append(it); + }); + + this.whereClauseVisitor = new WhereClauseVisitor(context, where::append); + } + + @Override + public Delegation doEnter(Visitable segment) { + + if (segment instanceof From) { + return Delegation.delegateTo(fromClauseVisitor); + } + + if (segment instanceof Where) { + return Delegation.delegateTo(whereClauseVisitor); + } + + return Delegation.retain(); + } + + @Override + public Delegation doLeave(Visitable segment) { + + if (segment instanceof Delete) { + + builder.append("DELETE "); + + if (from.length() != 0) { + builder.append("FROM ").append(from); + } + + if (where.length() != 0) { + builder.append(" WHERE ").append(where); + } + + return Delegation.leave(); + } + + return Delegation.retain(); + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/EmptyInVisitor.java 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/EmptyInVisitor.java new file mode 100644 index 0000000000..f40ec0e2a7 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/EmptyInVisitor.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.FalseCondition; +import org.springframework.data.relational.core.sql.In; +import org.springframework.data.relational.core.sql.TrueCondition; + +/** + * Renderer for empty {@link In}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @since 2.1 + */ +class EmptyInVisitor extends TypedSingleConditionRenderSupport { + + private final RenderTarget target; + + EmptyInVisitor(RenderContext context, RenderTarget target) { + super(context); + this.target = target; + } + + @Override + Delegation leaveMatched(In segment) { + + target.onRendered(segment.isNotIn() ? 
TrueCondition.INSTANCE.toString() : FalseCondition.INSTANCE.toString()); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ExpressionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ExpressionVisitor.java new file mode 100644 index 0000000000..32ce15dee1 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ExpressionVisitor.java @@ -0,0 +1,175 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.*; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * {@link PartRenderer} for {@link Expression}s. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Sven Rienstra + * @see Column + * @see SubselectExpression + */ +class ExpressionVisitor extends TypedSubtreeVisitor implements PartRenderer { + + private final RenderContext context; + private final AliasHandling aliasHandling; + + private CharSequence value = ""; + private @Nullable PartRenderer partRenderer; + + /** + * Creates an {@code ExpressionVisitor} that does not use aliases for column names + * + * @param context must not be {@literal null}. 
+ */ + ExpressionVisitor(RenderContext context) { + this(context, AliasHandling.IGNORE); + } + + /** + * Creates an {@code ExpressionVisitor}. + * + * @param context must not be {@literal null}. + * @param aliasHandling controls if columns should be rendered as their alias or using their table names. + * @since 2.3 + */ + ExpressionVisitor(RenderContext context, AliasHandling aliasHandling) { + + Assert.notNull(context, "The render context must not be null"); + Assert.notNull(aliasHandling, "The aliasHandling must not be null"); + + this.context = context; + this.aliasHandling = aliasHandling; + } + + @Override + Delegation enterMatched(Expression segment) { + + if (segment instanceof SubselectExpression) { + + SelectStatementVisitor visitor = new SelectStatementVisitor(context); + partRenderer = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof SimpleFunction) { + + SimpleFunctionVisitor visitor = new SimpleFunctionVisitor(context); + partRenderer = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof AnalyticFunction) { + + AnalyticFunctionVisitor visitor = new AnalyticFunctionVisitor(context); + partRenderer = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof Column column) { + + value = aliasHandling == AliasHandling.USE ? NameRenderer.fullyQualifiedReference(context, column) + : NameRenderer.fullyQualifiedUnaliasedReference(context, column); + } else if (segment instanceof BindMarker) { + + if (segment instanceof Named) { + value = NameRenderer.render(context, (Named) segment); + } else { + value = segment.toString(); + } + } else if (segment instanceof AsteriskFromTable asteriskFromTable) { + + TableLike table = asteriskFromTable.getTable(); + CharSequence renderedTable = table instanceof Aliased aliasedTable ? 
NameRenderer.render(context, aliasedTable) + : NameRenderer.render(context, table); + + value = renderedTable + ".*"; + } else if (segment instanceof Cast) { + + CastVisitor visitor = new CastVisitor(context); + partRenderer = visitor; + return Delegation.delegateTo(visitor); + } else if (segment instanceof CaseExpression) { + + CaseExpressionVisitor visitor = new CaseExpressionVisitor(context); + partRenderer = visitor; + return Delegation.delegateTo(visitor); + } else { + // works for literals and just and possibly more + value = segment.toString(); + } + + return Delegation.retain(); + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Condition) { + + ConditionVisitor visitor = new ConditionVisitor(context); + partRenderer = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof InlineQuery) { + + NoopVisitor partRenderer = new NoopVisitor<>(InlineQuery.class); + return Delegation.delegateTo(partRenderer); + } + return super.enterNested(segment); + } + + @Override + Delegation leaveMatched(Expression segment) { + + if (partRenderer != null) { + + value = partRenderer.getRenderedPart(); + partRenderer = null; + } + + return super.leaveMatched(segment); + } + + @Override + public CharSequence getRenderedPart() { + return value; + } + + /** + * Describes how aliases of columns should be rendered. + * + * @since 2.3 + */ + enum AliasHandling { + /** + * The alias does not get used. + */ + IGNORE, + + /** + * The alias gets used. This means aliased columns get rendered as {@literal }. 
+ */ + USE + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FilteredSingleConditionRenderSupport.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FilteredSingleConditionRenderSupport.java new file mode 100644 index 0000000000..f02b214fd8 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FilteredSingleConditionRenderSupport.java @@ -0,0 +1,94 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.function.Predicate; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Support class for {@link FilteredSubtreeVisitor filtering visitors} that want to render a single {@link Condition} + * and delegate nested {@link Expression} and {@link Condition} rendering. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +abstract class FilteredSingleConditionRenderSupport extends FilteredSubtreeVisitor { + + private final RenderContext context; + private @Nullable PartRenderer current; + + /** + * Creates a new {@link FilteredSingleConditionRenderSupport} given the filter {@link Predicate}. + * + * @param context + * @param filter filter predicate to identify when to {@link #enterMatched(Visitable) + * enter}/{@link #leaveMatched(Visitable) leave} the {@link Visitable segment} that this visitor is + * responsible for. + */ + FilteredSingleConditionRenderSupport(RenderContext context, Predicate filter) { + super(filter); + this.context = context; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Condition) { + ConditionVisitor visitor = new ConditionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof Expression) { + ExpressionVisitor visitor = new ExpressionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + throw new IllegalStateException("Cannot provide visitor for " + segment); + } + + /** + * Returns whether rendering was delegated to a {@link ExpressionVisitor} or {@link ConditionVisitor}. + * + * @return {@literal true} when rendering was delegated to a {@link ExpressionVisitor} or {@link ConditionVisitor}. + */ + protected boolean hasDelegatedRendering() { + return current != null; + } + + /** + * Consumes the delegated rendering part. Call {@link #hasDelegatedRendering()} to check whether rendering was + * actually delegated. Consumption releases the delegated rendered. + * + * @return the delegated rendered part. + * @throws IllegalStateException if rendering was not delegate. 
+ */ + protected CharSequence consumeRenderedPart() { + + Assert.state(hasDelegatedRendering(), "Rendering not delegated; Cannot consume delegated rendering part"); + + PartRenderer current = this.current; + this.current = null; + return current.getRenderedPart(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FilteredSubtreeVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FilteredSubtreeVisitor.java new file mode 100644 index 0000000000..5aae4df41b --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FilteredSubtreeVisitor.java @@ -0,0 +1,139 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.function.Predicate; + +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Visitor; +import org.springframework.lang.Nullable; + +/** + * Filtering {@link DelegatingVisitor visitor} applying a {@link Predicate filter}. Typically used as base class for + * {@link Visitor visitors} that wish to apply hierarchical processing based on a well-defined entry {@link Visitor + * segment}. + *

+ * Filtering is a three-way process: + *

    + *
  1. Ignores elements that do not match the filter {@link Predicate}.
  2. + *
  3. {@link #enterMatched(Visitable) enter}/{@link #leaveMatched(Visitable) leave} matched callbacks for the + * {@link Visitable segment} that matches the {@link Predicate}.
  4. + *
  5. {@link #enterNested(Visitable) enter}/{@link #leaveNested(Visitable) leave} nested callbacks for direct/nested + * children of the matched {@link Visitable} until {@link #leaveMatched(Visitable) leaving the matched} + * {@link Visitable}.
  6. + *
+ *

+ * + * @author Mark Paluch + * @see TypedSubtreeVisitor + * @since 1.1 + */ +abstract class FilteredSubtreeVisitor extends DelegatingVisitor { + + private final Predicate filter; + + private @Nullable Visitable currentSegment; + + /** + * Creates a new {@link FilteredSubtreeVisitor} given the filter {@link Predicate}. + * + * @param filter filter predicate to identify when to {@link #enterMatched(Visitable) + * enter}/{@link #leaveMatched(Visitable) leave} the {@link Visitable segment} that this visitor is + * responsible for. + */ + FilteredSubtreeVisitor(Predicate filter) { + this.filter = filter; + } + + /** + * {@link Visitor#enter(Visitable) Enter} callback for a {@link Visitable} that this {@link Visitor} is responsible + * for. The default implementation retains delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or + * {@link Delegation#delegateTo(DelegatingVisitor)}. + * @see Delegation#retain() + */ + Delegation enterMatched(Visitable segment) { + return Delegation.retain(); + } + + /** + * {@link Visitor#enter(Visitable) Enter} callback for a nested {@link Visitable}. The default implementation retains + * delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or + * {@link Delegation#delegateTo(DelegatingVisitor)}. + * @see Delegation#retain() + */ + Delegation enterNested(Visitable segment) { + return Delegation.retain(); + } + + /** + * {@link Visitor#leave(Visitable) Leave} callback for the matched {@link Visitable}. The default implementation steps + * back from delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or {@link Delegation#leave()}. 
+ * @see Delegation#leave() + */ + Delegation leaveMatched(Visitable segment) { + return Delegation.leave(); + } + + /** + * {@link Visitor#leave(Visitable) Leave} callback for a nested {@link Visitable}. The default implementation retains + * delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or {@link Delegation#leave()}. + * @see Delegation#retain() + */ + Delegation leaveNested(Visitable segment) { + return Delegation.retain(); + } + + @Override + public final Delegation doEnter(Visitable segment) { + + if (currentSegment == null) { + + if (filter.test(segment)) { + currentSegment = segment; + return enterMatched(segment); + } + } else { + return enterNested(segment); + } + + return Delegation.retain(); + } + + @Override + public final Delegation doLeave(Visitable segment) { + + if (currentSegment == null) { + return Delegation.leave(); + } else if (segment == currentSegment) { + currentSegment = null; + return leaveMatched(segment); + } else { + return leaveNested(segment); + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FromClauseVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FromClauseVisitor.java new file mode 100644 index 0000000000..84c8632524 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FromClauseVisitor.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link From}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class FromClauseVisitor extends TypedSubtreeVisitor { + + private final FromTableVisitor visitor; + private final RenderTarget parent; + private final StringBuilder builder = new StringBuilder(); + private boolean first = true; + + FromClauseVisitor(RenderContext context, RenderTarget parent) { + + this.visitor = new FromTableVisitor(context, it -> { + + if (first) { + first = false; + } else { + builder.append(", "); + } + + builder.append(it); + }); + + this.parent = parent; + } + + @Override + Delegation enterNested(Visitable segment) { + return Delegation.delegateTo(visitor); + } + + @Override + Delegation leaveMatched(From segment) { + parent.onRendered(builder); + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FromTableVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FromTableVisitor.java new file mode 100644 index 0000000000..f1ff2a29e4 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/FromTableVisitor.java @@ -0,0 +1,72 @@ +/* + * Copyright 2019-2025 the original author or 
authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Aliased; +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.InlineQuery; +import org.springframework.data.relational.core.sql.TableLike; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Renderer for {@link TableLike} used within a {@link From} or + * {@link org.springframework.data.relational.core.sql.Join} clause. Uses a {@link RenderTarget} to call back for render + * results. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class FromTableVisitor extends TypedSubtreeVisitor { + + private final RenderContext context; + private final RenderTarget parent; + @Nullable private StringBuilder builder = null; + + FromTableVisitor(RenderContext context, RenderTarget parent) { + super(); + this.context = context; + this.parent = parent; + } + + @Override + Delegation enterMatched(TableLike segment) { + + builder = new StringBuilder(); + + if (segment instanceof InlineQuery) { + return Delegation.delegateTo(new SubselectVisitor(context, builder::append)); + } + + return super.enterMatched(segment); + } + + @Override + Delegation leaveMatched(TableLike segment) { + + Assert.state(builder != null, "Builder must not be null in leaveMatched"); + + builder.append(NameRenderer.render(context, segment)); + if (segment instanceof Aliased) { + builder.append(" ").append(NameRenderer.render(context, (Aliased) segment)); + } + + parent.onRendered(builder); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/InVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/InVisitor.java new file mode 100644 index 0000000000..2535c2db72 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/InVisitor.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.In; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link In}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class InVisitor extends TypedSingleConditionRenderSupport { + + private final RenderTarget target; + private final StringBuilder part = new StringBuilder(); + private boolean needsComma = false; + private boolean notIn = false; + + InVisitor(RenderContext context, RenderTarget target) { + super(context); + this.target = target; + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (hasDelegatedRendering()) { + CharSequence renderedPart = consumeRenderedPart(); + + if (needsComma) { + part.append(", "); + } + + if (part.isEmpty()) { + part.append(renderedPart); + if (notIn) { + part.append(" NOT"); + } + part.append(" IN ("); + } else { + part.append(renderedPart); + needsComma = true; + } + } + + return super.leaveNested(segment); + } + + @Override + Delegation enterMatched(In segment) { + + notIn = segment.isNotIn(); + + return super.enterMatched(segment); + } + + @Override + Delegation leaveMatched(In segment) { + + part.append(")"); + target.onRendered(part); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/InsertStatementVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/InsertStatementVisitor.java new file mode 100644 index 0000000000..b897dc41f0 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/InsertStatementVisitor.java @@ -0,0 +1,140 @@ +/* + * Copyright 2019-2025 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Insert; +import org.springframework.data.relational.core.sql.Into; +import org.springframework.data.relational.core.sql.Values; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.util.Assert; + +/** + * {@link PartRenderer} for {@link Insert} statements. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @author Mikhail Polivakha + * @since 1.1 + */ +class InsertStatementVisitor extends DelegatingVisitor implements PartRenderer { + + private final StringBuilder builder = new StringBuilder(); + private final StringBuilder into = new StringBuilder(); + private final StringBuilder columns = new StringBuilder(); + private final StringBuilder values = new StringBuilder(); + + private final IntoClauseVisitor intoClauseVisitor; + private final ColumnVisitor columnVisitor; + private final ValuesVisitor valuesVisitor; + private final RenderContext renderContext; + + InsertStatementVisitor(RenderContext renderContext) { + + Assert.notNull(renderContext, "renderContext must not be null"); + + this.renderContext = renderContext; + this.intoClauseVisitor = createIntoClauseVisitor(renderContext); + this.columnVisitor = createColumnVisitor(renderContext); + this.valuesVisitor = new ValuesVisitor(renderContext, values::append); + } + + @Override + public Delegation doEnter(Visitable segment) { + + if (segment instanceof Into) { + return Delegation.delegateTo(this.intoClauseVisitor); + } + + if (segment instanceof Column) { + return Delegation.delegateTo(this.columnVisitor); + } + + if (segment instanceof Values) { + return Delegation.delegateTo(this.valuesVisitor); + } + + return Delegation.retain(); + } + + @Override + public Delegation doLeave(Visitable segment) { + + if (segment instanceof Insert) { + + builder.append("INSERT"); + + builder.append(" INTO ").append(into); + + addInsertColumnsIfPresent(); + + addInsertValuesIfPresentElseDefault(); + + return Delegation.leave(); + } + + return Delegation.retain(); + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } + + private void addInsertValuesIfPresentElseDefault() { + + if (values.length() != 0) { + builder.append(" VALUES (").append(values).append(")"); + } else { + addInsertWithDefaultValuesToBuilder(); + } + } + + private void 
addInsertColumnsIfPresent() { + + if (columns.length() != 0) { + builder.append(" (").append(columns).append(")"); + } + } + + private void addInsertWithDefaultValuesToBuilder() { + builder.append(renderContext.getInsertRenderContext().getDefaultValuesInsertPart()); + } + + private ColumnVisitor createColumnVisitor(RenderContext context) { + + return new ColumnVisitor(context, false, it -> { + + if (columns.length() != 0) { + columns.append(", "); + } + + columns.append(it); + }); + } + + private IntoClauseVisitor createIntoClauseVisitor(RenderContext context) { + + return new IntoClauseVisitor(context, it -> { + + if (into.length() != 0) { + into.append(", "); + } + + into.append(it); + }); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/IntoClauseVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/IntoClauseVisitor.java new file mode 100644 index 0000000000..c527487a05 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/IntoClauseVisitor.java @@ -0,0 +1,60 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Into; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link Into}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @since 1.1 + */ +class IntoClauseVisitor extends TypedSubtreeVisitor { + + private final FromTableVisitor visitor; + private final RenderTarget parent; + private final StringBuilder builder = new StringBuilder(); + private boolean first = true; + + IntoClauseVisitor(RenderContext context, RenderTarget parent) { + + this.visitor = new FromTableVisitor(context, it -> { + + if (first) { + first = false; + } else { + builder.append(", "); + } + + builder.append(it); + }); + + this.parent = parent; + } + + @Override + Delegation enterNested(Visitable segment) { + return Delegation.delegateTo(visitor); + } + + @Override + Delegation leaveMatched(Into segment) { + parent.onRendered(builder); + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/IsNullVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/IsNullVisitor.java new file mode 100644 index 0000000000..3d216a7f05 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/IsNullVisitor.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.IsNull; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link IsNull}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class IsNullVisitor extends TypedSingleConditionRenderSupport { + + private final RenderTarget target; + private final StringBuilder part = new StringBuilder(); + + IsNullVisitor(RenderContext context, RenderTarget target) { + super(context); + this.target = target; + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (hasDelegatedRendering()) { + part.append(consumeRenderedPart()); + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(IsNull segment) { + + if (segment.isNegated()) { + part.append(" IS NOT NULL"); + } else { + part.append(" IS NULL"); + } + + target.onRendered(part); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/JoinVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/JoinVisitor.java new file mode 100644 index 0000000000..94c36b11be --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/JoinVisitor.java @@ -0,0 +1,95 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Join; +import org.springframework.data.relational.core.sql.TableLike; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link Join} segments. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class JoinVisitor extends TypedSubtreeVisitor { + + private final RenderTarget parent; + private final StringBuilder joinClause = new StringBuilder(); + private final FromTableVisitor fromTableVisitor; + private final ConditionVisitor conditionVisitor; + private boolean inCondition = false; + private boolean hasSeenCondition = false; + + JoinVisitor(RenderContext context, RenderTarget parent) { + + this.parent = parent; + this.conditionVisitor = new ConditionVisitor(context); + this.fromTableVisitor = new FromTableVisitor(context, joinClause::append); + } + + @Override + Delegation enterMatched(Join segment) { + + joinClause.append(segment.getType().getSql()).append(' '); + + return super.enterMatched(segment); + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof TableLike && !inCondition) { + return Delegation.delegateTo(fromTableVisitor); + } else if (segment instanceof 
Condition) { + + inCondition = true; + if (!hasSeenCondition) { + hasSeenCondition = true; + return Delegation.delegateTo(conditionVisitor); + } + } + + return super.enterNested(segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (segment instanceof Condition) { + + inCondition = false; + + if (hasSeenCondition) { + + joinClause.append(" ON "); + joinClause.append(conditionVisitor.getRenderedPart()); + + hasSeenCondition = false; + } + } + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Join segment) { + + parent.onRendered(joinClause); + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/LikeVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/LikeVisitor.java new file mode 100644 index 0000000000..8649ac04b3 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/LikeVisitor.java @@ -0,0 +1,94 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Like; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * {@link org.springframework.data.relational.core.sql.Visitor} rendering comparison {@link Condition}. Uses a + * {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @see Like + * @since 1.1 + */ +class LikeVisitor extends FilteredSubtreeVisitor { + + private final Like like; + private final RenderContext context; + private final RenderTarget target; + private final StringBuilder part = new StringBuilder(); + private @Nullable PartRenderer current; + + LikeVisitor(Like condition, RenderContext context, RenderTarget target) { + super(it -> it == condition); + this.like = condition; + this.context = context; + this.target = target; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Expression) { + ExpressionVisitor visitor = new ExpressionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof Condition) { + ConditionVisitor visitor = new ConditionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + throw new IllegalStateException("Cannot provide visitor for " + segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (current != null) { + if (part.length() != 0) { + + part.append(' '); + + if (like.isNegated()) { + part.append("NOT "); + } + + part.append("LIKE "); + } + + part.append(current.getRenderedPart()); + current = null; + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Visitable segment) { + + target.onRendered(part); + + return super.leaveMatched(segment); + } +} diff 
--git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/MultiConcatConditionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/MultiConcatConditionVisitor.java new file mode 100644 index 0000000000..1ccb0d9c24 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/MultiConcatConditionVisitor.java @@ -0,0 +1,69 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.AndCondition; +import org.springframework.data.relational.core.sql.OrCondition; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link AndCondition} and {@link OrCondition}. Uses a {@link RenderTarget} to call back for render + * results. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class MultiConcatConditionVisitor extends FilteredSingleConditionRenderSupport { + + private final RenderTarget target; + private final String concat; + private final StringBuilder part = new StringBuilder(); + + MultiConcatConditionVisitor(RenderContext context, AndCondition condition, RenderTarget target) { + super(context, it -> it == condition); + this.target = target; + this.concat = " AND "; + } + + MultiConcatConditionVisitor(RenderContext context, OrCondition condition, RenderTarget target) { + super(context, it -> it == condition); + this.target = target; + this.concat = " OR "; + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (hasDelegatedRendering()) { + if (part.length() != 0) { + part.append(concat); + } + + part.append(consumeRenderedPart()); + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Visitable segment) { + + target.onRendered(part); + + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NameRenderer.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NameRenderer.java new file mode 100644 index 0000000000..66bdb808c6 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NameRenderer.java @@ -0,0 +1,122 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Aliased; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.Named; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.TableLike; + +/** + * Utility to render {@link Column} and {@link Table} names using {@link SqlIdentifier} and {@link RenderContext} to + * SQL. + * + * @author Mark Paluch + * @author Jens Schauder + */ +class NameRenderer { + + /** + * Render the {@link TableLike#getName() table name } with considering the + * {@link RenderNamingStrategy#getName(TableLike) naming strategy}. + */ + static CharSequence render(RenderContext context, TableLike table) { + return render(context, context.getNamingStrategy().getName(table)); + } + + /** + * Render the {@link Column#getName() column name} with considering the {@link RenderNamingStrategy#getName(Column) + * naming strategy}. + */ + static CharSequence render(RenderContext context, Column column) { + return render(context, context.getNamingStrategy().getName(column)); + } + + /** + * Render the {@link Named#getName() name}. + */ + static CharSequence render(RenderContext context, Named named) { + return render(context, named.getName()); + } + + /** + * Render the {@link Aliased#getAlias() alias}. + */ + static CharSequence render(RenderContext context, Aliased aliased) { + return render(context, aliased.getAlias()); + } + + /** + * Render the {@link Table#getReferenceName()} table reference name} with considering the + * {@link RenderNamingStrategy#getReferenceName(TableLike) naming strategy}. 
+ */ + static CharSequence reference(RenderContext context, TableLike table) { + return render(context, context.getNamingStrategy().getReferenceName(table)); + } + + /** + * Render the {@link Column#getReferenceName()} column reference name} with considering the + * {@link RenderNamingStrategy#getReferenceName(Column) naming strategy}. + */ + static CharSequence reference(RenderContext context, Column column) { + return render(context, context.getNamingStrategy().getReferenceName(column)); + } + + /** + * Render the fully-qualified table and column name with considering the naming strategies of each component. + * + * @see RenderNamingStrategy#getReferenceName + */ + static CharSequence fullyQualifiedReference(RenderContext context, Column column) { + + RenderNamingStrategy namingStrategy = context.getNamingStrategy(); + + if (column instanceof Aliased) { + return render(context, namingStrategy.getReferenceName(column)); + } + + return render(context, SqlIdentifier.from(namingStrategy.getReferenceName(column.getTable()), + namingStrategy.getReferenceName(column))); + } + + /** + * Render the fully-qualified table and column name with considering the naming strategies of each component without + * using the alias for the column. For the table the alias is still used. + * + * @see #fullyQualifiedReference(RenderContext, Column) + * @since 2.3 + */ + static CharSequence fullyQualifiedUnaliasedReference(RenderContext context, Column column) { + + RenderNamingStrategy namingStrategy = context.getNamingStrategy(); + + return render(context, + SqlIdentifier.from(namingStrategy.getReferenceName(column.getTable()), namingStrategy.getName(column))); + } + + /** + * Render the {@link SqlIdentifier#toSql(IdentifierProcessing) identifier to SQL} considering + * {@link IdentifierProcessing}. 
+ */ + static CharSequence render(RenderContext context, SqlIdentifier identifier) { + return identifier.toSql(context.getIdentifierProcessing()); + } + + private NameRenderer() {} +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NamingStrategies.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NamingStrategies.java new file mode 100644 index 0000000000..a31d801df4 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NamingStrategies.java @@ -0,0 +1,148 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.Locale; +import java.util.function.Function; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.TableLike; +import org.springframework.util.Assert; + +/** + * Factory for {@link RenderNamingStrategy} objects. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +public abstract class NamingStrategies { + + private NamingStrategies() {} + + /** + * Creates a as-is {@link RenderNamingStrategy} that preserves {@link Column} and {@link Table} names as they were + * expressed during their declaration. + * + * @return as-is {@link RenderNamingStrategy}. + */ + public static RenderNamingStrategy asIs() { + return AsIs.INSTANCE; + } + + /** + * Creates a mapping {@link RenderNamingStrategy} that applies a {@link Function mapping function} to {@link Column} + * and {@link Table} names. + * + * @param mappingFunction the mapping {@link Function}, must not be {@literal null}. + * @return the mapping {@link RenderNamingStrategy}. + */ + public static RenderNamingStrategy mapWith(Function mappingFunction) { + return AsIs.INSTANCE.map(mappingFunction); + } + + /** + * Creates a mapping {@link RenderNamingStrategy} that converts {@link Column} and {@link Table} names to upper case + * using the default {@link Locale}. + * + * @return upper-casing {@link RenderNamingStrategy}. + * @see String#toUpperCase() + * @see Locale + */ + public static RenderNamingStrategy toUpper() { + return toUpper(Locale.getDefault()); + } + + /** + * Creates a mapping {@link RenderNamingStrategy} that converts {@link Column} and {@link Table} names to upper case + * using the given {@link Locale}. + * + * @param locale the locale to use. + * @return upper-casing {@link RenderNamingStrategy}. + * @see String#toUpperCase(Locale) + */ + public static RenderNamingStrategy toUpper(Locale locale) { + + Assert.notNull(locale, "Locale must not be null"); + + return AsIs.INSTANCE.map(it -> it.toUpperCase(locale)); + } + + /** + * Creates a mapping {@link RenderNamingStrategy} that converts {@link Column} and {@link Table} names to lower case + * using the default {@link Locale}. + * + * @return lower-casing {@link RenderNamingStrategy}. 
+ * @see String#toLowerCase() + * @see Locale + */ + public static RenderNamingStrategy toLower() { + return toLower(Locale.getDefault()); + } + + /** + * Creates a mapping {@link RenderNamingStrategy} that converts {@link Column} and {@link Table} names to lower case + * using the given {@link Locale}. + * + * @param locale the locale to use. + * @return lower-casing {@link RenderNamingStrategy}. + * @see String#toLowerCase(Locale) + * @see Locale + */ + public static RenderNamingStrategy toLower(Locale locale) { + + Assert.notNull(locale, "Locale must not be null"); + + return AsIs.INSTANCE.map(it -> it.toLowerCase(locale)); + } + + enum AsIs implements RenderNamingStrategy { + INSTANCE + } + + static class DelegatingRenderNamingStrategy implements RenderNamingStrategy { + + private final RenderNamingStrategy delegate; + private final Function mappingFunction; + + DelegatingRenderNamingStrategy(RenderNamingStrategy delegate, Function mappingFunction) { + + this.delegate = delegate; + this.mappingFunction = mappingFunction; + } + + @Override + public SqlIdentifier getName(Column column) { + return delegate.getName(column).transform(mappingFunction::apply); + } + + @Override + public SqlIdentifier getReferenceName(Column column) { + return delegate.getReferenceName(column).transform(mappingFunction::apply); + } + + @Override + public SqlIdentifier getName(TableLike table) { + return delegate.getName(table).transform(mappingFunction::apply); + } + + @Override + public SqlIdentifier getReferenceName(TableLike table) { + return delegate.getReferenceName(table).transform(mappingFunction::apply); + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NestedConditionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NestedConditionVisitor.java new file mode 100644 index 0000000000..25511b22da --- /dev/null +++ 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NestedConditionVisitor.java @@ -0,0 +1,71 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.NestedCondition; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * Renderer for {@link NestedCondition}. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @since 2.0 + */ +class NestedConditionVisitor extends TypedSubtreeVisitor { + + private final RenderContext context; + private final RenderTarget target; + + private @Nullable ConditionVisitor conditionVisitor; + + NestedConditionVisitor(RenderContext context, RenderTarget target) { + + this.context = context; + this.target = target; + } + + @Override + Delegation enterNested(Visitable segment) { + + DelegatingVisitor visitor = getDelegation(segment); + + return visitor != null ? 
Delegation.delegateTo(visitor) : Delegation.retain(); + } + + @Nullable + private DelegatingVisitor getDelegation(Visitable segment) { + + if (segment instanceof Condition) { + return conditionVisitor = new ConditionVisitor(context); + } + + return null; + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (conditionVisitor != null) { + + target.onRendered("(" + conditionVisitor.getRenderedPart() + ")"); + conditionVisitor = null; + } + + return super.leaveNested(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NoopVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NoopVisitor.java new file mode 100644 index 0000000000..8f3eb0795d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NoopVisitor.java @@ -0,0 +1,26 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.relational.core.sql.render; + + +import org.springframework.data.relational.core.sql.Visitable; + +class NoopVisitor extends TypedSubtreeVisitor { + NoopVisitor(Class type) { + super(type); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NotConditionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NotConditionVisitor.java new file mode 100644 index 0000000000..058fa1c3f4 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/NotConditionVisitor.java @@ -0,0 +1,71 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Not; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * Renderer for {@link Not}. Uses a {@link RenderTarget} to call back for render results. 
+ * + * @author Jens Schauder + * @since 3.1.6 + */ +class NotConditionVisitor extends TypedSubtreeVisitor { + + private final RenderContext context; + private final RenderTarget target; + + private @Nullable ConditionVisitor conditionVisitor; + + NotConditionVisitor(RenderContext context, RenderTarget target) { + + this.context = context; + this.target = target; + } + + @Override + Delegation enterNested(Visitable segment) { + + DelegatingVisitor visitor = getDelegation(segment); + + return visitor != null ? Delegation.delegateTo(visitor) : Delegation.retain(); + } + + @Nullable + private DelegatingVisitor getDelegation(Visitable segment) { + + if (segment instanceof Condition) { + return conditionVisitor = new ConditionVisitor(context); + } + + return null; + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (conditionVisitor != null) { + + target.onRendered("NOT " + conditionVisitor.getRenderedPart()); + conditionVisitor = null; + } + + return super.leaveNested(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/OrderByClauseVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/OrderByClauseVisitor.java new file mode 100644 index 0000000000..1fc2594b8d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/OrderByClauseVisitor.java @@ -0,0 +1,117 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + + +import org.springframework.data.relational.core.sql.CaseExpression; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Expressions; +import org.springframework.data.relational.core.sql.OrderByField; +import org.springframework.data.relational.core.sql.SimpleFunction; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * {@link PartRenderer} for {@link OrderByField}s. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Chirag Tailor + * @author Koen Punt + * @author Sven Rienstra + * @since 1.1 + */ +class OrderByClauseVisitor extends TypedSubtreeVisitor implements PartRenderer { + + private final RenderContext context; + + private final StringBuilder builder = new StringBuilder(); + + @Nullable + private PartRenderer delegate; + + private boolean first = true; + + OrderByClauseVisitor(RenderContext context) { + this.context = context; + } + + @Override + Delegation enterMatched(OrderByField segment) { + + if (!first) { + builder.append(", "); + } + first = false; + + return super.enterMatched(segment); + } + + @Override + Delegation leaveMatched(OrderByField segment) { + + if (segment.getDirection() != null) { + + builder.append(" ") // + .append(segment.getDirection()); + } + + String nullPrecedence = context.getSelectRenderContext().evaluateOrderByNullHandling(segment.getNullHandling()); + if (!nullPrecedence.isEmpty()) { + + builder.append(" ") // + .append(nullPrecedence); + } + + return Delegation.leave(); + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof SimpleFunction) { + delegate = new SimpleFunctionVisitor(context); + return Delegation.delegateTo((SimpleFunctionVisitor) delegate); + } + + if (segment 
instanceof Expressions.SimpleExpression || segment instanceof CaseExpression) { + delegate = new ExpressionVisitor(context); + return Delegation.delegateTo((ExpressionVisitor) delegate); + } + + return super.enterNested(segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (delegate instanceof SimpleFunctionVisitor || delegate instanceof ExpressionVisitor) { + builder.append(delegate.getRenderedPart()); + delegate = null; + } + + if (segment instanceof Column) { + builder.append(NameRenderer.fullyQualifiedReference(context, (Column) segment)); + } + + return super.leaveNested(segment); + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/PartRenderer.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/PartRenderer.java new file mode 100644 index 0000000000..946016310f --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/PartRenderer.java @@ -0,0 +1,34 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Visitor; + +/** + * {@link Visitor} that renders a specific partial clause or expression. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +interface PartRenderer extends Visitor { + + /** + * Returns the rendered part. + * + * @return the rendered part. + */ + CharSequence getRenderedPart(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderContext.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderContext.java new file mode 100644 index 0000000000..1976e57abe --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderContext.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.dialect.InsertRenderContext; +import org.springframework.data.relational.core.sql.IdentifierProcessing; + +/** + * Render context providing {@link RenderNamingStrategy} and other resources that are required during rendering. + * + * @author Mark Paluch + * @author Mikhail Polivakha + * @author Jens Schauder + * @since 1.1 + */ +public interface RenderContext { + + /** + * Returns the configured {@link RenderNamingStrategy}. + * + * @return the {@link RenderNamingStrategy}. + */ + RenderNamingStrategy getNamingStrategy(); + + /** + * Returns the configured {@link IdentifierProcessing}. 
+ * + * @return the {@link IdentifierProcessing}. + * @since 2.0 + */ + IdentifierProcessing getIdentifierProcessing(); + + /** + * @return the {@link SelectRenderContext}. + */ + SelectRenderContext getSelectRenderContext(); + + /** + * @return the {@link InsertRenderContext} + */ + InsertRenderContext getInsertRenderContext(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderNamingStrategy.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderNamingStrategy.java new file mode 100644 index 0000000000..2ac398b06d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderNamingStrategy.java @@ -0,0 +1,94 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.function.Function; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.TableLike; +import org.springframework.data.relational.core.sql.render.NamingStrategies.DelegatingRenderNamingStrategy; +import org.springframework.util.Assert; + +/** + * Naming strategy for SQL rendering. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @see NamingStrategies + * @since 1.1 + */ +public interface RenderNamingStrategy { + + /** + * Return the {@link Column#getName() column name}. + * + * @param column the column. + * @return the {@link Column#getName() column name}. + * @see Column#getName() + */ + default SqlIdentifier getName(Column column) { + return column.getName(); + } + + /** + * Return the {@link Column#getReferenceName() column reference name}. + * + * @param column the column. + * @return the {@link Column#getReferenceName() column reference name}. + * @see Column#getReferenceName() + */ + default SqlIdentifier getReferenceName(Column column) { + return column.getReferenceName(); + } + + /** + * Return the {@link TableLike#getName() table name}. + * + * @param table the table. + * @return the {@link TableLike#getName() table name}. + * @see Table#getName() + */ + default SqlIdentifier getName(TableLike table) { + return table.getName(); + } + + /** + * Return the {@link TableLike#getReferenceName() table reference name}. + * + * @param table the table. + * @return the {@link TableLike#getReferenceName() table reference name}. + * @see TableLike#getReferenceName() + */ + default SqlIdentifier getReferenceName(TableLike table) { + return table.getReferenceName(); + } + + /** + * Applies a {@link Function mapping function} after retrieving the object (column name, column reference name, …) + * name. + * + * @param mappingFunction the function that maps an object name. + * @return a new {@link RenderNamingStrategy} applying {@link Function mapping function}. 
+ */ + default RenderNamingStrategy map(Function mappingFunction) { + + Assert.notNull(mappingFunction, "Mapping function must not be null"); + + return new DelegatingRenderNamingStrategy(this, mappingFunction); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderTarget.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderTarget.java new file mode 100644 index 0000000000..a783609d2e --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/RenderTarget.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Visitor; + +/** + * Callback interface for {@link Visitor visitors} that wish to notify a render target when they are complete with + * rendering. + * + * @author Mark Paluch + * @since 1.1 + */ +@FunctionalInterface +interface RenderTarget { + + /** + * Callback method that is invoked once the rendering for a part or expression is finished. When called multiple + * times, it's the responsibility of the implementor to ensure proper concatenation of render results. + * + * @param sequence the rendered part or expression. 
+ */ + void onRendered(CharSequence sequence); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/Renderer.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/Renderer.java new file mode 100644 index 0000000000..60dbb97dba --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/Renderer.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Delete; +import org.springframework.data.relational.core.sql.Insert; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.Update; + +/** + * SQL renderer for {@link Select} and {@link Delete} statements. + * + * @author Mark Paluch + * @since 1.1 + */ +public interface Renderer { + + /** + * Render the {@link Select} AST into a SQL statement. + * + * @param select the statement to render, must not be {@literal null}. + * @return the rendered statement. + */ + String render(Select select); + + /** + * Render the {@link Insert} AST into a SQL statement. + * + * @param insert the statement to render, must not be {@literal null}. + * @return the rendered statement. 
+ */ + String render(Insert insert); + + /** + * Render the {@link Update} AST into a SQL statement. + * + * @param update the statement to render, must not be {@literal null}. + * @return the rendered statement. + */ + String render(Update update); + + /** + * Render the {@link Delete} AST into a SQL statement. + * + * @param delete the statement to render, must not be {@literal null}. + * @return the rendered statement. + */ + String render(Delete delete); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SegmentListVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SegmentListVisitor.java new file mode 100644 index 0000000000..ed5c4c3fdc --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SegmentListVisitor.java @@ -0,0 +1,83 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.SegmentList; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.util.Assert; + +/** + * A part rendering visitor for lists of segments. It can be set up depending on the elements in the list it should + * handle and the way elements should get separated when rendered. 
+ * + * @author Jens Schauder + * @since 2.7 + */ +class SegmentListVisitor extends TypedSubtreeVisitor> implements PartRenderer { + + private final StringBuilder part = new StringBuilder(); + private final String start; + private final String separator; + private final DelegatingVisitor nestedVisitor; + + private boolean first = true; + + /** + * @param start a {@literal String} to be rendered before the first element if there is at least one element. Must not + * be {@literal null}. + * @param separator a {@literal String} to be rendered between elements. Must not be {@literal null}. + * @param nestedVisitor the {@link org.springframework.data.relational.core.sql.Visitor} responsible for rendering the + * elements of the list. Must not be {@literal null}. + */ + SegmentListVisitor(String start, String separator, DelegatingVisitor nestedVisitor) { + + Assert.notNull(start, "Start must not be null"); + Assert.notNull(separator, "Separator must not be null"); + Assert.notNull(nestedVisitor, "Nested Visitor must not be null"); + Assert.isInstanceOf(PartRenderer.class, nestedVisitor, "Nested visitor must implement PartRenderer"); + + this.start = start; + this.separator = separator; + this.nestedVisitor = nestedVisitor; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (first) { + part.append(start); + first = false; + } else { + part.append(separator); + } + + return Delegation.delegateTo(nestedVisitor); + } + + @Override + Delegation leaveNested(Visitable segment) { + + part.append(((PartRenderer) nestedVisitor).getRenderedPart()); + + return super.leaveNested(segment); + } + + @Override + public CharSequence getRenderedPart() { + + return part; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectListVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectListVisitor.java new file mode 100644 index 0000000000..b52493a1bd --- 
/dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectListVisitor.java @@ -0,0 +1,88 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Aliased; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.SelectList; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * {@link PartRenderer} for {@link SelectList}s. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class SelectListVisitor extends TypedSubtreeVisitor implements PartRenderer { + + private final RenderContext context; + private final StringBuilder builder = new StringBuilder(); + private final RenderTarget target; + private boolean requiresComma = false; + private ExpressionVisitor expressionVisitor; + // subelements. 
+ + SelectListVisitor(RenderContext context, RenderTarget target) { + + this.context = context; + this.target = target; + this.expressionVisitor = new ExpressionVisitor(context, ExpressionVisitor.AliasHandling.IGNORE); + } + + @Override + Delegation enterNested(Visitable segment) { + + if (requiresComma) { + builder.append(", "); + requiresComma = false; + } + if (segment instanceof Expression) { + return Delegation.delegateTo(expressionVisitor); + } + + return super.enterNested(segment); + } + + @Override + Delegation leaveMatched(SelectList segment) { + + target.onRendered(builder); + return super.leaveMatched(segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (segment instanceof Expression) { + + builder.append(expressionVisitor.getRenderedPart()); + requiresComma = true; + } + + if (segment instanceof Aliased) { + builder.append(" AS ").append(NameRenderer.render(context, (Aliased) segment)); + } + + return super.leaveNested(segment); + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectRenderContext.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectRenderContext.java new file mode 100644 index 0000000000..73ad670d1e --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectRenderContext.java @@ -0,0 +1,103 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import java.util.OptionalLong; +import java.util.function.Function; + +import org.springframework.data.domain.Sort; +import org.springframework.data.relational.core.dialect.OrderByNullPrecedence; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.Select; + +/** + * Render context specifically for {@code SELECT} statements. This interface declares rendering hooks that are called + * before/after/during a specific {@code SELECT} clause part. The rendering content is appended directly after/before an + * element without further whitespace processing. Hooks are responsible for adding required surrounding whitespaces. + * + * @author Mark Paluch + * @author Myeonghyeon Lee + * @author Jens Schauder + * @author Chirag Tailor + * @since 1.1 + */ +public interface SelectRenderContext { + + /** + * Customization hook: Rendition of a part after the {@code SELECT} list and before any {@code FROM} renderings. + * Renders an empty string by default. + * + * @return render {@link Function} invoked after rendering {@code SELECT} list. + */ + default Function afterSelectList() { + return select -> ""; + } + + /** + * Customization hook: Rendition of a part after {@code FROM} table. Renders an empty string by default. + * + * @return render {@link Function} invoked after rendering {@code FROM} table. 
+ */ + default Function afterFromTable() { + return select -> ""; + } + + /** + * Customization hook: Rendition of a part after {@code ORDER BY}. The rendering function is called always, regardless + * whether {@code ORDER BY} exists or not. + *
+	 * <p>
+	 * Renders lock, limit and offset clause as appropriate.
+	 * </p>
+ * + * @param hasOrderBy the actual value whether the {@link Select} statement has a {@code ORDER BY} clause. + * @return render {@link Function} invoked after rendering {@code ORDER BY}. + */ + default Function afterOrderBy(boolean hasOrderBy) { + + return select -> { + + OptionalLong limit = select.getLimit(); + OptionalLong offset = select.getOffset(); + LockMode lockMode = select.getLockMode(); + + String lockPrefix = (lockMode == null) ? "" : " FOR UPDATE"; + + if (limit.isPresent() && offset.isPresent()) { + return String.format("%s OFFSET %d ROWS FETCH FIRST %d ROWS ONLY", lockPrefix, offset.getAsLong(), + limit.getAsLong()); + } + if (limit.isPresent()) { + return String.format("%s FETCH FIRST %d ROWS ONLY", lockPrefix, limit.getAsLong()); + } + if (offset.isPresent()) { + return String.format("%s OFFSET %d ROWS", lockPrefix, offset.getAsLong()); + } + + return lockPrefix; + }; + } + + /** + * Customization hook: Rendition of the null handling option for an {@code ORDER BY} sort expression. + * + * @param nullHandling the {@link Sort.NullHandling} for the {@code ORDER BY} sort expression. Must not be {@literal null}. + * @return render {@link String} SQL text to be included in an {@code ORDER BY} sort expression. + * @since 2.4 + */ + default String evaluateOrderByNullHandling(Sort.NullHandling nullHandling) { + return OrderByNullPrecedence.NONE.evaluate(nullHandling); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectStatementVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectStatementVisitor.java new file mode 100644 index 0000000000..cc2ec76b5b --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SelectStatementVisitor.java @@ -0,0 +1,146 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.Join; +import org.springframework.data.relational.core.sql.OrderByField; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.SelectList; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Where; + +/** + * {@link PartRenderer} for {@link Select} statements. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @since 1.1 + */ +class SelectStatementVisitor extends DelegatingVisitor implements PartRenderer { + + private final RenderContext context; + private final SelectRenderContext selectRenderContext; + + private StringBuilder builder = new StringBuilder(); + private StringBuilder selectList = new StringBuilder(); + private StringBuilder from = new StringBuilder(); + private StringBuilder join = new StringBuilder(); + private StringBuilder where = new StringBuilder(); + + private SelectListVisitor selectListVisitor; + private OrderByClauseVisitor orderByClauseVisitor; + private FromClauseVisitor fromClauseVisitor; + private WhereClauseVisitor whereClauseVisitor; + + SelectStatementVisitor(RenderContext context) { + + this.context = context; + this.selectRenderContext = context.getSelectRenderContext(); + this.selectListVisitor = new SelectListVisitor(context, selectList::append); + this.orderByClauseVisitor = new OrderByClauseVisitor(context); + this.fromClauseVisitor = new FromClauseVisitor(context, it -> { + + if (from.length() != 0) { + from.append(", "); + } + + from.append(it); + }); + + this.whereClauseVisitor = new WhereClauseVisitor(context, where::append); + } + + @Override + public Delegation doEnter(Visitable segment) { + + if (segment instanceof SelectList) { + return Delegation.delegateTo(selectListVisitor); + } + + if (segment instanceof OrderByField) { + return Delegation.delegateTo(orderByClauseVisitor); + } + + if (segment instanceof From) { + return Delegation.delegateTo(fromClauseVisitor); + } + + if (segment instanceof Join) { + return Delegation.delegateTo(new JoinVisitor(context, it -> { + + if (join.length() != 0) { + join.append(' '); + } + + join.append(it); + })); + } + + if (segment instanceof Where) { + return Delegation.delegateTo(whereClauseVisitor); + } + + return Delegation.retain(); + } + + @Override + public Delegation doLeave(Visitable segment) { + + 
if (segment instanceof Select select) { + + builder.append("SELECT "); + + if (select.isDistinct()) { + builder.append("DISTINCT "); + } + + builder.append(selectList); + builder.append(selectRenderContext.afterSelectList().apply(select)); + + if (from.length() != 0) { + builder.append(" FROM ").append(from); + } + + builder.append(selectRenderContext.afterFromTable().apply(select)); + + if (join.length() != 0) { + builder.append(' ').append(join); + } + + if (where.length() != 0) { + builder.append(" WHERE ").append(where); + } + + CharSequence orderBy = orderByClauseVisitor.getRenderedPart(); + if (orderBy.length() != 0) { + builder.append(" ORDER BY ").append(orderBy); + } + + builder.append(selectRenderContext.afterOrderBy(orderBy.length() != 0).apply(select)); + + return Delegation.leave(); + } + + return Delegation.retain(); + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SimpleFunctionVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SimpleFunctionVisitor.java new file mode 100644 index 0000000000..8e98a904b6 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SimpleFunctionVisitor.java @@ -0,0 +1,74 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.SimpleFunction; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Renderer for {@link org.springframework.data.relational.core.sql.SimpleFunction}. Uses a {@link RenderTarget} to call + * back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class SimpleFunctionVisitor extends TypedSingleConditionRenderSupport implements PartRenderer { + + private final StringBuilder part = new StringBuilder(); + private boolean needsComma = false; + + SimpleFunctionVisitor(RenderContext context) { + super(context); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (hasDelegatedRendering()) { + + if (needsComma) { + part.append(", "); + } + + part.append(consumeRenderedPart()); + needsComma = true; + } + + return super.leaveNested(segment); + } + + @Override + Delegation enterMatched(SimpleFunction segment) { + + part.append(segment.getFunctionName()).append("("); + + return super.enterMatched(segment); + } + + @Override + Delegation leaveMatched(SimpleFunction segment) { + + part.append(")"); + + return super.leaveMatched(segment); + } + + @Override + public CharSequence getRenderedPart() { + return part; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SimpleRenderContext.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SimpleRenderContext.java new file mode 100644 index 0000000000..dc2fe3b0e9 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SimpleRenderContext.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.dialect.InsertRenderContext; +import org.springframework.data.relational.core.dialect.InsertRenderContexts; +import org.springframework.data.relational.core.sql.IdentifierProcessing; + +/** + * Default {@link RenderContext} implementation. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +final class SimpleRenderContext implements RenderContext { + + private final RenderNamingStrategy namingStrategy; + + SimpleRenderContext(RenderNamingStrategy namingStrategy) { + this.namingStrategy = namingStrategy; + } + + @Override + public IdentifierProcessing getIdentifierProcessing() { + return IdentifierProcessing.NONE; + } + + @Override + public SelectRenderContext getSelectRenderContext() { + return DefaultSelectRenderContext.INSTANCE; + } + + @Override + public InsertRenderContext getInsertRenderContext() { + return InsertRenderContexts.DEFAULT; + } + + public RenderNamingStrategy getNamingStrategy() { + return this.namingStrategy; + } + + @Override + public String toString() { + + return "SimpleRenderContext{" + "namingStrategy=" + namingStrategy + '}'; + } + + enum DefaultSelectRenderContext implements SelectRenderContext { + INSTANCE + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SqlRenderer.java 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SqlRenderer.java new file mode 100644 index 0000000000..c7b705de14 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SqlRenderer.java @@ -0,0 +1,155 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Delete; +import org.springframework.data.relational.core.sql.Insert; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.Update; +import org.springframework.util.Assert; + +/** + * SQL renderer for {@link Select} and {@link Delete} statements. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + * @see RenderContext + */ +public class SqlRenderer implements Renderer { + + private final RenderContext context; + + private SqlRenderer(RenderContext context) { + + Assert.notNull(context, "RenderContext must not be null"); + + this.context = context; + } + + /** + * Creates a new {@link SqlRenderer}. + * + * @return the renderer. + */ + public static SqlRenderer create() { + return new SqlRenderer(new SimpleRenderContext(NamingStrategies.asIs())); + } + + /** + * Creates a new {@link SqlRenderer} using a {@link RenderContext}. 
+ * + * @param context must not be {@literal null}. + * @return the renderer. + */ + public static SqlRenderer create(RenderContext context) { + return new SqlRenderer(context); + } + + /** + * Renders a {@link Select} statement into its SQL representation. + * + * @param select must not be {@literal null}. + * @return the rendered statement. + */ + public static String toString(Select select) { + return create().render(select); + } + + /** + * Renders a {@link Insert} statement into its SQL representation. + * + * @param insert must not be {@literal null}. + * @return the rendered statement. + */ + public static String toString(Insert insert) { + return create().render(insert); + } + + /** + * Renders a {@link Update} statement into its SQL representation. + * + * @param update must not be {@literal null}. + * @return the rendered statement. + */ + public static String toString(Update update) { + return create().render(update); + } + + /** + * Renders a {@link Delete} statement into its SQL representation. + * + * @param delete must not be {@literal null}. + * @return the rendered statement. + */ + public static String toString(Delete delete) { + return create().render(delete); + } + + /** + * Render the {@link Select} AST into a SQL statement. + * + * @return the rendered statement. + */ + @Override + public String render(Select select) { + + SelectStatementVisitor visitor = new SelectStatementVisitor(context); + select.visit(visitor); + + return visitor.getRenderedPart().toString(); + } + + /** + * Render the {@link Insert} AST into a SQL statement. + * + * @return the rendered statement. + */ + @Override + public String render(Insert insert) { + InsertStatementVisitor visitor = new InsertStatementVisitor(context); + insert.visit(visitor); + return visitor.getRenderedPart().toString(); + } + + /** + * Render the {@link Update} AST into a SQL statement. + * + * @return the rendered statement. 
+ */ + @Override + public String render(Update update) { + + UpdateStatementVisitor visitor = new UpdateStatementVisitor(context); + update.visit(visitor); + + return visitor.getRenderedPart().toString(); + } + + /** + * Render the {@link Delete} AST into a SQL statement. + * + * @return the rendered statement. + */ + @Override + public String render(Delete delete) { + + DeleteStatementVisitor visitor = new DeleteStatementVisitor(context); + delete.visit(visitor); + + return visitor.getRenderedPart().toString(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SubselectVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SubselectVisitor.java new file mode 100644 index 0000000000..e57023989a --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/SubselectVisitor.java @@ -0,0 +1,52 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Subselect; + +public class SubselectVisitor extends TypedSubtreeVisitor { + + private final RenderContext context; + private final RenderTarget parent; + + private final SelectStatementVisitor delegate; + private final StringBuilder builder = new StringBuilder("("); + + public SubselectVisitor(RenderContext context, RenderTarget parent) { + + this.context = context; + this.parent = parent; + + this.delegate = new SelectStatementVisitor(context); + } + + @Override + Delegation enterMatched(Subselect segment) { + return Delegation.delegateTo(delegate); + } + + @Override + Delegation leaveMatched(Subselect segment) { + + builder.append(delegate.getRenderedPart()); + builder.append(") "); + + parent.onRendered(builder); + + return super.leaveMatched(segment); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/TypedSingleConditionRenderSupport.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/TypedSingleConditionRenderSupport.java new file mode 100644 index 0000000000..e338008c5e --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/TypedSingleConditionRenderSupport.java @@ -0,0 +1,90 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.When; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Support class for {@link TypedSubtreeVisitor typed visitors} that want to render a single {@link Condition} and + * delegate nested {@link Expression} and {@link Condition} rendering. + * + * @author Mark Paluch + * @author Sven Rienstra + * @since 1.1 + */ +abstract class TypedSingleConditionRenderSupport extends TypedSubtreeVisitor { + + private final RenderContext context; + private @Nullable PartRenderer current; + + TypedSingleConditionRenderSupport(RenderContext context) { + this.context = context; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof When) { + WhenVisitor visitor = new WhenVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof Condition) { + ConditionVisitor visitor = new ConditionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + if (segment instanceof Expression) { + ExpressionVisitor visitor = new ExpressionVisitor(context); + current = visitor; + return Delegation.delegateTo(visitor); + } + + throw new IllegalStateException("Cannot provide visitor for " + segment); + } + + /** + * Returns whether rendering was delegated to a {@link ExpressionVisitor} or {@link ConditionVisitor}. + * + * @return {@literal true} when rendering was delegated to a {@link ExpressionVisitor} or {@link ConditionVisitor}. 
+ */ + protected boolean hasDelegatedRendering() { + return current != null; + } + + /** + * Consumes the delegated rendering part. Call {@link #hasDelegatedRendering()} to check whether rendering was + * actually delegated. Consumption releases the delegated rendered. + * + * @return the delegated rendered part. + * @throws IllegalStateException if rendering was not delegate. + */ + protected CharSequence consumeRenderedPart() { + + Assert.state(hasDelegatedRendering(), "Rendering not delegated; Cannot consume delegated rendering part"); + + PartRenderer current = this.current; + this.current = null; + return current.getRenderedPart(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/TypedSubtreeVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/TypedSubtreeVisitor.java new file mode 100644 index 0000000000..149785a622 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/TypedSubtreeVisitor.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import java.util.function.Predicate; + +import org.springframework.core.ResolvableType; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Visitor; +import org.springframework.lang.Nullable; +import org.springframework.util.ConcurrentReferenceHashMap; + +/** + * Type-filtering {@link DelegatingVisitor visitor} applying a {@link Class type filter} derived from the generic type + * parameter. Typically used as base class for {@link Visitor visitors} that wish to apply hierarchical processing based + * on a well-defined entry {@link Visitor segment}. + *
+ * <p>
+ * Filtering is a three-way process:
+ * <ol>
+ * <li>Ignores elements that do not match the filter {@link Predicate}.</li>
+ * <li>{@link #enterMatched(Visitable) enter}/{@link #leaveMatched(Visitable) leave} matched callbacks for the
+ * {@link Visitable segment} that matches the {@link Predicate}.</li>
+ * <li>{@link #enterNested(Visitable) enter}/{@link #leaveNested(Visitable) leave} nested callbacks for direct/nested
+ * children of the matched {@link Visitable} until {@link #leaveMatched(Visitable) leaving the matched}
+ * {@link Visitable}.</li>
+ * </ol>
+ * + * @author Mark Paluch + * @since 1.1 + * @see FilteredSubtreeVisitor + */ +abstract class TypedSubtreeVisitor extends DelegatingVisitor { + + private static final ConcurrentReferenceHashMap, ResolvableType> refCache = new ConcurrentReferenceHashMap<>(); + + private final ResolvableType type; + private @Nullable Visitable currentSegment; + + enum Assignable { + YES, NO, + } + + /** + * Creates a new {@link TypedSubtreeVisitor}. + */ + TypedSubtreeVisitor() { + this.type = refCache.computeIfAbsent(this.getClass(), + key -> ResolvableType.forClass(key).as(TypedSubtreeVisitor.class).getGeneric(0)); + } + + /** + * Creates a new {@link TypedSubtreeVisitor} with an explicitly provided type. + */ + TypedSubtreeVisitor(Class type) { + this.type = refCache.computeIfAbsent(type, key -> ResolvableType.forClass(type)); + } + + /** + * {@link Visitor#enter(Visitable) Enter} callback for a {@link Visitable} that this {@link Visitor} is responsible + * for. The default implementation retains delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or + * {@link Delegation#delegateTo(DelegatingVisitor)}. + * @see Delegation#retain() + */ + Delegation enterMatched(T segment) { + return Delegation.retain(); + } + + /** + * {@link Visitor#enter(Visitable) Enter} callback for a nested {@link Visitable}. The default implementation retains + * delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or + * {@link Delegation#delegateTo(DelegatingVisitor)}. + * @see Delegation#retain() + */ + Delegation enterNested(Visitable segment) { + return Delegation.retain(); + } + + /** + * {@link Visitor#leave(Visitable) Leave} callback for the matched {@link Visitable}. The default implementation steps + * back from delegation by default. 
+ * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or {@link Delegation#leave()}. + * @see Delegation#leave() + */ + Delegation leaveMatched(T segment) { + return Delegation.leave(); + } + + /** + * {@link Visitor#leave(Visitable) Leave} callback for a nested {@link Visitable}. The default implementation retains + * delegation by default. + * + * @param segment the segment, must not be {@literal null}. + * @return delegation options. Can be either {@link Delegation#retain()} or {@link Delegation#leave()}. + * @see Delegation#retain() + */ + Delegation leaveNested(Visitable segment) { + return Delegation.retain(); + } + + @SuppressWarnings("unchecked") + @Override + public final Delegation doEnter(Visitable segment) { + + if (currentSegment == null) { + + if (type.isInstance(segment)) { + + currentSegment = segment; + return enterMatched((T) segment); + } + } else { + return enterNested(segment); + } + + return Delegation.retain(); + } + + @SuppressWarnings("unchecked") + @Override + public final Delegation doLeave(Visitable segment) { + + if (currentSegment == null) { + return Delegation.leave(); + } else if (segment == currentSegment) { + currentSegment = null; + return leaveMatched((T) segment); + } else { + return leaveNested(segment); + } + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/UpdateStatementVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/UpdateStatementVisitor.java new file mode 100644 index 0000000000..e2f0836d7b --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/UpdateStatementVisitor.java @@ -0,0 +1,111 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Assignment; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.Update; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Where; + +/** + * {@link PartRenderer} for {@link Update} statements. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +class UpdateStatementVisitor extends DelegatingVisitor implements PartRenderer { + + private StringBuilder builder = new StringBuilder(); + private StringBuilder table = new StringBuilder(); + private StringBuilder assignments = new StringBuilder(); + private StringBuilder where = new StringBuilder(); + + private FromTableVisitor tableVisitor; + private AssignmentVisitor assignmentVisitor; + private WhereClauseVisitor whereClauseVisitor; + + UpdateStatementVisitor(RenderContext context) { + + this.tableVisitor = new FromTableVisitor(context, it -> { + + if (table.length() != 0) { + table.append(", "); + } + + table.append(it); + }); + + this.assignmentVisitor = new AssignmentVisitor(context, it -> { + + if (assignments.length() != 0) { + assignments.append(", "); + } + + assignments.append(it); + }); + + this.whereClauseVisitor = new WhereClauseVisitor(context, where::append); + } + + @Override + public Delegation doEnter(Visitable segment) { + + if (segment instanceof Table) { + return Delegation.delegateTo(this.tableVisitor); + } + + if (segment instanceof Assignment) { + return Delegation.delegateTo(this.assignmentVisitor); + } + + if (segment instanceof Where) { + return Delegation.delegateTo(this.whereClauseVisitor); + } + + return Delegation.retain(); + } + + @Override + public Delegation doLeave(Visitable segment) { + + if (segment instanceof Update) { + + builder.append("UPDATE"); + + if (table.length() != 0) { + builder.append(" ").append(table); + } + + if (assignments.length() != 0) { + builder.append(" SET ").append(assignments); + } + + if (where.length() != 0) { + builder.append(" WHERE ").append(where); + } + + return Delegation.leave(); + } + + return Delegation.retain(); + } + + @Override + public CharSequence getRenderedPart() { + return builder; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ValuesVisitor.java 
b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ValuesVisitor.java new file mode 100644 index 0000000000..bf548325b3 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/ValuesVisitor.java @@ -0,0 +1,78 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Values; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.lang.Nullable; + +/** + * Renderer for {@link Values}. Uses a {@link RenderTarget} to call back for render results. 
+ * + * @author Mark Paluch + * @since 1.1 + */ +class ValuesVisitor extends TypedSubtreeVisitor { + + private final RenderTarget parent; + private final StringBuilder builder = new StringBuilder(); + private final RenderContext context; + + private @Nullable ExpressionVisitor current; + private boolean first = true; + + ValuesVisitor(RenderContext context, RenderTarget parent) { + + this.context = context; + this.parent = parent; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Expression) { + this.current = new ExpressionVisitor(context); + return Delegation.delegateTo(this.current); + } + + return super.enterNested(segment); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (this.current != null) { + + if (first) { + first = false; + } else { + builder.append(", "); + } + + builder.append(this.current.getRenderedPart()); + this.current = null; + } + + return super.leaveNested(segment); + } + + @Override + Delegation leaveMatched(Values segment) { + parent.onRendered(builder); + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/WhenVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/WhenVisitor.java new file mode 100644 index 0000000000..ed872d805c --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/WhenVisitor.java @@ -0,0 +1,49 @@ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.When; + +/** + * Renderer for {@link When} segments. 
+ * + * @author Sven Rienstra + * @since 3.4 + */ +public class WhenVisitor extends TypedSingleConditionRenderSupport implements PartRenderer { + + private final StringBuilder part = new StringBuilder(); + private boolean conditionRendered; + + WhenVisitor(RenderContext context) { + super(context); + } + + @Override + Delegation leaveNested(Visitable segment) { + + if (hasDelegatedRendering()) { + + if (conditionRendered) { + part.append(" THEN "); + } + + part.append(consumeRenderedPart()); + conditionRendered = true; + } + + return super.leaveNested(segment); + } + + @Override + Delegation enterMatched(When segment) { + + part.append("WHEN "); + + return super.enterMatched(segment); + } + + @Override + public CharSequence getRenderedPart() { + return part; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/WhereClauseVisitor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/WhereClauseVisitor.java new file mode 100644 index 0000000000..20a9303ea3 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/WhereClauseVisitor.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Visitable; +import org.springframework.data.relational.core.sql.Where; + +/** + * Renderer for {@link Where} segments. Uses a {@link RenderTarget} to call back for render results. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 1.1 + */ +class WhereClauseVisitor extends TypedSubtreeVisitor { + + private final RenderTarget parent; + private final ConditionVisitor conditionVisitor; + + WhereClauseVisitor(RenderContext context, RenderTarget parent) { + this.conditionVisitor = new ConditionVisitor(context); + this.parent = parent; + } + + @Override + Delegation enterNested(Visitable segment) { + + if (segment instanceof Condition) { + return Delegation.delegateTo(conditionVisitor); + } + + return super.enterNested(segment); + } + + @Override + Delegation leaveMatched(Where segment) { + + parent.onRendered(conditionVisitor.getRenderedPart()); + return super.leaveMatched(segment); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/package-info.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/package-info.java new file mode 100644 index 0000000000..34c541d4c3 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sql/render/package-info.java @@ -0,0 +1,9 @@ +/** + * SQL rendering utilities to render SQL from the Statement Builder API. 
+ */ +@NonNullApi +@NonNullFields +package org.springframework.data.relational.core.sql.render; + +import org.springframework.lang.NonNullApi; +import org.springframework.lang.NonNullFields; diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/AliasFactory.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/AliasFactory.java new file mode 100644 index 0000000000..3f17f92182 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/AliasFactory.java @@ -0,0 +1,93 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sqlgeneration; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.AggregatePathTraversal; + +/** + * Creates aliases to be used in SQL generation + * + * @author Jens Schauder + * @since 3.2 + */ +public class AliasFactory { + private final SingleAliasFactory columnAliases = new SingleAliasFactory("c"); + private final SingleAliasFactory tableAliases = new SingleAliasFactory("t"); + private final SingleAliasFactory rowNumberAliases = new SingleAliasFactory("rn"); + private final SingleAliasFactory rowCountAliases = new SingleAliasFactory("rc"); + private final SingleAliasFactory backReferenceAliases = new SingleAliasFactory("br"); + private final SingleAliasFactory keyAliases = new SingleAliasFactory("key"); + private final AtomicInteger counter = new AtomicInteger(); + + private static String sanitize(String name) { + return name.replaceAll("\\W", ""); + } + + public String getColumnAlias(AggregatePath path) { + return columnAliases.getOrCreateFor(path); + } + + public String getTableAlias(AggregatePath path) { + return tableAliases.getOrCreateFor(path); + } + + public String getRowNumberAlias(AggregatePath path) { + return rowNumberAliases.getOrCreateFor(AggregatePathTraversal.getTableOwningPath(path)); + } + + public String getRowCountAlias(AggregatePath path) { + return rowCountAliases.getOrCreateFor(path); + } + + public String getBackReferenceAlias(AggregatePath path) { + return backReferenceAliases.getOrCreateFor(path); + } + + public String getKeyAlias(AggregatePath path) { + return keyAliases.getOrCreateFor(path); + } + + private class SingleAliasFactory { + private final String prefix; + private final Map cache = new ConcurrentHashMap<>(); + + SingleAliasFactory(String prefix) { + this.prefix = prefix 
+ "_"; + } + + String getOrCreateFor(AggregatePath path) { + return cache.computeIfAbsent(path, this::createName); + } + + private String createName(AggregatePath path) { + return prefix + getName(path) + "_" + (counter.incrementAndGet()); + } + } + + private static String getName(AggregatePath path) { + return sanitize( // + path.isEntity() // + ? path.getTableInfo().qualifiedTableName().getReference() // + : path.getColumnInfo().name().getReference()) // + .toLowerCase(); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/SingleQuerySqlGenerator.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/SingleQuerySqlGenerator.java new file mode 100644 index 0000000000..65b0ff095f --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/SingleQuerySqlGenerator.java @@ -0,0 +1,443 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sqlgeneration; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.*; +import org.springframework.data.relational.core.sql.render.SqlRenderer; +import org.springframework.lang.Nullable; + +/** + * A {@link SqlGenerator} that creates SQL statements for loading complete aggregates with a single statement. 
+ * + * @author Jens Schauder + * @since 3.2 + */ +public class SingleQuerySqlGenerator implements SqlGenerator { + + private final RelationalMappingContext context; + private final Dialect dialect; + private final AliasFactory aliases; + + public SingleQuerySqlGenerator(RelationalMappingContext context, AliasFactory aliasFactory, Dialect dialect) { + + this.context = context; + this.aliases = aliasFactory; + this.dialect = dialect; + } + + @Override + public String findAll(RelationalPersistentEntity aggregate, @Nullable Condition condition) { + return createSelect(aggregate, condition); + } + + String createSelect(RelationalPersistentEntity aggregate, @Nullable Condition condition) { + + AggregatePath rootPath = context.getAggregatePath(aggregate); + QueryMeta queryMeta = createInlineQuery(rootPath, condition); + InlineQuery rootQuery = queryMeta.inlineQuery; + List columns = new ArrayList<>(queryMeta.selectableExpressions); + + List rownumbers = new ArrayList<>(); + rownumbers.add(queryMeta.rowNumber); + + PersistentPropertyPaths entityPaths = context + .findPersistentPropertyPaths(aggregate.getType(), PersistentProperty::isEntity); + List inlineQueries = createInlineQueries(entityPaths); + inlineQueries.forEach(qm -> { + columns.addAll(qm.selectableExpressions); + rownumbers.add(qm.rowNumber); + }); + + Expression totalRownumber = rownumbers.size() > 1 ? 
greatest(rownumbers).as("rn") + : new AliasedExpression(rownumbers.get(0), "rn"); + columns.add(totalRownumber); + + InlineQuery inlineQuery = createMainSelect(columns, rootPath, rootQuery, inlineQueries); + Expression rootId = just(aliases.getColumnAlias(rootPath.append(aggregate.getRequiredIdProperty()))); + + List selectList = getSelectList(queryMeta, inlineQueries, rootId); + Select fullQuery = StatementBuilder.select(selectList).from(inlineQuery).orderBy(rootId, just("rn")).build(false); + + return SqlRenderer.create(new RenderContextFactory(dialect).createRenderContext()).render(fullQuery); + } + + private static List getSelectList(QueryMeta queryMeta, List inlineQueries, Expression rootId) { + + List expressions = new ArrayList<>(inlineQueries.size() + queryMeta.simpleColumns.size() + 8); + + queryMeta.simpleColumns + .forEach(e -> expressions.add(filteredColumnExpression(queryMeta.rowNumber.toString(), e.toString()))); + + for (QueryMeta meta : inlineQueries) { + + meta.simpleColumns + .forEach(e -> expressions.add(filteredColumnExpression(meta.rowNumber.toString(), e.toString()))); + + if (meta.id != null) { + expressions.add(meta.id); + } + if (meta.key != null) { + expressions.add(meta.key); + } + } + + expressions.add(rootId); + return expressions; + } + + private InlineQuery createMainSelect(List columns, AggregatePath rootPath, InlineQuery rootQuery, + List inlineQueries) { + + SelectBuilder.SelectJoin select = StatementBuilder.select(columns).from(rootQuery); + select = applyJoins(rootPath, inlineQueries, select); + + SelectBuilder.BuildSelect buildSelect = applyWhereCondition(inlineQueries, select); + return InlineQuery.create(buildSelect.build(false), "main"); + } + + /** + * Creates inline queries for all entities referenced by the paths passed as an argument. + * + * @param paths the paths to consider. + * @return a {@link Map} that contains all the inline queries indexed by the path to the entity that gets loaded by + * the subquery. 
+ */ + private List createInlineQueries(PersistentPropertyPaths paths) { + + List inlineQueries = new ArrayList<>(); + + for (PersistentPropertyPath ppp : paths) { + + QueryMeta queryMeta = createInlineQuery(context.getAggregatePath(ppp), null); + inlineQueries.add(queryMeta); + } + return inlineQueries; + } + + /** + * Creates a single inline query for the given basePath. The query selects all the columns for the entity plus a + * rownumber and a rowcount expression. The first numbers all rows of the subselect sequentially starting from 1. The + * rowcount contains the total number of child rows. All selected expressions are globally uniquely aliased and are + * referenced by that alias in the rest of the query. This ensures that we don't run into problems with column names + * that are not unique across tables and also the generated SQL doesn't contain quotes and funny column names, making + * them easier to understand and also potentially shorter. + * + * @param basePath the path for which to create the inline query. + * @param condition a condition that is to be applied to the query. May be {@literal null}. + * @return an inline query for the given path. + */ + private QueryMeta createInlineQuery(AggregatePath basePath, @Nullable Condition condition) { + + RelationalPersistentEntity entity = basePath.getRequiredLeafEntity(); + Table table = Table.create(entity.getQualifiedTableName()); + + List paths = getAggregatePaths(basePath, entity); + List columns = new ArrayList<>(); + + String rowNumberAlias = aliases.getRowNumberAlias(basePath); + Expression rownumber = basePath.isRoot() ? new AliasedExpression(SQL.literalOf(1), rowNumberAlias) + : createRowNumberExpression(basePath, table, rowNumberAlias); + columns.add(rownumber); + + String rowCountAlias = aliases.getRowCountAlias(basePath); + Expression count = basePath.isRoot() ? 
new AliasedExpression(SQL.literalOf(1), rowCountAlias) + : AnalyticFunction.create("count", Expressions.just("*")) + .partitionBy(table.column(basePath.getTableInfo().reverseColumnInfo().name())).as(rowCountAlias); + columns.add(count); + + String backReferenceAlias = null; + String keyAlias = null; + + if (!basePath.isRoot()) { + + backReferenceAlias = aliases.getBackReferenceAlias(basePath); + columns.add(table.column(basePath.getTableInfo().reverseColumnInfo().name()).as(backReferenceAlias)); + + keyAlias = aliases.getKeyAlias(basePath); + Expression keyExpression = basePath.isQualified() + ? table.column(basePath.getTableInfo().qualifierColumnInfo().name()).as(keyAlias) + : createRowNumberExpression(basePath, table, keyAlias); + columns.add(keyExpression); + } + + String id = getIdentifierProperty(paths); + List columnAliases = getColumnAliases(table, paths, columns); + SelectBuilder.SelectWhere select = StatementBuilder.select(columns).from(table); + SelectBuilder.BuildSelect buildSelect = condition != null ? 
select.where(condition) : select; + + InlineQuery inlineQuery = InlineQuery.create(buildSelect.build(false), aliases.getTableAlias(basePath)); + return QueryMeta.of(basePath, inlineQuery, columnAliases, just(id), just(backReferenceAlias), just(keyAlias), + just(rowNumberAlias), just(rowCountAlias)); + } + + private List getColumnAliases(Table table, List paths, List columns) { + + List columnAliases = new ArrayList<>(); + for (AggregatePath path : paths) { + + String alias = aliases.getColumnAlias(path); + if (!path.getRequiredLeafProperty().isIdProperty()) { + columnAliases.add(just(alias)); + } + columns.add(table.column(path.getColumnInfo().name()).as(alias)); + } + return columnAliases; + } + + private static List getAggregatePaths(AggregatePath basePath, RelationalPersistentEntity entity) { + + List paths = new ArrayList<>(); + + for (RelationalPersistentProperty property : entity) { + if (!property.isEntity()) { + paths.add(basePath.append(property)); + } + } + + return paths; + } + + @Nullable + private String getIdentifierProperty(List paths) { + + for (AggregatePath path : paths) { + if (path.getRequiredLeafProperty().isIdProperty()) { + return aliases.getColumnAlias(path); + } + } + + return null; + } + + private static AnalyticFunction createRowNumberExpression(AggregatePath basePath, Table table, + String rowNumberAlias) { + return AnalyticFunction.create("row_number") // + .partitionBy(table.column(basePath.getTableInfo().reverseColumnInfo().name())) // + .orderBy(table.column(basePath.getTableInfo().reverseColumnInfo().name())) // + .as(rowNumberAlias); + } + + /** + * Adds joins to a select. + * + * @param rootPath the AggregatePath that gets selected by the select in question. + * @param inlineQueries all the inline queries to added as joins as returned by + * {@link #createInlineQueries(PersistentPropertyPaths)} + * @param select the select to modify. 
+ * @return the original select but with added joins + */ + private SelectBuilder.SelectJoin applyJoins(AggregatePath rootPath, List inlineQueries, + SelectBuilder.SelectJoin select) { + + RelationalPersistentProperty rootIdProperty = rootPath.getRequiredIdProperty(); + AggregatePath rootIdPath = rootPath.append(rootIdProperty); + for (QueryMeta queryMeta : inlineQueries) { + + AggregatePath path = queryMeta.basePath(); + String backReferenceAlias = aliases.getBackReferenceAlias(path); + Comparison joinCondition = Conditions.isEqual(Expressions.just(aliases.getColumnAlias(rootIdPath)), + Expressions.just(backReferenceAlias)); + select = select.leftOuterJoin(queryMeta.inlineQuery).on(joinCondition); + } + + return select; + } + + /** + * Applies a where condition to the select. The Where condition is constructed such that one root and multiple child + * selects are combined such that. + *
    + *
  1. all child elements with a given rn become part of a single row. I.e. all child rows with for example rownumber + * 3 are contained in a single row
  2. + *
  3. if for a given rownumber no matching element is present for a given child the columns for that child are either + * null (when there is no child elements at all) or the values for rownumber 1 are used for that child
  4. + *
+ * + * @param inlineQueries all in the inline queries for all the children, as returned by + * {@link #createInlineQueries(PersistentPropertyPaths)} + * @param select the select to which the where clause gets added. + * @return the modified select. + */ + private SelectBuilder.SelectOrdered applyWhereCondition(List inlineQueries, + SelectBuilder.SelectJoin select) { + + SelectBuilder.SelectWhere selectWhere = (SelectBuilder.SelectWhere) select; + + if (inlineQueries.isEmpty()) { + return selectWhere; + } + + Condition joins = null; + + for (int left = 0; left < inlineQueries.size(); left++) { + + QueryMeta leftQueryMeta = inlineQueries.get(left); + AggregatePath leftPath = leftQueryMeta.basePath; + Expression leftRowNumber = just(aliases.getRowNumberAlias(leftPath)); + Expression leftRowCount = just(aliases.getRowCountAlias(leftPath)); + + for (int right = left + 1; right < inlineQueries.size(); right++) { + + QueryMeta rightQueryMeta = inlineQueries.get(right); + AggregatePath rightPath = rightQueryMeta.basePath; + Expression rightRowNumber = just(aliases.getRowNumberAlias(rightPath)); + Expression rightRowCount = just(aliases.getRowCountAlias(rightPath)); + + Condition mutualJoin = Conditions.isEqual(leftRowNumber, rightRowNumber).or(Conditions.isNull(leftRowNumber)) + .or(Conditions.isNull(rightRowNumber)) + .or(Conditions.nest(Conditions.isGreater(leftRowNumber, rightRowCount) + .and(Conditions.isEqual(rightRowNumber, SQL.literalOf(1))))) + .or(Conditions.nest(Conditions.isGreater(rightRowNumber, leftRowCount) + .and(Conditions.isEqual(leftRowNumber, SQL.literalOf(1))))); + + mutualJoin = Conditions.nest(mutualJoin); + + if (joins == null) { + joins = mutualJoin; + } else { + joins = joins.and(mutualJoin); + } + } + } + + return selectWhere.where(joins); + } + + @Override + public AliasFactory getAliasFactory() { + return aliases; + } + + /** + * Constructs SQL of the form {@code CASE WHEN x = rn THEN alias ELSE NULL END AS ALIAS}. 
This expression is used to + * replace values that would appear multiple times in the result with {@code null} values in all but the first + * occurrence. Without this the result for an aggregate root with a single collection item would look like this: + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * root valuechild value
root1child1
root1child2
root1child3
root1child4
+ * This expression transforms this into + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * root valuechild value
root1child1
nullchild2
nullchild3
nullchild4
+ * + * @param rowNumberAlias the alias of the rownumber column of the subselect under consideration. This determines if + * the other value is replaced by null or not. + * @param alias the column potentially to be replaced by null + * @return a SQL expression. + */ + private static Expression filteredColumnExpression(String rowNumberAlias, String alias) { + return just(String.format("case when %s = rn THEN %s else null end as %s", rowNumberAlias, alias, alias)); + } + + private static Expression just(String alias) { + if (alias == null) { + return null; + } + return Expressions.just(alias); + } + + /** + * Constructs a SQL function of the following form + * {@code GREATEST(Coalesce(x1, 1), Coalesce(x2, 1), ..., Coalesce(xN, 1)}. this is used for cobining rownumbers from + * different child tables. The {@code coalesce} is used because the values {@code x1 ... xN} might be {@code null} and + * we want {@code null} to be equivalent with the first entry. + * + * @param expressions the different values to combined. 
+ */ + private static SimpleFunction greatest(List expressions) { + + List guarded = new ArrayList<>(); + for (Expression expression : expressions) { + guarded.add(Functions.coalesce(expression, SQL.literalOf(1))); + } + return Functions.greatest(guarded); + } + + record QueryMeta(AggregatePath basePath, InlineQuery inlineQuery, Collection simpleColumns, + Collection selectableExpressions, Expression id, Expression backReference, Expression key, + Expression rowNumber, Expression rowCount) { + + static QueryMeta of(AggregatePath basePath, InlineQuery inlineQuery, Collection simpleColumns, + Expression id, Expression backReference, Expression key, Expression rowNumber, Expression rowCount) { + + List selectableExpressions = new ArrayList<>(simpleColumns); + selectableExpressions.add(rowNumber); + + if (id != null) { + selectableExpressions.add(id); + } + if (backReference != null) { + selectableExpressions.add(backReference); + } + if (key != null) { + selectableExpressions.add(key); + } + + return new QueryMeta(basePath, inlineQuery, simpleColumns, selectableExpressions, id, backReference, key, + rowNumber, rowCount); + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/SqlGenerator.java b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/SqlGenerator.java new file mode 100644 index 0000000000..35245f3514 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/core/sqlgeneration/SqlGenerator.java @@ -0,0 +1,37 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sqlgeneration; + +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.lang.Nullable; + +/** + * Generates SQL statements for loading aggregates. + * + * @author Jens Schauder + * @since 3.2 + */ +public interface SqlGenerator { + + default String findAll(RelationalPersistentEntity aggregate) { + return findAll(aggregate, null); + } + + String findAll(RelationalPersistentEntity aggregate, @Nullable Condition condition); + + AliasFactory getAliasFactory(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/domain/RowDocument.java b/spring-data-relational/src/main/java/org/springframework/data/relational/domain/RowDocument.java new file mode 100644 index 0000000000..2ecf2597fa --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/domain/RowDocument.java @@ -0,0 +1,275 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.domain; + +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Function; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.LinkedCaseInsensitiveMap; +import org.springframework.util.ObjectUtils; + +/** + * Represents a tabular structure as document to enable hierarchical traversal of SQL results. + * + * @author Mark Paluch + * @since 3.2 + */ +public class RowDocument implements Map { + + private final Map delegate; + + public RowDocument() { + this.delegate = new LinkedCaseInsensitiveMap<>(); + } + + public RowDocument(int expectedSize) { + this.delegate = new LinkedCaseInsensitiveMap<>(expectedSize); + } + + public RowDocument(Map map) { + + this.delegate = new LinkedCaseInsensitiveMap<>(); + this.delegate.putAll(map); + } + + /** + * Factory method to create a RowDocument from a field and value. + * + * @param field the file name to use. + * @param value the value to use, can be {@literal null}. + * @return + */ + public static RowDocument of(String field, @Nullable Object value) { + return new RowDocument().append(field, value); + } + + /** + * Retrieve the value at {@code key} as {@link List}. + * + * @param key + * @return the value or {@literal null}. + * @throws ClassCastException if {@code key} holds a value that is not a {@link List}. + */ + @Nullable + public List getList(String key) { + + Object item = get(key); + if (item instanceof List || item == null) { + return (List) item; + } + + throw new ClassCastException(String.format("Cannot cast element %s be cast to List", item)); + } + + /** + * Retrieve the value at {@code key} as {@link Map}. 
+ * + * @param key + * @return the value or {@literal null}. + * @throws ClassCastException if {@code key} holds a value that is not a {@link Map}. + */ + @Nullable + public Map getMap(String key) { + + Object item = get(key); + if (item instanceof Map || item == null) { + return (Map) item; + } + + throw new ClassCastException(String.format("Cannot cast element %s be cast to Map", item)); + } + + /** + * Retrieve the value at {@code key} as {@link RowDocument}. + * + * @param key + * @return the value or {@literal null}. + * @throws ClassCastException if {@code key} holds a value that is not a {@link RowDocument}. + */ + public RowDocument getDocument(String key) { + + Object item = get(key); + if (item instanceof RowDocument || item == null) { + return (RowDocument) item; + } + + throw new ClassCastException(String.format("Cannot cast element %s be cast to RowDocument", item)); + } + + @Override + public int size() { + return delegate.size(); + } + + @Override + public boolean isEmpty() { + return delegate.isEmpty(); + } + + @Override + public boolean containsKey(Object key) { + return delegate.containsKey(key); + } + + @Override + public boolean containsValue(Object value) { + return delegate.containsValue(value); + } + + @Override + public Object get(Object key) { + return delegate.get(key); + } + + @Nullable + @Override + public Object put(String key, @Nullable Object value) { + + Assert.notNull(key, "Key must not be null!"); + + return delegate.put(key, value); + } + + /** + * Appends a new entry (or overwrites an existing value at {@code key}). 
+ * + * @param key + * @param value + * @return + */ + public RowDocument append(String key, @Nullable Object value) { + + put(key, value); + return this; + } + + @Override + public Object remove(Object key) { + return delegate.remove(key); + } + + @Override + public void putAll(Map m) { + delegate.putAll(m); + } + + @Override + public void clear() { + delegate.clear(); + } + + @Override + public Set keySet() { + return delegate.keySet(); + } + + @Override + public Collection values() { + return delegate.values(); + } + + @Override + public Set> entrySet() { + return delegate.entrySet(); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + RowDocument that = (RowDocument) o; + + return ObjectUtils.nullSafeEquals(delegate, that.delegate); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(delegate); + } + + @Override + public Object getOrDefault(Object key, Object defaultValue) { + return delegate.getOrDefault(key, defaultValue); + } + + @Override + public void forEach(BiConsumer action) { + delegate.forEach(action); + } + + @Override + public void replaceAll(BiFunction function) { + delegate.replaceAll(function); + } + + @Nullable + @Override + public Object putIfAbsent(String key, Object value) { + return delegate.putIfAbsent(key, value); + } + + @Override + public boolean remove(Object key, Object value) { + return delegate.remove(key, value); + } + + @Override + public boolean replace(String key, Object oldValue, Object newValue) { + return delegate.replace(key, oldValue, newValue); + } + + @Nullable + @Override + public Object replace(String key, Object value) { + return delegate.replace(key, value); + } + + @Override + public Object computeIfAbsent(String key, Function mappingFunction) { + return delegate.computeIfAbsent(key, mappingFunction); + } + + @Override + public Object computeIfPresent(String key, BiFunction 
remappingFunction) { + return delegate.computeIfPresent(key, remappingFunction); + } + + @Override + public Object compute(String key, BiFunction remappingFunction) { + return delegate.compute(key, remappingFunction); + } + + @Override + public Object merge(String key, Object value, BiFunction remappingFunction) { + return delegate.merge(key, value, remappingFunction); + } + + @Override + public String toString() { + return getClass().getSimpleName() + delegate.toString(); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/domain/SqlSort.java b/spring-data-relational/src/main/java/org/springframework/data/relational/domain/SqlSort.java new file mode 100644 index 0000000000..2ee60cfcd7 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/domain/SqlSort.java @@ -0,0 +1,293 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.domain; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Predicate; +import java.util.regex.Pattern; + +import org.springframework.data.domain.Sort; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * SqlSort supports additional to {@link Sort} {@literal unsafe} sort expressions. 
Such sort expressions get included in + * a query as they are. The user has to ensure that they come from trusted sources or are properly sanitized to prevent + * SQL injection attacks. + * + * @author Jens Schauder + * @since 3.1 + */ +public class SqlSort extends Sort { + + private static final Predicate predicate = Pattern.compile("^[0-9a-zA-Z_\\.\\(\\)]*$").asPredicate(); + + private static final long serialVersionUID = 1L; + + private SqlSort(Direction direction, List paths) { + this(Collections. emptyList(), direction, paths); + } + + private SqlSort(List orders, @Nullable Direction direction, List paths) { + super(combine(orders, direction, paths)); + } + + private SqlSort(List orders) { + super(orders); + } + + /** + * @param paths must not be {@literal null} or empty. + */ + public static SqlSort of(String... paths) { + return new SqlSort(DEFAULT_DIRECTION, Arrays.asList(paths)); + } + + /** + * @param direction the sorting direction. + * @param paths must not be {@literal null} or empty. + */ + public static SqlSort of(Direction direction, String... paths) { + return new SqlSort(direction, Arrays.asList(paths)); + } + + /** + * Validates a {@link org.springframework.data.domain.Sort.Order}, to be either safe for use in SQL or to be + * explicitly marked unsafe. + * + * @param order the {@link org.springframework.data.domain.Sort.Order} to validate. Must not be null. + */ + public static void validate(Sort.Order order) { + + String property = order.getProperty(); + boolean isMarkedUnsafe = order instanceof SqlSort.SqlOrder ro && ro.isUnsafe(); + if (isMarkedUnsafe) { + return; + } + + if (!predicate.test(property)) { + throw new IllegalArgumentException( + "order fields that are not marked as unsafe must only consist of digits, letter, '.', '_', and '\'. If you want to sort by arbitrary expressions please use RelationalSort.unsafe. 
Note that such expressions become part of SQL statements and therefore need to be sanatized to prevent SQL injection attacks."); + } + } + + private static List combine(List orders, @Nullable Direction direction, List paths) { + + List result = new ArrayList<>(orders); + + for (String path : paths) { + result.add(new Order(direction, path)); + } + + return result; + } + + /** + * Creates new unsafe {@link SqlSort} based on given properties. + * + * @param properties must not be {@literal null} or empty. + * @return + */ + public static SqlSort unsafe(String... properties) { + return unsafe(Sort.DEFAULT_DIRECTION, properties); + } + + /** + * Creates new unsafe {@link SqlSort} based on given {@link Direction} and properties. + * + * @param direction must not be {@literal null}. + * @param properties must not be {@literal null} or empty. + * @return + */ + public static SqlSort unsafe(Direction direction, String... properties) { + + Assert.notNull(direction, "Direction must not be null"); + Assert.notEmpty(properties, "Properties must not be empty"); + Assert.noNullElements(properties, "Properties must not contain null values"); + + return unsafe(direction, Arrays.asList(properties)); + } + + /** + * Creates new unsafe {@link SqlSort} based on given {@link Direction} and properties. + * + * @param direction must not be {@literal null}. + * @param properties must not be {@literal null} or empty. + * @return + */ + public static SqlSort unsafe(Direction direction, List properties) { + + Assert.notEmpty(properties, "Properties must not be empty"); + + List orders = new ArrayList<>(properties.size()); + + for (String property : properties) { + orders.add(new SqlOrder(direction, property)); + } + + return new SqlSort(orders); + } + + /** + * Returns a new {@link SqlSort} with the given sorting criteria added to the current one. + * + * @param direction can be {@literal null}. + * @param paths must not be {@literal null}. 
+ * @return + */ + public SqlSort and(@Nullable Direction direction, String... paths) { + + Assert.notNull(paths, "Paths must not be null"); + + List existing = new ArrayList<>(); + + for (Order order : this) { + existing.add(order); + } + + return new SqlSort(existing, direction, Arrays.asList(paths)); + } + + /** + * Returns a new {@link SqlSort} with the given sorting criteria added to the current one. + * + * @param direction can be {@literal null}. + * @param properties must not be {@literal null} or empty. + * @return + */ + public SqlSort andUnsafe(@Nullable Direction direction, String... properties) { + + Assert.notEmpty(properties, "Properties must not be empty"); + + List orders = new ArrayList<>(); + + for (Order order : this) { + orders.add(order); + } + + for (String property : properties) { + orders.add(new SqlOrder(direction, property)); + } + + return new SqlSort(orders, direction, Collections.emptyList()); + } + + /** + * Custom {@link Order} that keeps a flag to indicate unsafe property handling, i.e. the String provided is not + * necessarily a property but can be an arbitrary expression piped into the query execution. We also keep an + * additional {@code ignoreCase} flag around as the constructor of the superclass is private currently. + * + * @author Christoph Strobl + * @author Oliver Gierke + */ + public static class SqlOrder extends Order { + + private static final long serialVersionUID = 1L; + + private final boolean unsafe; + + /** + * Creates a new {@link SqlOrder} instance. Takes a single property. Direction defaults to + * {@link Sort#DEFAULT_DIRECTION}. + * + * @param property must not be {@literal null} or empty. + */ + public static SqlOrder by(String property) { + return new SqlOrder(DEFAULT_DIRECTION, property); + } + + /** + * Creates a new {@link SqlOrder} instance. Takes a single property. Direction is {@link Direction#ASC} and + * NullHandling {@link NullHandling#NATIVE}. 
+ * + * @param property must not be {@literal null} or empty. + */ + public static SqlOrder asc(String property) { + return new SqlOrder(Direction.ASC, property, NullHandling.NATIVE); + } + + /** + * Creates a new {@link SqlOrder} instance. Takes a single property. Direction is {@link Direction#DESC} and + * NullHandling {@link NullHandling#NATIVE}. + * + * @param property must not be {@literal null} or empty. + */ + public static SqlOrder desc(String property) { + return new SqlOrder(Direction.DESC, property, NullHandling.NATIVE); + } + + /** + * Creates a new {@link SqlOrder} instance. if order is {@literal null} then order defaults to + * {@link Sort#DEFAULT_DIRECTION} + * + * @param direction can be {@literal null}, will default to {@link Sort#DEFAULT_DIRECTION}. + * @param property must not be {@literal null}. + */ + private SqlOrder(@Nullable Direction direction, String property) { + this(direction, property, NullHandling.NATIVE); + } + + /** + * Creates a new {@link SqlOrder} instance. if order is {@literal null} then order defaults to + * {@link Sort#DEFAULT_DIRECTION}. + * + * @param direction can be {@literal null}, will default to {@link Sort#DEFAULT_DIRECTION}. + * @param property must not be {@literal null}. + * @param nullHandlingHint can be {@literal null}, will default to {@link NullHandling#NATIVE}. 
+ */ + private SqlOrder(@Nullable Direction direction, String property, NullHandling nullHandlingHint) { + this(direction, property, nullHandlingHint, false, true); + } + + private SqlOrder(@Nullable Direction direction, String property, NullHandling nullHandling, boolean ignoreCase, + boolean unsafe) { + + super(direction, property, ignoreCase, nullHandling); + this.unsafe = unsafe; + } + + @Override + public SqlOrder with(Direction order) { + return new SqlOrder(order, getProperty(), getNullHandling(), isIgnoreCase(), isUnsafe()); + } + + @Override + public SqlOrder with(NullHandling nullHandling) { + return new SqlOrder(getDirection(), getProperty(), nullHandling, isIgnoreCase(), isUnsafe()); + } + + public SqlOrder withUnsafe() { + return new SqlOrder(getDirection(), getProperty(), getNullHandling(), isIgnoreCase(), true); + } + + @Override + public SqlOrder ignoreCase() { + return new SqlOrder(getDirection(), getProperty(), getNullHandling(), true, isUnsafe()); + } + + /** + * @return true if {@link SqlOrder} should not be validated automatically. The validation should be done by the + * developer using this. + */ + public boolean isUnsafe() { + return unsafe; + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/Lock.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/Lock.java new file mode 100644 index 0000000000..62fdab613e --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/Lock.java @@ -0,0 +1,39 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository; + +import org.springframework.data.annotation.QueryAnnotation; +import org.springframework.data.relational.core.sql.LockMode; + +import java.lang.annotation.*; + +/** + * Annotation to provide a lock mode for a given query. + * + * @author Diego Krupitza + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@QueryAnnotation +@Documented +public @interface Lock { + + /** + * Defines which type of {@link LockMode} we want to use. + */ + LockMode value(); + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/CriteriaFactory.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/CriteriaFactory.java new file mode 100644 index 0000000000..eed288b240 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/CriteriaFactory.java @@ -0,0 +1,185 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.repository.query.parser.Part; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Simple factory to contain logic to create {@link Criteria}s from {@link Part}s. + * + * @author Roman Chigvintsev + * @author Mark Paluch + */ +class CriteriaFactory { + + private final ParameterMetadataProvider parameterMetadataProvider; + + /** + * Creates new instance of this class with the given {@link ParameterMetadataProvider}. + * + * @param parameterMetadataProvider parameter metadata provider (must not be {@literal null}) + */ + public CriteriaFactory(ParameterMetadataProvider parameterMetadataProvider) { + Assert.notNull(parameterMetadataProvider, "Parameter metadata provider must not be null"); + this.parameterMetadataProvider = parameterMetadataProvider; + } + + /** + * Creates {@link Criteria} for the given {@link Part}. + * + * @param part method name part (must not be {@literal null}) + * @return {@link Criteria} instance + * @throws IllegalArgumentException if part type is not supported + */ + public Criteria createCriteria(Part part) { + Part.Type type = part.getType(); + + String propertyName = part.getProperty().toDotPath(); + Class propertyType = part.getProperty().getType(); + + Criteria.CriteriaStep criteriaStep = Criteria.where(propertyName); + + if (type == Part.Type.IS_NULL || type == Part.Type.IS_NOT_NULL) { + return part.getType() == Part.Type.IS_NULL ? 
criteriaStep.isNull() : criteriaStep.isNotNull(); + } + + if (type == Part.Type.TRUE || type == Part.Type.FALSE) { + return part.getType() == Part.Type.TRUE ? criteriaStep.isTrue() : criteriaStep.isFalse(); + } + + switch (type) { + case BETWEEN: { + ParameterMetadata geParamMetadata = parameterMetadataProvider.next(part); + ParameterMetadata leParamMetadata = parameterMetadataProvider.next(part); + return criteriaStep.between(geParamMetadata.getValue(), leParamMetadata.getValue()); + } + case AFTER: + case GREATER_THAN: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + return criteriaStep.greaterThan(paramMetadata.getValue()); + } + case GREATER_THAN_EQUAL: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + return criteriaStep.greaterThanOrEquals(paramMetadata.getValue()); + } + case BEFORE: + case LESS_THAN: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + return criteriaStep.lessThan(paramMetadata.getValue()); + } + case LESS_THAN_EQUAL: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + return criteriaStep.lessThanOrEquals(paramMetadata.getValue()); + } + case IN: + case NOT_IN: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + Criteria criteria = part.getType() == Part.Type.IN ? criteriaStep.in(asCollection(paramMetadata.getValue())) + : criteriaStep.notIn(asCollection(paramMetadata.getValue())); + return criteria.ignoreCase(shouldIgnoreCase(part) && checkCanUpperCase(part, part.getProperty().getType())); + } + case STARTING_WITH: + case ENDING_WITH: + case CONTAINING: + case NOT_CONTAINING: + case LIKE: + case NOT_LIKE: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + Criteria criteria = part.getType() == Part.Type.NOT_LIKE || part.getType() == Part.Type.NOT_CONTAINING + ? 
criteriaStep.notLike(paramMetadata.getValue()) + : criteriaStep.like(paramMetadata.getValue()); + return criteria + .ignoreCase(shouldIgnoreCase(part) && checkCanUpperCase(part, propertyType, paramMetadata.getType())); + } + case SIMPLE_PROPERTY: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + if (paramMetadata.getValue() == null) { + return criteriaStep.isNull(); + } + return criteriaStep.is(paramMetadata.getValue()) + .ignoreCase(shouldIgnoreCase(part) && checkCanUpperCase(part, propertyType, paramMetadata.getType())); + } + case NEGATING_SIMPLE_PROPERTY: { + ParameterMetadata paramMetadata = parameterMetadataProvider.next(part); + return criteriaStep.not(paramMetadata.getValue()) + .ignoreCase(shouldIgnoreCase(part) && checkCanUpperCase(part, propertyType, paramMetadata.getType())); + } + default: + throw new IllegalArgumentException("Unsupported keyword " + type); + } + } + + /** + * Checks whether comparison should be done in case-insensitive way. + * + * @param part method name part (must not be {@literal null}) + * @return {@literal true} if comparison should be done in case-insensitive way + */ + private boolean shouldIgnoreCase(Part part) { + return part.shouldIgnoreCase() == Part.IgnoreCaseType.ALWAYS + || part.shouldIgnoreCase() == Part.IgnoreCaseType.WHEN_POSSIBLE; + } + + /** + * Checks whether "upper-case" conversion can be applied to the given {@link Expression}s in case the underlying + * {@link Part} requires ignoring case. + * + * @param part method name part (must not be {@literal null}) + * @param expressionTypes types of the given expressions (must not be {@literal null} or empty) + * @throws IllegalStateException if {@link Part} requires ignoring case but "upper-case" conversion cannot be applied + * to at least one of the given {@link Expression}s + */ + private boolean checkCanUpperCase(Part part, Class... 
expressionTypes) { + Assert.notEmpty(expressionTypes, "Expression types must not be null or empty"); + boolean strict = part.shouldIgnoreCase() == Part.IgnoreCaseType.ALWAYS; + for (Class expressionType : expressionTypes) { + if (!canUpperCase(expressionType)) { + if (strict) { + throw new IllegalStateException("Unable to ignore case of " + expressionType.getName() + + " type, the property '" + part.getProperty().getSegment() + "' must reference a string"); + } + return false; + } + } + return true; + } + + private boolean canUpperCase(Class expressionType) { + return expressionType == String.class; + } + + @SuppressWarnings("unchecked") + private static Collection asCollection(Object value) { + + if (value instanceof Collection) { + return (Collection) value; + } + + if (value.getClass().isArray()) { + return Arrays.asList(ObjectUtils.toObjectArray(value)); + } + + return Collections.singletonList(value); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/ParameterMetadata.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/ParameterMetadata.java new file mode 100644 index 0000000000..df55c34676 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/ParameterMetadata.java @@ -0,0 +1,52 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.repository.query; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Helper class for holding information about query parameter. + * + * @since 2.0 + */ +class ParameterMetadata { + + private final String name; + private final @Nullable Object value; + private final Class type; + + public ParameterMetadata(String name, @Nullable Object value, Class type) { + + Assert.notNull(type, "Parameter type must not be null"); + this.name = name; + this.value = value; + this.type = type; + } + + public String getName() { + return name; + } + + @Nullable + public Object getValue() { + return value; + } + + public Class getType() { + return type; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/ParameterMetadataProvider.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/ParameterMetadataProvider.java new file mode 100644 index 0000000000..35a3ee92b3 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/ParameterMetadataProvider.java @@ -0,0 +1,148 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.repository.query; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.data.relational.core.query.ValueFunction; +import org.springframework.data.repository.query.Parameter; +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.parser.Part; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Helper class to allow easy creation of {@link ParameterMetadata}s. + * + * @author Roman Chigvintsev + * @author Mark Paluch + * @since 2.0 + */ +class ParameterMetadataProvider implements Iterable { + + private static final Object VALUE_PLACEHOLDER = new Object(); + + private final Iterator bindableParameterIterator; + private final Iterator bindableParameterValueIterator; + private final List parameterMetadata = new ArrayList<>(); + + /** + * Creates new instance of this class with the given {@link RelationalParameterAccessor} and {@link Escaper}. + * + * @param accessor relational parameter accessor (must not be {@literal null}). + */ + public ParameterMetadataProvider(RelationalParameterAccessor accessor) { + this(accessor.getBindableParameters(), accessor.iterator()); + } + + /** + * Creates new instance of this class with the given {@link Parameters}, {@link Iterator} over all bindable parameter + * values and {@link Escaper}. 
+ * + * @param bindableParameterValueIterator iterator over bindable parameter values + * @param parameters method parameters (must not be {@literal null}) + */ + private ParameterMetadataProvider(Parameters parameters, + @Nullable Iterator bindableParameterValueIterator) { + + Assert.notNull(parameters, "Parameters must not be null"); + + this.bindableParameterIterator = parameters.getBindableParameters().iterator(); + this.bindableParameterValueIterator = bindableParameterValueIterator; + } + + @Override + public Iterator iterator() { + return parameterMetadata.iterator(); + } + + /** + * Creates new instance of {@link ParameterMetadata} for the given {@link Part} and next {@link Parameter}. + */ + public ParameterMetadata next(Part part) { + + Assert.isTrue(bindableParameterIterator.hasNext(), + () -> String.format("No parameter available for part %s.", part)); + + Parameter parameter = bindableParameterIterator.next(); + String parameterName = getParameterName(parameter, part.getProperty().getSegment()); + Object parameterValue = getParameterValue(); + Part.Type partType = part.getType(); + + checkNullIsAllowed(parameterName, parameterValue, partType); + Class parameterType = parameter.getType(); + Object preparedParameterValue = prepareParameterValue(parameterValue, parameterType, partType); + + ParameterMetadata metadata = new ParameterMetadata(parameterName, preparedParameterValue, parameterType); + parameterMetadata.add(metadata); + + return metadata; + } + + private String getParameterName(Parameter parameter, String defaultName) { + + if (parameter.isExplicitlyNamed()) { + return parameter.getName().orElseThrow(() -> new IllegalArgumentException("Parameter needs to be named")); + } + return defaultName; + } + + @Nullable + private Object getParameterValue() { + return bindableParameterValueIterator == null ? VALUE_PLACEHOLDER : bindableParameterValueIterator.next(); + } + + /** + * Checks whether {@literal null} is allowed as parameter value. 
+ * + * @param parameterName parameter name + * @param parameterValue parameter value + * @param partType method name part type (must not be {@literal null}) + * @throws IllegalArgumentException if {@literal null} is not allowed as parameter value + */ + private void checkNullIsAllowed(String parameterName, @Nullable Object parameterValue, Part.Type partType) { + + if (parameterValue == null && !Part.Type.SIMPLE_PROPERTY.equals(partType)) { + throw new IllegalArgumentException( + String.format("Value of parameter with name %s must not be null", parameterName)); + } + } + + /** + * Prepares parameter value before it's actually bound to the query. + * + * @param value must not be {@literal null} + * @return prepared query parameter value + */ + @Nullable + protected Object prepareParameterValue(@Nullable Object value, Class valueType, Part.Type partType) { + + if (value == null || !CharSequence.class.isAssignableFrom(valueType)) { + return value; + } + + return switch (partType) { + case STARTING_WITH -> (ValueFunction) escaper -> escaper.escape(value.toString()) + "%"; + case ENDING_WITH -> (ValueFunction) escaper -> "%" + escaper.escape(value.toString()); + case CONTAINING, NOT_CONTAINING -> (ValueFunction) escaper -> "%" + escaper.escape(value.toString()) + + "%"; + default -> value; + }; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalEntityInformation.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalEntityInformation.java new file mode 100755 index 0000000000..c77077cc44 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalEntityInformation.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.repository.core.EntityInformation; + +/** + * Relational database-specific {@link EntityInformation}. + * + * @author Mark Paluch + */ +public interface RelationalEntityInformation extends EntityInformation { + + /** + * Returns the name of the table the entity shall be persisted to. + * + * @return + */ + SqlIdentifier getTableName(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalEntityMetadata.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalEntityMetadata.java new file mode 100755 index 0000000000..5f90657e7d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalEntityMetadata.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.repository.core.EntityMetadata; + +/** + * Extension of {@link EntityMetadata} to additionally expose the collection name an entity shall be persisted to. + * + * @author Mark Paluch + */ +public interface RelationalEntityMetadata extends EntityMetadata { + + /** + * Returns the name of the table the entity shall be persisted to. + * + * @return + */ + SqlIdentifier getTableName(); + + /** + * Returns the {@link RelationalPersistentEntity} that supposed to determine the table to be queried. + * + * @return + */ + RelationalPersistentEntity getTableEntity(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalExampleMapper.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalExampleMapper.java new file mode 100644 index 0000000000..5bfd13f583 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalExampleMapper.java @@ -0,0 +1,155 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.repository.query; + +import static org.springframework.data.domain.ExampleMatcher.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.springframework.data.domain.Example; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PropertyHandler; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.relational.core.query.Query; +import org.springframework.data.support.ExampleMatcherAccessor; +import org.springframework.util.Assert; + +/** + * Transform an {@link Example} into a {@link Query}. + * + * @since 2.2 + * @author Greg Turnquist + * @author Jens Schauder + */ +public class RelationalExampleMapper { + + private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; + + public RelationalExampleMapper( + MappingContext, ? extends RelationalPersistentProperty> mappingContext) { + this.mappingContext = mappingContext; + } + + /** + * Use the {@link Example} to extract a {@link Query}. + * + * @param example + * @return query + */ + public Query getMappedExample(Example example) { + return getMappedExample(example, mappingContext.getRequiredPersistentEntity(example.getProbeType())); + } + + /** + * Transform each property of the {@link Example}'s probe into a {@link Criteria} and assemble them into a + * {@link Query}. 
+ * + * @param example + * @param entity + * @return query + */ + private Query getMappedExample(Example example, RelationalPersistentEntity entity) { + + Assert.notNull(example, "Example must not be null"); + Assert.notNull(entity, "RelationalPersistentEntity must not be null"); + + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(example.getProbe()); + ExampleMatcherAccessor matcherAccessor = new ExampleMatcherAccessor(example.getMatcher()); + + final List criteriaBasedOnProperties = new ArrayList<>(); + + entity.doWithProperties((PropertyHandler) property -> { + + if (property.isCollectionLike() || property.isMap()) { + return; + } + + if (matcherAccessor.isIgnoredPath(property.getName())) { + return; + } + + Optional optionalConvertedPropValue = matcherAccessor // + .getValueTransformerForPath(property.getName()) // + .apply(Optional.ofNullable(propertyAccessor.getProperty(property))); + + // If the value is empty, don't try to match against it + if (!optionalConvertedPropValue.isPresent()) { + return; + } + + Object convPropValue = optionalConvertedPropValue.get(); + boolean ignoreCase = matcherAccessor.isIgnoreCaseForPath(property.getName()); + + String column = property.getName(); + + switch (matcherAccessor.getStringMatcherForPath(property.getName())) { + case DEFAULT: + case EXACT: + criteriaBasedOnProperties.add(includeNulls(example) // + ? Criteria.where(column).isNull().or(column).is(convPropValue).ignoreCase(ignoreCase) + : Criteria.where(column).is(convPropValue).ignoreCase(ignoreCase)); + break; + case ENDING: + criteriaBasedOnProperties.add(includeNulls(example) // + ? Criteria.where(column).isNull().or(column).like("%" + convPropValue).ignoreCase(ignoreCase) + : Criteria.where(column).like("%" + convPropValue).ignoreCase(ignoreCase)); + break; + case STARTING: + criteriaBasedOnProperties.add(includeNulls(example) // + ? 
Criteria.where(column).isNull().or(column).like(convPropValue + "%").ignoreCase(ignoreCase) + : Criteria.where(column).like(convPropValue + "%").ignoreCase(ignoreCase)); + break; + case CONTAINING: + criteriaBasedOnProperties.add(includeNulls(example) // + ? Criteria.where(column).isNull().or(column).like("%" + convPropValue + "%").ignoreCase(ignoreCase) + : Criteria.where(column).like("%" + convPropValue + "%").ignoreCase(ignoreCase)); + break; + default: + throw new IllegalStateException(example.getMatcher().getDefaultStringMatcher() + " is not supported"); + } + }); + + // Criteria, assemble! + Criteria criteria = Criteria.empty(); + + for (Criteria propertyCriteria : criteriaBasedOnProperties) { + + if (example.getMatcher().isAllMatching()) { + criteria = criteria.and(propertyCriteria); + } else { + criteria = criteria.or(propertyCriteria); + } + } + + return Query.query(criteria); + } + + /** + * Does this {@link Example} need to include {@literal NULL} values in its {@link Criteria}? + * + * @param example + * @return whether or not to include nulls. + */ + private static boolean includeNulls(Example example) { + return example.getMatcher().getNullHandler() == NullHandler.INCLUDE; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParameterAccessor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParameterAccessor.java new file mode 100755 index 0000000000..006606ad47 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParameterAccessor.java @@ -0,0 +1,37 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.data.repository.query.Parameters; + +/** + * Relational-specific {@link ParameterAccessor}. + * + * @author Mark Paluch + */ +public interface RelationalParameterAccessor extends ParameterAccessor { + + /** + * Returns the raw parameter values of the underlying query method. + */ + Object[] getValues(); + + /** + * @return the bindable parameters. + */ + Parameters getBindableParameters(); +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParameters.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParameters.java new file mode 100755 index 0000000000..16a4588a11 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParameters.java @@ -0,0 +1,89 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import java.util.List; +import java.util.function.Function; + +import org.springframework.core.MethodParameter; +import org.springframework.core.ResolvableType; +import org.springframework.data.relational.repository.query.RelationalParameters.RelationalParameter; +import org.springframework.data.repository.query.Parameter; +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.ParametersSource; +import org.springframework.data.util.TypeInformation; + +/** + * Custom extension of {@link Parameters}. + * + * @author Mark Paluch + */ +public class RelationalParameters extends Parameters { + + /** + * Creates a new {@link RelationalParameters} instance from the given {@link ParametersSource}. + * + * @param parametersSource must not be {@literal null}. + */ + public RelationalParameters(ParametersSource parametersSource) { + super(parametersSource, + methodParameter -> new RelationalParameter(methodParameter, parametersSource.getDomainTypeInformation())); + } + + protected RelationalParameters(ParametersSource parametersSource, + Function parameterFactory) { + super(parametersSource, parameterFactory); + } + + protected RelationalParameters(List parameters) { + super(parameters); + } + + @Override + protected RelationalParameters createFrom(List parameters) { + return new RelationalParameters(parameters); + } + + /** + * Custom {@link Parameter} implementation. + * + * @author Mark Paluch + * @author Chirag Tailor + */ + public static class RelationalParameter extends Parameter { + + private final TypeInformation typeInformation; + + /** + * Creates a new {@link RelationalParameter}. + * + * @param parameter must not be {@literal null}. 
+ */ + protected RelationalParameter(MethodParameter parameter, TypeInformation domainType) { + super(parameter, domainType); + this.typeInformation = TypeInformation.fromMethodParameter(parameter); + + } + + public ResolvableType getResolvableType() { + return getTypeInformation().toTypeDescriptor().getResolvableType(); + } + + public TypeInformation getTypeInformation() { + return typeInformation; + } + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParametersParameterAccessor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParametersParameterAccessor.java new file mode 100755 index 0000000000..f9c6e87942 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalParametersParameterAccessor.java @@ -0,0 +1,56 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.ParametersParameterAccessor; +import org.springframework.data.repository.query.QueryMethod; + +/** + * Relational-specific {@link ParametersParameterAccessor}. 
+ * + * @author Mark Paluch + */ +public class RelationalParametersParameterAccessor extends ParametersParameterAccessor + implements RelationalParameterAccessor { + + private final List values; + + /** + * Creates a new {@link RelationalParametersParameterAccessor}. + * + * @param method must not be {@literal null}. + * @param values must not be {@literal null}. + */ + public RelationalParametersParameterAccessor(QueryMethod method, Object[] values) { + + super(method.getParameters(), values); + this.values = Arrays.asList(values); + } + + @Override + public Object[] getValues() { + return values.toArray(); + } + + @Override + public Parameters getBindableParameters() { + return getParameters().getBindableParameters(); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalQueryCreator.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalQueryCreator.java new file mode 100644 index 0000000000..577138d24f --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/RelationalQueryCreator.java @@ -0,0 +1,158 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.repository.query; + +import java.util.Collection; +import java.util.Iterator; + +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.repository.query.Parameter; +import org.springframework.data.repository.query.Parameters; +import org.springframework.data.repository.query.parser.AbstractQueryCreator; +import org.springframework.data.repository.query.parser.Part; +import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.data.util.Streamable; +import org.springframework.util.Assert; + +/** + * Implementation of {@link AbstractQueryCreator} that creates a query from a {@link PartTree}. + * + * @author Roman Chigvintsev + * @author Mark Paluch + * @since 2.0 + */ +public abstract class RelationalQueryCreator extends AbstractQueryCreator { + + private final CriteriaFactory criteriaFactory; + + /** + * Creates new instance of this class with the given {@link PartTree}, {@link RelationalEntityMetadata} and + * {@link ParameterMetadataProvider}. + * + * @param tree part tree, must not be {@literal null}. + * @param accessor parameter metadata provider, must not be {@literal null}. + */ + public RelationalQueryCreator(PartTree tree, RelationalParameterAccessor accessor) { + + super(tree); + + Assert.notNull(accessor, "RelationalParameterAccessor must not be null"); + this.criteriaFactory = new CriteriaFactory(new ParameterMetadataProvider(accessor)); + } + + /** + * Creates {@link Criteria} for the given method name part. + * + * @param part method name part, must not be {@literal null}. + * @param iterator iterator over query parameter values + * @return new instance of {@link Criteria} + */ + @Override + protected Criteria create(Part part, Iterator iterator) { + return criteriaFactory.createCriteria(part); + } + + /** + * Combines the given {@link Criteria} with the new one created for the given method name part using {@code AND}. 
+ * + * @param part method name part, must not be {@literal null}. + * @param base {@link Criteria} to be combined, must not be {@literal null}. + * @param iterator iterator over query parameter values + * @return {@link Criteria} combination + */ + @Override + protected Criteria and(Part part, Criteria base, Iterator iterator) { + return base.and(criteriaFactory.createCriteria(part)); + } + + /** + * Combines two {@link Criteria}s using {@code OR}. + * + * @param base {@link Criteria} to be combined, must not be {@literal null}. + * @param criteria another {@link Criteria} to be combined, must not be {@literal null}. + * @return {@link Criteria} combination + */ + @Override + protected Criteria or(Criteria base, Criteria criteria) { + return base.or(criteria); + } + + /** + * Validate parameters for the derived query. Specifically checking that the query method defines scalar parameters + * and collection parameters where required and that invalid parameter declarations are rejected. + * + * @param tree + * @param parameters + */ + public static void validate(PartTree tree, Parameters parameters) { + + int argCount = 0; + + Iterable parts = () -> tree.stream().flatMap(Streamable::stream).iterator(); + for (Part part : parts) { + + int numberOfArguments = part.getNumberOfArguments(); + for (int i = 0; i < numberOfArguments; i++) { + + throwExceptionOnArgumentMismatch(part, parameters, argCount); + argCount++; + } + } + } + + private static void throwExceptionOnArgumentMismatch(Part part, Parameters parameters, int index) { + + Part.Type type = part.getType(); + String property = part.getProperty().toDotPath(); + + if (!parameters.getBindableParameters().hasParameterAt(index)) { + + String msgTemplate = "Query method expects at least %d arguments but only found %d. 
" + + "This leaves an operator of type %s for property %s unbound."; + String formattedMsg = String.format(msgTemplate, index + 1, index, type.name(), property); + throw new IllegalStateException(formattedMsg); + } + + Parameter parameter = parameters.getBindableParameter(index); + if (expectsCollection(type) && !parameterIsCollectionLike(parameter)) { + + String message = wrongParameterTypeMessage(property, type, "Collection", parameter); + throw new IllegalStateException(message); + } else if (!expectsCollection(type) && !parameterIsScalarLike(parameter)) { + + String message = wrongParameterTypeMessage(property, type, "scalar", parameter); + throw new IllegalStateException(message); + } + } + + private static boolean expectsCollection(Part.Type type) { + return type == Part.Type.IN || type == Part.Type.NOT_IN; + } + + private static boolean parameterIsCollectionLike(Parameter parameter) { + return parameter.getType().isArray() || Collection.class.isAssignableFrom(parameter.getType()); + } + + private static boolean parameterIsScalarLike(Parameter parameter) { + return !Collection.class.isAssignableFrom(parameter.getType()); + } + + private static String wrongParameterTypeMessage(String property, Part.Type operatorType, String expectedArgumentType, + Parameter parameter) { + return String.format("Operator %s on %s requires a %s argument, found %s", operatorType.name(), property, + expectedArgumentType, parameter.getType()); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/SimpleRelationalEntityMetadata.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/SimpleRelationalEntityMetadata.java new file mode 100755 index 0000000000..092dedac6a --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/SimpleRelationalEntityMetadata.java @@ -0,0 +1,59 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.query; + +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.util.Assert; + +/** + * Default implementation of {@link RelationalEntityMetadata}. + * + * @author Mark Paluch + */ +public class SimpleRelationalEntityMetadata implements RelationalEntityMetadata { + + private final Class type; + private final RelationalPersistentEntity tableEntity; + + /** + * Creates a new {@link SimpleRelationalEntityMetadata} using the given type and {@link RelationalPersistentEntity} to + * use for table lookups. + * + * @param type must not be {@literal null}. + * @param tableEntity must not be {@literal null}. 
+ */ + public SimpleRelationalEntityMetadata(Class type, RelationalPersistentEntity tableEntity) { + + Assert.notNull(type, "Type must not be null"); + Assert.notNull(tableEntity, "Table entity must not be null"); + + this.type = type; + this.tableEntity = tableEntity; + } + + public Class getJavaType() { + return type; + } + + public SqlIdentifier getTableName() { + return tableEntity.getQualifiedTableName(); + } + + public RelationalPersistentEntity getTableEntity() { + return this.tableEntity; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/package-info.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/package-info.java new file mode 100755 index 0000000000..ccd616a69d --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/query/package-info.java @@ -0,0 +1,7 @@ +/** + * Query support for relational database repositories. + */ +@NonNullApi +package org.springframework.data.relational.repository.query; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/MappingRelationalEntityInformation.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/MappingRelationalEntityInformation.java new file mode 100755 index 0000000000..98f43519fc --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/MappingRelationalEntityInformation.java @@ -0,0 +1,106 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.repository.support; + +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.relational.repository.query.RelationalEntityInformation; +import org.springframework.data.repository.core.support.PersistentEntityInformation; +import org.springframework.lang.Nullable; + +/** + * {@link RelationalEntityInformation} implementation using a {@link RelationalPersistentEntity} instance to lookup the + * necessary information. Can be configured with a custom table name. + *

+ * Entity types that do not declare an explicit Id type fall back to {@link Long} as Id type. + * + * @author Mark Paluch + */ +public class MappingRelationalEntityInformation extends PersistentEntityInformation + implements RelationalEntityInformation { + + private final RelationalPersistentEntity entityMetadata; + private final @Nullable SqlIdentifier customTableName; + private final Class fallbackIdType; + + /** + * Creates a new {@link MappingRelationalEntityInformation} for the given {@link RelationalPersistentEntity}. + * + * @param entity must not be {@literal null}. + */ + public MappingRelationalEntityInformation(RelationalPersistentEntity entity) { + this(entity, null, null); + } + + /** + * Creates a new {@link MappingRelationalEntityInformation} for the given {@link RelationalPersistentEntity} and + * fallback identifier type. + * + * @param entity must not be {@literal null}. + * @param fallbackIdType can be {@literal null}. + */ + public MappingRelationalEntityInformation(RelationalPersistentEntity entity, @Nullable Class fallbackIdType) { + this(entity, null, fallbackIdType); + } + + /** + * Creates a new {@link MappingRelationalEntityInformation} for the given {@link RelationalPersistentEntity} and + * custom table name. + * + * @param entity must not be {@literal null}. + * @param customTableName can be {@literal null}. + */ + public MappingRelationalEntityInformation(RelationalPersistentEntity entity, String customTableName) { + this(entity, customTableName, null); + } + + /** + * Creates a new {@link MappingRelationalEntityInformation} for the given {@link RelationalPersistentEntity}, + * collection name and identifier type. + * + * @param entity must not be {@literal null}. + * @param customTableName can be {@literal null}. + * @param idType can be {@literal null}. 
+ */ + @SuppressWarnings("unchecked") + private MappingRelationalEntityInformation(RelationalPersistentEntity entity, @Nullable String customTableName, + @Nullable Class idType) { + + super(entity); + + this.entityMetadata = entity; + this.customTableName = customTableName == null ? null : SqlIdentifier.quoted(customTableName); + this.fallbackIdType = idType != null ? idType : (Class) Long.class; + } + + public SqlIdentifier getTableName() { + return customTableName == null ? entityMetadata.getQualifiedTableName() : customTableName; + } + + public String getIdAttribute() { + return entityMetadata.getRequiredIdProperty().getName(); + } + + @Override + public Class getIdType() { + + if (this.entityMetadata.hasIdProperty()) { + return super.getIdType(); + } + + return fallbackIdType; + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/RelationalQueryLookupStrategy.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/RelationalQueryLookupStrategy.java new file mode 100644 index 0000000000..ebcf765413 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/RelationalQueryLookupStrategy.java @@ -0,0 +1,65 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.relational.repository.support; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.util.Assert; + +/** + * Base class for R2DBC and JDBC {@link QueryLookupStrategy} implementations. + * + * @author Jens Schauder + * @since 3.4 + */ +public abstract class RelationalQueryLookupStrategy implements QueryLookupStrategy { + + private final MappingContext, ? extends RelationalPersistentProperty> context; + private final Dialect dialect; + + protected RelationalQueryLookupStrategy( + MappingContext, ? extends RelationalPersistentProperty> context, + Dialect dialect) { + + Assert.notNull(context, "RelationalMappingContext must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + + this.context = context; + this.dialect = dialect; + } + + public MappingContext, ? 
extends RelationalPersistentProperty> getMappingContext() { + return context; + } + + public Dialect getDialect() { + return dialect; + } + + protected String evaluateTableExpressions(RepositoryMetadata repositoryMetadata, String queryString) { + + TableNameQueryPreprocessor preprocessor = new TableNameQueryPreprocessor( + context.getRequiredPersistentEntity(repositoryMetadata.getDomainType()), dialect); + + return preprocessor.transform(queryString); + } + +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/TableNameQueryPreprocessor.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/TableNameQueryPreprocessor.java new file mode 100644 index 0000000000..6a2c17da85 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/TableNameQueryPreprocessor.java @@ -0,0 +1,83 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.relational.repository.support; + +import java.util.regex.Pattern; + +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.expression.Expression; +import org.springframework.expression.ParserContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.util.Assert; + +/** + * Replaces SpEL expressions based on table names in query strings. + * + * @author Jens Schauder + */ +class TableNameQueryPreprocessor { + + private static final String EXPRESSION_PARAMETER = "$1#{"; + private static final String QUOTED_EXPRESSION_PARAMETER = "$1__HASH__{"; + + private static final Pattern EXPRESSION_PARAMETER_QUOTING = Pattern.compile("([:?])#\\{"); + private static final Pattern EXPRESSION_PARAMETER_UNQUOTING = Pattern.compile("([:?])__HASH__\\{"); + + private final SqlIdentifier tableName; + private final SqlIdentifier qualifiedTableName; + private final Dialect dialect; + + public TableNameQueryPreprocessor(RelationalPersistentEntity entity, Dialect dialect) { + this(entity.getTableName(), entity.getQualifiedTableName(), dialect); + } + + TableNameQueryPreprocessor(SqlIdentifier tableName, SqlIdentifier qualifiedTableName, Dialect dialect) { + + Assert.notNull(tableName, "TableName must not be null"); + Assert.notNull(qualifiedTableName, "QualifiedTableName must not be null"); + Assert.notNull(dialect, "Dialect must not be null"); + + this.tableName = tableName; + this.qualifiedTableName = qualifiedTableName; + this.dialect = dialect; + } + + public String transform(String query) { + + StandardEvaluationContext evaluationContext = new StandardEvaluationContext(); + evaluationContext.setVariable("tableName", 
tableName.toSql(dialect.getIdentifierProcessing())); + evaluationContext.setVariable("qualifiedTableName", qualifiedTableName.toSql(dialect.getIdentifierProcessing())); + + SpelExpressionParser parser = new SpelExpressionParser(); + + query = quoteExpressionsParameter(query); + Expression expression = parser.parseExpression(query, ParserContext.TEMPLATE_EXPRESSION); + + return unquoteParameterExpressions(expression.getValue(evaluationContext, String.class)); + } + + private static String unquoteParameterExpressions(String result) { + return EXPRESSION_PARAMETER_UNQUOTING.matcher(result).replaceAll(EXPRESSION_PARAMETER); + } + + private static String quoteExpressionsParameter(String query) { + return EXPRESSION_PARAMETER_QUOTING.matcher(query).replaceAll(QUOTED_EXPRESSION_PARAMETER); + } +} diff --git a/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/package-info.java b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/package-info.java new file mode 100755 index 0000000000..28aeb25142 --- /dev/null +++ b/spring-data-relational/src/main/java/org/springframework/data/relational/repository/support/package-info.java @@ -0,0 +1,7 @@ +/** + * Support infrastructure for query derivation of relational database repositories. + */ +@NonNullApi +package org.springframework.data.relational.repository.support; + +import org.springframework.lang.NonNullApi; diff --git a/spring-data-relational/src/main/kotlin/org/springframework/data/relational/core/query/CriteriaStepExtensions.kt b/spring-data-relational/src/main/kotlin/org/springframework/data/relational/core/query/CriteriaStepExtensions.kt new file mode 100644 index 0000000000..90d5ee18ff --- /dev/null +++ b/spring-data-relational/src/main/kotlin/org/springframework/data/relational/core/query/CriteriaStepExtensions.kt @@ -0,0 +1,46 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.query + +/** + * Extension for [Criteria.CriteriaStep.is] providing a + * `isEquals(value)` variant. + * + * @author Juan Medina + * @since 2.1 + */ +infix fun Criteria.CriteriaStep.isEqual(value: Any): Criteria = + `is`(value) + +/** + * Extension for [Criteria.CriteriaStep.in] providing a + * `isIn(value)` variant. + * + * @author Juan Medina + * @since 2.1 + */ +fun Criteria.CriteriaStep.isIn(vararg value: Any): Criteria = + `in`(value) + +/** + * Extension for [Criteria.CriteriaStep.in] providing a + * `isIn(value)` variant. 
+ * + * @author Juan Medina + * @since 2.1 + */ +fun Criteria.CriteriaStep.isIn(values: Collection): Criteria = + `in`(values) diff --git a/spring-data-relational/src/main/resources/META-INF/spring/aot.factories b/spring-data-relational/src/main/resources/META-INF/spring/aot.factories new file mode 100644 index 0000000000..7f22e8671c --- /dev/null +++ b/spring-data-relational/src/main/resources/META-INF/spring/aot.factories @@ -0,0 +1,2 @@ +org.springframework.beans.factory.aot.BeanRegistrationAotProcessor=\ + org.springframework.data.relational.aot.RelationalManagedTypesBeanRegistrationAotProcessor diff --git a/spring-data-relational/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java b/spring-data-relational/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java new file mode 100644 index 0000000000..463c40ba17 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/ProxyImageNameSubstitutor.java @@ -0,0 +1,84 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data; + +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.utility.DockerImageName; +import org.testcontainers.utility.ImageNameSubstitutor; + +/** + * An {@link ImageNameSubstitutor} only used on CI servers to leverage internal proxy solution, that needs to vary the + * prefix based on which container image is needed. + * + * @author Greg Turnquist + */ +public class ProxyImageNameSubstitutor extends ImageNameSubstitutor { + + private static final Logger LOG = LoggerFactory.getLogger(ProxyImageNameSubstitutor.class); + + private static final List NAMES_TO_PROXY_PREFIX = List.of("ryuk", "arm64v8/mariadb", "ibmcom/db2", + "gvenzl/oracle-free", "gvenzl/oracle-xe"); + + private static final List NAMES_TO_LIBRARY_PROXY_PREFIX = List.of("mariadb", "mysql", "postgres"); + + private static final String PROXY_PREFIX = "docker-hub.usw1.packages.broadcom.com/"; + + private static final String LIBRARY_PROXY_PREFIX = PROXY_PREFIX + "library/"; + + @Override + public DockerImageName apply(DockerImageName dockerImageName) { + + if (NAMES_TO_PROXY_PREFIX.stream().anyMatch(s -> dockerImageName.asCanonicalNameString().contains(s))) { + + String transformedName = applyProxyPrefix(dockerImageName.asCanonicalNameString()); + LOG.info("Converting " + dockerImageName.asCanonicalNameString() + " to " + transformedName); + return DockerImageName.parse(transformedName); + } + + if (NAMES_TO_LIBRARY_PROXY_PREFIX.stream().anyMatch(s -> dockerImageName.asCanonicalNameString().contains(s))) { + + String transformedName = applyProxyAndLibraryPrefix(dockerImageName.asCanonicalNameString()); + LOG.info("Converting " + dockerImageName.asCanonicalNameString() + " to " + transformedName); + return DockerImageName.parse(transformedName); + } + + LOG.info("Not changing " + dockerImageName.asCanonicalNameString() + "..."); + return dockerImageName; + } + + @Override + protected String getDescription() { + 
return "Spring Data Proxy Image Name Substitutor"; + } + + /** + * Apply a non-library-based prefix. + */ + private static String applyProxyPrefix(String imageName) { + return PROXY_PREFIX + imageName; + } + + /** + * Apply a library based prefix. + */ + private static String applyProxyAndLibraryPrefix(String imageName) { + return LIBRARY_PROXY_PREFIX + imageName; + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/DependencyTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/DependencyTests.java new file mode 100644 index 0000000000..7728a1e975 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/DependencyTests.java @@ -0,0 +1,172 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.sql.render.SelectRenderContext; + +import com.tngtech.archunit.base.DescribedPredicate; +import com.tngtech.archunit.core.domain.JavaClass; +import com.tngtech.archunit.core.domain.JavaClasses; +import com.tngtech.archunit.core.importer.ClassFileImporter; +import com.tngtech.archunit.core.importer.ImportOption; +import com.tngtech.archunit.lang.ArchRule; +import com.tngtech.archunit.library.dependencies.SliceAssignment; +import com.tngtech.archunit.library.dependencies.SliceIdentifier; +import com.tngtech.archunit.library.dependencies.SlicesRuleDefinition; + +/** + * Test package dependencies for violations. + * + * @author Jens Schauder + * @author Mark Paluch + */ +public class DependencyTests { + + @Test + void cycleFree() { + + JavaClasses importedClasses = new ClassFileImporter() // + .withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_TESTS) // + .withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_JARS) // we just analyze the code of this module. 
+ .importPackages("org.springframework.data.relational") // + .that(onlySpringData()) // + .that(ignore(SelectRenderContext.class)) // + .that(ignore(RenderContextFactory.class)); + + ArchRule rule = SlicesRuleDefinition.slices() // + .matching("org.springframework.data.relational.(**)") // + .should() // + .beFreeOfCycles(); + + rule.check(importedClasses); + } + + @Test + void acrossModules() { + + JavaClasses importedClasses = new ClassFileImporter() // + .withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_TESTS) // + .importPackages( // + "org.springframework.data.relational", // Spring Data Relational + "org.springframework.data" // Spring Data Commons + ).that(onlySpringData()) // + .that(ignorePackage("org.springframework.data.aot.hint")) // ignoring aot, since it causes cycles in commons + .that(ignorePackage("org.springframework.data.aot")); // ignoring aot, since it causes cycles in commons; + + ArchRule rule = SlicesRuleDefinition.slices() // + .assignedFrom(subModuleSlicing()) // + .should().beFreeOfCycles(); + + rule.check(importedClasses); + } + + @Test // GH-1058 + void testGetFirstPackagePart() { + + SoftAssertions.assertSoftly(softly -> { + softly.assertThat(getFirstPackagePart("a.b.c")).isEqualTo("a"); + softly.assertThat(getFirstPackagePart("a")).isEqualTo("a"); + }); + } + + @Test // GH-1058 + void testSubModule() { + + SoftAssertions.assertSoftly(softly -> { + softly.assertThat(subModule("a.b", "a.b.c.d")).isEqualTo("c"); + softly.assertThat(subModule("a.b", "a.b.c")).isEqualTo("c"); + softly.assertThat(subModule("a.b", "a.b")).isEqualTo(""); + }); + } + + private DescribedPredicate onlySpringData() { + + return new DescribedPredicate<>("Spring Data Classes") { + @Override + public boolean test(JavaClass input) { + return input.getPackageName().startsWith("org.springframework.data"); + } + }; + } + + private DescribedPredicate ignore(Class type) { + + return new DescribedPredicate<>("ignored class " + type.getName()) { + @Override + public 
boolean test(JavaClass input) { + return !input.getFullName().startsWith(type.getName()); + } + }; + } + + private DescribedPredicate ignorePackage(String type) { + + return new DescribedPredicate<>("ignored class " + type) { + @Override + public boolean test(JavaClass input) { + return !input.getPackageName().equals(type); + } + }; + } + + private String getFirstPackagePart(String subpackage) { + + int index = subpackage.indexOf("."); + if (index < 0) { + return subpackage; + } + return subpackage.substring(0, index); + } + + private String subModule(String basePackage, String packageName) { + + if (packageName.startsWith(basePackage) && packageName.length() > basePackage.length()) { + + final int index = basePackage.length() + 1; + String subpackage = packageName.substring(index); + return getFirstPackagePart(subpackage); + } + return ""; + } + + private SliceAssignment subModuleSlicing() { + return new SliceAssignment() { + + @Override + public SliceIdentifier getIdentifierOf(JavaClass javaClass) { + + String packageName = javaClass.getPackageName(); + String subModule = subModule("org.springframework.data.relational", packageName); + if (!subModule.isEmpty()) { + return SliceIdentifier.of(subModule); + } + subModule = subModule("org.springframework.data", packageName); + if (!subModule.isEmpty()) { + return SliceIdentifier.of(subModule); + } + return SliceIdentifier.ignore(); + } + + @Override + public String getDescription() { + return "Submodule"; + } + }; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/BatchedActionsUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/BatchedActionsUnitTests.java new file mode 100644 index 0000000000..f3ba099344 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/BatchedActionsUnitTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author 
or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.conversion; + +import static org.assertj.core.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.conversion.DbAction.BatchDelete; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +public class BatchedActionsUnitTests { + + BatchedActions deletes = BatchedActions.batchedDeletes(); + + LoggingConsumer consumer = new LoggingConsumer(); + RelationalMappingContext context = new RelationalMappingContext(); + + DbAction.Delete firstOneDelete = new DbAction.Delete(23L, path("one")); + DbAction.Delete secondOneDelete = new DbAction.Delete(24L, path("one")); + DbAction.Delete firstTwoDelete = new DbAction.Delete(25L, path("two")); + DbAction.Delete secondTwoDelete = new DbAction.Delete(26L, path("two")); + + @Test // GH-537 + void emptyBatchedDeletesDoesNotInvokeConsumer() { + + deletes.forEach(consumer); + + assertThat(consumer.log).isEmpty(); + } + + @Test // GH-537 + void singleActionGetsPassedToConsumer() { + + deletes.add(firstOneDelete); + + deletes.forEach(consumer); + + 
assertThat(consumer.log).containsExactly(firstOneDelete); + } + + @Test // GH-537 + void multipleUnbatchableActionsGetsPassedToConsumerIndividually() { + + deletes.add(firstOneDelete); + deletes.add(firstTwoDelete); + + deletes.forEach(consumer); + + assertThat(consumer.log).containsExactlyInAnyOrder(firstOneDelete, firstTwoDelete); + } + + @Test // GH-537 + void batchableActionsGetPassedToConsumerAsOne() { + + deletes.add(firstOneDelete); + deletes.add(secondOneDelete); + + deletes.forEach(consumer); + + assertThat(consumer.log).extracting(a -> ((Class)a.getClass())).containsExactly(BatchDelete.class); + } + + private PersistentPropertyPath path(String path) { + return context.getPersistentPropertyPath(path, DummyEntity.class); + } + + private static class LoggingConsumer implements Consumer> { + List> log = new ArrayList<>(); + + @Override + public void accept(DbAction dbAction) { + log.add(dbAction); + } + } + + private static class DummyEntity { + OtherEntity one; + OtherEntity two; + } + + private static class OtherEntity { + String one; + String two; + } + +} diff --git a/src/test/java/org/springframework/data/jdbc/core/conversion/DbActionExecutionExceptionTest.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DbActionExecutionExceptionUnitTests.java similarity index 71% rename from src/test/java/org/springframework/data/jdbc/core/conversion/DbActionExecutionExceptionTest.java rename to spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DbActionExecutionExceptionUnitTests.java index 6e114ab93f..c81c7dd20a 100644 --- a/src/test/java/org/springframework/data/jdbc/core/conversion/DbActionExecutionExceptionTest.java +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DbActionExecutionExceptionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018-2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,16 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.jdbc.core.conversion; +package org.springframework.data.relational.core.conversion; -import static org.mockito.Mockito.*; +import org.junit.jupiter.api.Test; -import org.junit.Test; +import static org.mockito.Mockito.*; /** + * Unit test for {@link DbActionExecutionException}. + * * @author Jens Schauder */ -public class DbActionExecutionExceptionTest { +public class DbActionExecutionExceptionUnitTests { @Test // DATAJDBC-162 public void constructorWorksWithNullPropertyPath() { diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DbActionTestSupport.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DbActionTestSupport.java new file mode 100644 index 0000000000..51e398aa36 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DbActionTestSupport.java @@ -0,0 +1,67 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.conversion; + +import org.springframework.lang.Nullable; + +/** + * Utility class for analyzing DbActions in tests. + * + * @author Jens Schauder + * @author Chirag Tailor + */ +final class DbActionTestSupport { + + private DbActionTestSupport() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + static String extractPath(DbAction action) { + + if (action instanceof DbAction.WithPropertyPath) { + return ((DbAction.WithPropertyPath) action).getPropertyPath().toDotPath(); + } + + return ""; + } + + static boolean isWithDependsOn(DbAction dbAction) { + return dbAction instanceof DbAction.WithDependingOn; + } + + @Nullable + static Class actualEntityType(DbAction a) { + + if (a instanceof DbAction.WithEntity) { + return ((DbAction.WithEntity) a).getEntity().getClass(); + } + return null; + } + + @Nullable + static IdValueSource insertIdValueSource(DbAction action) { + + if (action instanceof DbAction.WithEntity) { + return ((DbAction.WithEntity) action).getIdValueSource(); + } else if (action instanceof DbAction.BatchInsert) { + return ((DbAction.BatchInsert) action).getBatchValue(); + } else if (action instanceof DbAction.BatchInsertRoot) { + return ((DbAction.BatchInsertRoot) action).getBatchValue(); + } else { + return null; + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DeleteBatchingAggregateChangeTest.java 
b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DeleteBatchingAggregateChangeTest.java new file mode 100644 index 0000000000..e5e1f73ab1 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/DeleteBatchingAggregateChangeTest.java @@ -0,0 +1,213 @@ +package org.springframework.data.relational.core.conversion; + +import static org.assertj.core.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import org.assertj.core.groups.Tuple; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Unit tests for {@link DeleteBatchingAggregateChange}. + * + * @author Chirag Tailor + */ +class DeleteBatchingAggregateChangeTest { + + RelationalMappingContext context = new RelationalMappingContext(); + + @Test // GH-537 + void yieldsDeleteActions() { + + Root root = new Root(1L, null); + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(root); + DbAction.Delete intermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(intermediateDelete); + } + + @Test // GH-537 + void yieldsNestedDeleteActionsInTreeOrderFromLeavesToRoot() { + + Root root = new Root(2L, null); + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(root); + DbAction.Delete intermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete); + + DbAction.Delete leafDelete = new DbAction.Delete<>(1L, + 
context.getPersistentPropertyPath("intermediate.leaf", Root.class)); + aggregateChange.addAction(leafDelete); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + List> actions = extractActions(change); + assertThat(actions).containsExactly(leafDelete, intermediateDelete); + } + + @Test // GH-537 + void yieldsDeleteActionsAsBatchDeletes_groupedByPath_whenGroupContainsMultipleDeletes() { + + Root root = new Root(1L, null); + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(root); + + DbAction.Delete intermediateDelete1 = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete1); + + DbAction.Delete intermediateDelete2 = new DbAction.Delete<>(2L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete2); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + List> actions = extractActions(change); + assertThat(actions).extracting(DbAction::getClass, DbAction::getEntityType) // + .containsExactly(Tuple.tuple(DbAction.BatchDelete.class, Intermediate.class)); + assertThat(getBatchWithValueAction(actions, Intermediate.class, DbAction.BatchDelete.class).getActions()) + .containsExactly(intermediateDelete1, intermediateDelete2); + } + + @Test // GH-537 + void yieldsDeleteRootActions() { + + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(new Root(null, null)); + DbAction.DeleteRoot deleteRoot = new DbAction.DeleteRoot<>(1L, Root.class, null); + aggregateChange.addAction(deleteRoot); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(deleteRoot); + } + + @Test // GH-537 + void yieldsDeleteRootActionsAfterDeleteActions() { + + 
DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(new Root(null, null)); + + DbAction.DeleteRoot deleteRoot = new DbAction.DeleteRoot<>(1L, Root.class, null); + aggregateChange.addAction(deleteRoot); + + DbAction.Delete intermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(intermediateDelete, deleteRoot); + } + + @Test // GH-537 + void yieldsLockRootActions() { + + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(new Root(null, null)); + + DbAction.AcquireLockRoot lockRootAction = new DbAction.AcquireLockRoot<>(1L, Root.class); + aggregateChange.addAction(lockRootAction); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(lockRootAction); + } + + @Test // GH-537 + void yieldsLockRootActionsBeforeDeleteActions() { + + DeleteAggregateChange aggregateChange = MutableAggregateChange.forDelete(new Root(null, null)); + + DbAction.Delete intermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete); + + DbAction.AcquireLockRoot lockRootAction = new DbAction.AcquireLockRoot<>(1L, Root.class); + aggregateChange.addAction(lockRootAction); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(lockRootAction, intermediateDelete); + } + + @Test // GH-537 + void yieldsDeleteRootActionsWithoutVersionAsBatchDeleteRoots_whenGroupContainsMultipleDeleteRoots() { + + DeleteAggregateChange aggregateChange1 = 
MutableAggregateChange.forDelete(new Root(null, null)); + DbAction.DeleteRoot deleteRoot1 = new DbAction.DeleteRoot<>(1L, Root.class, null); + aggregateChange1.addAction(deleteRoot1); + DeleteAggregateChange aggregateChange2 = MutableAggregateChange.forDelete(Root.class); + DbAction.DeleteRoot deleteRoot2 = new DbAction.DeleteRoot<>(2L, Root.class, 10); + aggregateChange2.addAction(deleteRoot2); + DeleteAggregateChange aggregateChange3 = MutableAggregateChange.forDelete(Root.class); + DbAction.DeleteRoot deleteRoot3 = new DbAction.DeleteRoot<>(3L, Root.class, null); + aggregateChange3.addAction(deleteRoot3); + DeleteAggregateChange aggregateChange4 = MutableAggregateChange.forDelete(Root.class); + DbAction.DeleteRoot deleteRoot4 = new DbAction.DeleteRoot<>(4L, Root.class, 10); + aggregateChange4.addAction(deleteRoot4); + + BatchingAggregateChange> change = BatchingAggregateChange.forDelete(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + change.add(aggregateChange3); + change.add(aggregateChange4); + + List> actions = extractActions(change); + assertThat(actions).extracting(DbAction::getClass, DbAction::getEntityType).containsExactly( // + Tuple.tuple(DbAction.BatchDeleteRoot.class, Root.class), // + Tuple.tuple(DbAction.DeleteRoot.class, Root.class), // + Tuple.tuple(DbAction.DeleteRoot.class, Root.class)); + assertThat(getBatchWithValueAction(actions, Root.class, DbAction.BatchDeleteRoot.class).getActions()) + .containsExactly(deleteRoot1, deleteRoot3); + assertThat(actions).containsSubsequence(deleteRoot2, deleteRoot4); + } + + private List> extractActions(BatchingAggregateChange> change) { + + List> actions = new ArrayList<>(); + change.forEachAction(actions::add); + return actions; + } + + private DbAction.BatchWithValue, Object> getBatchWithValueAction(List> actions, + Class entityType, Class batchActionType) { + + return getBatchWithValueActions(actions, entityType, batchActionType).stream().findFirst() + .orElseThrow(() -> new 
RuntimeException("No BatchWithValue action found")); + } + + @SuppressWarnings("unchecked") + private List, Object>> getBatchWithValueActions( + List> actions, Class entityType, Class batchActionType) { + + return actions.stream() // + .filter(dbAction -> dbAction.getClass().equals(batchActionType)) // + .filter(dbAction -> dbAction.getEntityType().equals(entityType)) // + .map(dbAction -> (DbAction.BatchWithValue, Object>) dbAction).collect(Collectors.toList()); + } + + record Root( + + @Id Long id, Intermediate intermediate) { + } + + record Intermediate( + + @Id Long id, String name, Leaf leaf) { + } + + record Leaf(@Id Long id, String name) { + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/MappingRelationalConverterUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/MappingRelationalConverterUnitTests.java new file mode 100644 index 0000000000..35ef9dfe3a --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/MappingRelationalConverterUnitTests.java @@ -0,0 +1,419 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.conversion; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Collections; +import java.util.Date; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.UUID; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.convert.TypeDescriptor; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.convert.converter.GenericConverter; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.convert.ConverterBuilder; +import org.springframework.data.convert.ConverterBuilder.ConverterAware; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.CustomConversions.StoreConversions; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.domain.RowDocument; +import org.springframework.data.util.TypeInformation; + +/** + * Unit tests for {@link MappingRelationalConverter}. 
+ * + * @author Mark Paluch + * @author Lukáš Křečan + * @author Jens Schauder + */ +class MappingRelationalConverterUnitTests { + + MappingRelationalConverter converter = new MappingRelationalConverter(new RelationalMappingContext()); + + @Test // GH-1586 + void shouldReadSimpleType() { + + RowDocument document = new RowDocument().append("id", "1").append("name", "John").append("age", 30); + + SimpleType result = converter.read(SimpleType.class, document); + + assertThat(result.id).isEqualTo("1"); + assertThat(result.name).isEqualTo("John"); + assertThat(result.age).isEqualTo(30); + } + + @Test // GH-1586 + void shouldReadSimpleRecord() { + + RowDocument document = new RowDocument().append("id", "1").append("name", "John").append("age", 30); + + SimpleRecord result = converter.read(SimpleRecord.class, document); + + assertThat(result.id).isEqualTo("1"); + assertThat(result.name).isEqualTo("John"); + assertThat(result.age).isEqualTo(30); + } + + @Test // GH-1586 + void shouldEvaluateExpression() { + + RowDocument document = new RowDocument().append("myprop", "bar"); + + WithExpression result = converter.read(WithExpression.class, document); + + assertThat(result.name).isEqualTo("bar"); + } + + @Test + // GH-1689 + void shouldApplySimpleTypeConverterSimpleType() { + + converter = new MappingRelationalConverter(converter.getMappingContext(), + new CustomConversions(StoreConversions.NONE, List.of(MyEnumConverter.INSTANCE))); + + RowDocument document = new RowDocument().append("my_enum", "one"); + + WithMyEnum result = converter.read(WithMyEnum.class, document); + + assertThat(result.myEnum).isEqualTo(MyEnum.ONE); + } + + @Test // GH-1586 + void shouldReadNonstaticInner() { + + RowDocument document = new RowDocument().append("name", "John").append("child", + new RowDocument().append("name", "Johnny")); + + Parent result = converter.read(Parent.class, document); + + assertThat(result.name).isEqualTo("John"); + assertThat(result.child.name).isEqualTo("Johnny"); + } + + 
@Test // GH-1586 + void shouldReadEmbedded() { + + RowDocument document = new RowDocument().append("simple_id", "1").append("simple_name", "John").append("simple_age", + 30); + + WithEmbedded result = converter.read(WithEmbedded.class, document); + + assertThat(result.simple).isNotNull(); + assertThat(result.simple.id).isEqualTo("1"); + assertThat(result.simple.name).isEqualTo("John"); + assertThat(result.simple.age).isEqualTo(30); + } + + @Test // GH-1586 + void shouldReadWithLists() { + + RowDocument nested = new RowDocument().append("id", "1").append("name", "John").append("age", 30); + + RowDocument document = new RowDocument().append("strings", List.of("one", "two")) + .append("states", List.of("NEW", "USED")).append("entities", List.of(nested)); + + WithCollection result = converter.read(WithCollection.class, document); + + assertThat(result.strings).containsExactly("one", "two"); + assertThat(result.states).containsExactly(State.NEW, State.USED); + assertThat(result.entities).hasSize(1); + + assertThat(result.entities.get(0).id).isEqualTo("1"); + assertThat(result.entities.get(0).name).isEqualTo("John"); + } + + @Test // GH-1586 + void shouldReadWithMaps() { + + RowDocument nested = new RowDocument().append("id", "1").append("name", "John").append("age", 30); + + RowDocument document = new RowDocument().append("strings", Map.of(1, "one", 2, "two")).append("entities", + Map.of(1, nested)); + + WithMap result = converter.read(WithMap.class, document); + + assertThat(result.strings).hasSize(2).containsEntry(1, "one").containsEntry(2, "two"); + + assertThat(result.entities).hasSize(1); + assertThat(result.entities.get(1).id).isEqualTo("1"); + assertThat(result.entities.get(1).name).isEqualTo("John"); + } + + @Test // GH-1586 + void shouldApplyConverters() { + + ConverterAware converterAware = ConverterBuilder.reading(String.class, Money.class, s -> { + String[] s1 = s.split(" "); + return new Money(Integer.parseInt(s1[0]), s1[1]); + }).andWriting(money -> 
money.amount + " " + money.currency); + + CustomConversions conversions = new CustomConversions(StoreConversions.of(SimpleTypeHolder.DEFAULT), + List.of(converterAware)); + RelationalMappingContext mappingContext = new RelationalMappingContext(); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + MappingRelationalConverter converter = new MappingRelationalConverter(mappingContext, conversions); + + RowDocument document = new RowDocument().append("money", "1 USD"); + + WithMoney result = converter.read(WithMoney.class, document); + + assertThat(result.money.amount).isEqualTo(1); + assertThat(result.money.currency).isEqualTo("USD"); + } + + @Test // GH-1554 + void projectShouldReadNestedProjection() { + + RowDocument source = RowDocument.of("addresses", Collections.singletonList(RowDocument.of("s", "hwy"))); + + EntityProjection projection = converter + .introspectProjection(WithNestedProjection.class, Person.class); + WithNestedProjection person = converter.project(projection, source); + + assertThat(person.getAddresses()).extracting(AddressProjection::getStreet).hasSize(1).containsOnly("hwy"); + } + + @Test // GH-1554 + void projectShouldReadProjectionWithNestedEntity() { + + RowDocument source = RowDocument.of("addresses", Collections.singletonList(RowDocument.of("s", "hwy"))); + + EntityProjection projection = converter + .introspectProjection(ProjectionWithNestedEntity.class, Person.class); + ProjectionWithNestedEntity person = converter.project(projection, source); + + assertThat(person.getAddresses()).extracting(Address::getStreet).hasSize(1).containsOnly("hwy"); + } + + @Test // GH-1842 + void shouldApplyGenericTypeConverter() { + + converter = new MappingRelationalConverter(converter.getMappingContext(), + new CustomConversions(StoreConversions.NONE, List.of(GenericTypeConverter.INSTANCE))); + + UUID uuid = UUID.randomUUID(); + GenericClass wrappedUuid = new GenericClass<>(uuid); + GenericClass 
wrappedString = new GenericClass<>("test"); + + SoftAssertions.assertSoftly(softly -> { + + softly.assertThat(converter.writeValue(uuid, TypeInformation.of(GenericClass.class))).isEqualTo(wrappedUuid); + softly.assertThat(converter.writeValue(wrappedUuid, TypeInformation.of(UUID.class))).isEqualTo(uuid); + + softly.assertThat(converter.writeValue("test", TypeInformation.of(GenericClass.class))).isEqualTo(wrappedString); + softly.assertThat(converter.writeValue(wrappedString, TypeInformation.of(String.class))).isEqualTo("test"); + }); + } + + static class SimpleType { + + @Id String id; + String name; + int age; + + } + + record SimpleRecord(@Id String id, String name, int age) { + } + + static class Parent { + String name; + + Child child; + + class Child { + + String name; + } + } + + static class WithCollection { + + List strings; + EnumSet states; + List entities; + } + + static class WithMap { + + Map strings; + Map entities; + } + + enum State { + NEW, USED, UNKNOWN + } + + static class WithMoney { + + Money money; + + } + + static class Money { + final int amount; + final String currency; + + public Money(int amount, String currency) { + this.amount = amount; + this.currency = currency; + } + } + + static class WithExpression { + + private final String name; + + public WithExpression(@Value("#root.myprop") String foo) { + this.name = foo; + } + } + + static class WithEmbedded { + + @Embedded.Nullable(prefix = "simple_") SimpleType simple; + } + + static class Person { + + @Id String id; + + Date birthDate; + + @Column("foo") String firstname; + String lastname; + + Set
addresses; + + Person() { + + } + + @PersistenceCreator + public Person(Set
addresses) { + this.addresses = addresses; + } + } + + static class Address { + + @Column("s") String street; + String city; + + public String getStreet() { + return street; + } + + public String getCity() { + return city; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(street, address.street) && Objects.equals(city, address.city); + } + + @Override + public int hashCode() { + return Objects.hash(street, city); + } + } + + interface WithNestedProjection { + + Set getAddresses(); + } + + interface ProjectionWithNestedEntity { + + Set
getAddresses(); + } + + interface AddressProjection { + + String getStreet(); + } + + record WithMyEnum(MyEnum myEnum) { + } + + enum MyEnum { + ONE, TWO, + } + + @ReadingConverter + enum MyEnumConverter implements Converter { + + INSTANCE; + + @Override + public MyEnum convert(String source) { + return MyEnum.valueOf(source.toUpperCase()); + } + + } + + @WritingConverter + enum GenericTypeConverter implements GenericConverter { + + INSTANCE; + + @Override + public Set getConvertibleTypes() { + return Set.of(new ConvertiblePair(String.class, GenericClass.class), + new ConvertiblePair(UUID.class, GenericClass.class), new ConvertiblePair(GenericClass.class, String.class), + new ConvertiblePair(GenericClass.class, UUID.class)); + } + + @Override + public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) { + if (targetType.getType() == GenericClass.class) + return new GenericClass<>(source); + + return ((GenericClass) source).value(); + } + + } + + public record GenericClass(T value) { + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityDeleteWriterUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityDeleteWriterUnitTests.java new file mode 100644 index 0000000000..11e0238b95 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityDeleteWriterUnitTests.java @@ -0,0 +1,202 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.conversion; + +import org.assertj.core.groups.Tuple; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.relational.core.conversion.DbAction.AcquireLockAllRoot; +import org.springframework.data.relational.core.conversion.DbAction.AcquireLockRoot; +import org.springframework.data.relational.core.conversion.DbAction.Delete; +import org.springframework.data.relational.core.conversion.DbAction.DeleteAll; +import org.springframework.data.relational.core.conversion.DbAction.DeleteAllRoot; +import org.springframework.data.relational.core.conversion.DbAction.DeleteRoot; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +import java.util.ArrayList; +import java.util.List; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for the {@link org.springframework.data.relational.core.conversion.RelationalEntityDeleteWriter} + * + * @author Jens Schauder + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +@ExtendWith(MockitoExtension.class) +public class RelationalEntityDeleteWriterUnitTests { + + RelationalEntityDeleteWriter converter = new RelationalEntityDeleteWriter(new RelationalMappingContext()); + + @Test // DATAJDBC-112 + public void deleteDeletesTheEntityAndReferencedEntities() { + + SomeEntity entity = new 
SomeEntity(23L); + + MutableAggregateChange aggregateChange = MutableAggregateChange.forDelete(SomeEntity.class); + + converter.write(entity.id, aggregateChange); + + assertThat(extractActions(aggregateChange)) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath) // + .containsExactly( // + Tuple.tuple(AcquireLockRoot.class, SomeEntity.class, ""), // + Tuple.tuple(Delete.class, YetAnother.class, "other.yetAnother"), // + Tuple.tuple(Delete.class, OtherEntity.class, "other"), // + Tuple.tuple(DeleteRoot.class, SomeEntity.class, "") // + ); + } + + @Test // DATAJDBC-493 + public void deleteDeletesTheEntityAndNoReferencedEntities() { + + SingleEntity entity = new SingleEntity(23L); + + MutableAggregateChange aggregateChange = MutableAggregateChange.forDelete(SingleEntity.class); + + converter.write(entity.id, aggregateChange); + + assertThat(extractActions(aggregateChange)) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath) // + .containsExactly(Tuple.tuple(DeleteRoot.class, SingleEntity.class, "")); + } + + @Test // DATAJDBC-188 + public void deleteAllDeletesAllEntitiesAndReferencedEntities() { + + MutableAggregateChange aggregateChange = MutableAggregateChange.forDelete(SomeEntity.class); + + converter.write(null, aggregateChange); + + assertThat(extractActions(aggregateChange)) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath) // + .containsExactly( // + Tuple.tuple(AcquireLockAllRoot.class, SomeEntity.class, ""), // + Tuple.tuple(DeleteAll.class, YetAnother.class, "other.yetAnother"), // + Tuple.tuple(DeleteAll.class, OtherEntity.class, "other"), // + Tuple.tuple(DeleteAllRoot.class, SomeEntity.class, "") // + ); + } + + @Test // DATAJDBC-493 + public void deleteAllDeletesAllEntitiesAndNoReferencedEntities() { + + MutableAggregateChange aggregateChange = MutableAggregateChange.forDelete(SingleEntity.class); + + converter.write(null, 
aggregateChange); + + assertThat(extractActions(aggregateChange)) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath) // + .containsExactly(Tuple.tuple(DeleteAllRoot.class, SingleEntity.class, "")); + } + + @Test // GH-1249 + public void deleteDoesNotDeleteReadOnlyReferences() { + + WithReadOnlyReference entity = new WithReadOnlyReference(23L); + + MutableAggregateChange aggregateChange = MutableAggregateChange + .forDelete(WithReadOnlyReference.class); + + converter.write(entity.id, aggregateChange); + + assertThat(extractActions(aggregateChange)) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath) // + .containsExactly( // + Tuple.tuple(DeleteRoot.class, WithReadOnlyReference.class, "") // + ); + } + + @Test // GH-1249 + public void deleteAllDoesNotDeleteReadOnlyReferences() { + + WithReadOnlyReference entity = new WithReadOnlyReference(23L); + + MutableAggregateChange aggregateChange = MutableAggregateChange + .forDelete(WithReadOnlyReference.class); + + converter.write(null, aggregateChange); + + assertThat(extractActions(aggregateChange)) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath) // + .containsExactly( // + Tuple.tuple(DeleteAllRoot.class, WithReadOnlyReference.class, "") // + ); + } + + private List> extractActions(MutableAggregateChange aggregateChange) { + + List> actions = new ArrayList<>(); + aggregateChange.forEachAction(actions::add); + return actions; + } + + private static class SomeEntity { + + @Id final Long id; + OtherEntity other; + // should not trigger own Dbaction + String name; + + private SomeEntity(Long id) { + this.id = id; + } + } + + private class OtherEntity { + + @Id final Long id; + YetAnother yetAnother; + + private OtherEntity(Long id) { + this.id = id; + } + } + + private class YetAnother { + @Id final Long id; + + private YetAnother(Long id) { + this.id = id; + } + } + + private class SingleEntity { + 
@Id final Long id; + String name; + + private SingleEntity(Long id) { + this.id = id; + } + } + + private static class WithReadOnlyReference { + + @Id + final Long id; + @ReadOnlyProperty + OtherEntity other; + + public WithReadOnlyReference(Long id) { + this.id = id; + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityInsertWriterUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityInsertWriterUnitTests.java new file mode 100644 index 0000000000..f6014894a6 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityInsertWriterUnitTests.java @@ -0,0 +1,91 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.conversion; + +import static org.assertj.core.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.relational.core.conversion.DbAction.InsertRoot; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Unit tests for the {@link RelationalEntityInsertWriter} + * + * @author Thomas Lang + * @author Chirag Tailor + */ +@ExtendWith(MockitoExtension.class) +public class RelationalEntityInsertWriterUnitTests { + + public static final long SOME_ENTITY_ID = 23L; + RelationalMappingContext context = new RelationalMappingContext(); + + @Test // DATAJDBC-112 + public void newEntityGetsConvertedToOneInsert() { + + SingleReferenceEntity entity = new SingleReferenceEntity(null, null, null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityInsertWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath, + DbActionTestSupport::actualEntityType, DbActionTestSupport::isWithDependsOn) // + .containsExactly( // + tuple(InsertRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false) // + ); + } + + @Test // DATAJDBC-282 + public void existingEntityGetsNotConvertedToDeletePlusUpdate() { + + SingleReferenceEntity entity = new SingleReferenceEntity(SOME_ENTITY_ID, null, null); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityInsertWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, DbAction::getEntityType, 
DbActionTestSupport::extractPath, + DbActionTestSupport::actualEntityType, DbActionTestSupport::isWithDependsOn) // + .containsExactly( // + tuple(InsertRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false) // + ); + } + + private List> extractActions(MutableAggregateChange aggregateChange) { + + List> actions = new ArrayList<>(); + aggregateChange.forEachAction(actions::add); + return actions; + } + + record SingleReferenceEntity( + + @Id Long id, Element other, + // should not trigger own Dbaction + String name) { + } + + record Element(@Id Long id) { + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityUpdateWriterUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityUpdateWriterUnitTests.java new file mode 100644 index 0000000000..0ba6c41d58 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityUpdateWriterUnitTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.conversion; + +import static org.assertj.core.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Unit tests for the {@link RelationalEntityUpdateWriter} + * + * @author Thomas Lang + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +@ExtendWith(MockitoExtension.class) +public class RelationalEntityUpdateWriterUnitTests { + + public static final long SOME_ENTITY_ID = 23L; + private final RelationalMappingContext context = new RelationalMappingContext(); + + @Test // DATAJDBC-112 + public void existingEntityGetsConvertedToDeletePlusUpdate() { + + SingleReferenceEntity entity = new SingleReferenceEntity(SOME_ENTITY_ID, null, null); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityUpdateWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath, + DbActionTestSupport::actualEntityType, DbActionTestSupport::isWithDependsOn) // + .containsExactly( // + tuple(DbAction.UpdateRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false), // + tuple(DbAction.Delete.class, Element.class, "other", null, false) // + ); + } + + private List> extractActions(MutableAggregateChange aggregateChange) { + + List> actions = new ArrayList<>(); + aggregateChange.forEachAction(actions::add); + return actions; + } + + record SingleReferenceEntity( + + @Id Long id, Element other, + // should not trigger own Dbaction + String name) { + } + + record Element(@Id Long id) { + } + +} diff --git 
a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityWriterUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityWriterUnitTests.java new file mode 100644 index 0000000000..81154ccbec --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/RelationalEntityWriterUnitTests.java @@ -0,0 +1,983 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.conversion; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.conversion.DbAction.Delete; +import org.springframework.data.relational.core.conversion.DbAction.Insert; +import org.springframework.data.relational.core.conversion.DbAction.InsertRoot; +import org.springframework.data.relational.core.conversion.DbAction.UpdateRoot; +import org.springframework.data.relational.core.mapping.Embedded; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.lang.Nullable; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for the {@link RelationalEntityWriter} + * + * @author Jens Schauder + * @author Bastian Wilhelm + * @author Mark Paluch + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +@ExtendWith(MockitoExtension.class) +public class RelationalEntityWriterUnitTests { + + static final long SOME_ENTITY_ID = 23L; + final RelationalMappingContext context = new RelationalMappingContext(); + + final PersistentPropertyPath listContainerElements = toPath("elements", + ListContainer.class, context); + + private final PersistentPropertyPath mapContainerElements = toPath("elements", + MapContainer.class, context); + + private final PersistentPropertyPath 
listMapContainerElements = toPath("maps.elements", + ListMapContainer.class, context); + + private final PersistentPropertyPath listMapContainerMaps = toPath("maps", + ListMapContainer.class, context); + + private final PersistentPropertyPath noIdListMapContainerElements = toPath( + "maps.elements", NoIdListMapContainer.class, context); + + private final PersistentPropertyPath noIdListMapContainerMaps = toPath("maps", + NoIdListMapContainer.class, context); + + @Test // DATAJDBC-112 + public void newEntityGetsConvertedToOneInsert() { + + SingleReferenceEntity entity = new SingleReferenceEntity(null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false, + IdValueSource.GENERATED) // + ); + } + + @Test // GH-1159 + void newEntityWithPrimitiveLongId_insertDoesNotIncludeId_whenIdValueIsZero() { + PrimitiveLongIdEntity entity = new PrimitiveLongIdEntity(); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, PrimitiveLongIdEntity.class, "", PrimitiveLongIdEntity.class, false, + IdValueSource.GENERATED) // + ); + } + + @Test // GH-1159 + 
void newEntityWithPrimitiveIntId_insertDoesNotIncludeId_whenIdValueIsZero() { + PrimitiveIntIdEntity entity = new PrimitiveIntIdEntity(); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, PrimitiveIntIdEntity.class, "", PrimitiveIntIdEntity.class, false, + IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-111 + public void newEntityGetsConvertedToOneInsertByEmbeddedEntities() { + + EmbeddedReferenceEntity entity = new EmbeddedReferenceEntity(null); + entity.other = new Element(2L); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, EmbeddedReferenceEntity.class, "", EmbeddedReferenceEntity.class, false, + IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-112 + public void newEntityWithReferenceGetsConvertedToTwoInserts() { + + SingleReferenceEntity entity = new SingleReferenceEntity(null); + entity.other = new Element(null); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + 
DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false, + IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other", Element.class, true, IdValueSource.GENERATED) // + ); + } + + @Test // GH-1159 + void newEntityWithReference_whenReferenceHasPrimitiveId_insertDoesNotIncludeId_whenIdValueIsZero() { + + EntityWithReferencesToPrimitiveIdEntity entity = new EntityWithReferencesToPrimitiveIdEntity(null); + entity.primitiveLongIdEntity = new PrimitiveLongIdEntity(); + entity.primitiveIntIdEntity = new PrimitiveIntIdEntity(); + + RootAggregateChange aggregateChange = MutableAggregateChange + .forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactlyInAnyOrder( // + tuple(InsertRoot.class, EntityWithReferencesToPrimitiveIdEntity.class, "", + EntityWithReferencesToPrimitiveIdEntity.class, false, IdValueSource.GENERATED), // + tuple(Insert.class, PrimitiveLongIdEntity.class, "primitiveLongIdEntity", PrimitiveLongIdEntity.class, true, + IdValueSource.GENERATED), // + tuple(Insert.class, PrimitiveIntIdEntity.class, "primitiveIntIdEntity", PrimitiveIntIdEntity.class, true, + IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-112 + public void existingEntityGetsConvertedToDeletePlusUpdate() { + + SingleReferenceEntity entity = new SingleReferenceEntity(SOME_ENTITY_ID); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity, 1L); + + new 
RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + dbAction -> dbAction instanceof UpdateRoot ? ((UpdateRoot) dbAction).getPreviousVersion() : null) // + .containsExactly( // + tuple(UpdateRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false, 1L), // + tuple(Delete.class, Element.class, "other", null, false, null) // + ); + } + + @Test // DATAJDBC-112 + public void newReferenceTriggersDeletePlusInsert() { + + SingleReferenceEntity entity = new SingleReferenceEntity(SOME_ENTITY_ID); + entity.other = new Element(null); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity, 1L); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(UpdateRoot.class, SingleReferenceEntity.class, "", SingleReferenceEntity.class, false, IdValueSource.PROVIDED), // + tuple(Delete.class, Element.class, "other", null, false, null), // + tuple(Insert.class, Element.class, "other", Element.class, true, IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-113 + public void newEntityWithEmptySetResultsInSingleInsert() { + + SetContainer entity = new SetContainer(null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + 
DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, SetContainer.class, "", SetContainer.class, false, IdValueSource.GENERATED)); + } + + @Test // DATAJDBC-113 + public void newEntityWithSetContainingMultipleElementsResultsInAnInsertForEach() { + + SetContainer entity = new SetContainer(null); + entity.elements.add(new Element(null)); + entity.elements.add(new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, SetContainer.class, "", SetContainer.class, false, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "elements", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "elements", Element.class, true, IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-113 + public void cascadingReferencesTriggerCascadingActions() { + + CascadingReferenceEntity entity = new CascadingReferenceEntity(null); + + entity.other.add(createMiddleElement( // + new Element(null), // + new Element(null)) // + ); + + entity.other.add(createMiddleElement( // + new Element(null), // + new Element(null)) // + ); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + 
DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, CascadingReferenceEntity.class, "", CascadingReferenceEntity.class, false, + IdValueSource.GENERATED), // + tuple(Insert.class, CascadingReferenceMiddleElement.class, "other", CascadingReferenceMiddleElement.class, + true, IdValueSource.GENERATED), // + tuple(Insert.class, CascadingReferenceMiddleElement.class, "other", CascadingReferenceMiddleElement.class, + true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-188 + public void cascadingReferencesTriggerCascadingActionsForUpdate() { + + CascadingReferenceEntity entity = new CascadingReferenceEntity(23L); + + entity.other.add(createMiddleElement( // + new Element(null), // + new Element(null)) // + ); + + entity.other.add(createMiddleElement( // + new Element(null), // + new Element(null)) // + ); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity, 1L); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(UpdateRoot.class, CascadingReferenceEntity.class, "", CascadingReferenceEntity.class, 
false, IdValueSource.PROVIDED), // + tuple(Delete.class, Element.class, "other.element", null, false, null), + tuple(Delete.class, CascadingReferenceMiddleElement.class, "other", null, false, null), + tuple(Insert.class, CascadingReferenceMiddleElement.class, "other", CascadingReferenceMiddleElement.class, + true, IdValueSource.GENERATED), // + tuple(Insert.class, CascadingReferenceMiddleElement.class, "other", CascadingReferenceMiddleElement.class, + true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "other.element", Element.class, true, IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-131 + public void newEntityWithEmptyMapResultsInSingleInsert() { + + MapContainer entity = new MapContainer(null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)).extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, MapContainer.class, "", IdValueSource.GENERATED)); + } + + @Test // DATAJDBC-131 + public void newEntityWithMapResultsInAdditionalInsertPerElement() { + + MapContainer entity = new MapContainer(null); + entity.elements.put("one", new Element(null)); + entity.elements.put("two", new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + 
assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + this::getMapKey, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactlyInAnyOrder( // + tuple(InsertRoot.class, MapContainer.class, null, "", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "one", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "two", "elements", IdValueSource.GENERATED) // + ).containsSubsequence( // container comes before the elements + tuple(InsertRoot.class, MapContainer.class, null, "", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "two", "elements", IdValueSource.GENERATED) // + ).containsSubsequence( // container comes before the elements + tuple(InsertRoot.class, MapContainer.class, null, "", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "one", "elements", IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-183 + public void newEntityWithFullMapResultsInAdditionalInsertPerElement() { + + MapContainer entity = new MapContainer(null); + + entity.elements.put("1", new Element(null)); + entity.elements.put("2", new Element(null)); + entity.elements.put("3", new Element(null)); + entity.elements.put("4", new Element(null)); + entity.elements.put("5", new Element(null)); + entity.elements.put("6", new Element(null)); + entity.elements.put("7", new Element(null)); + entity.elements.put("8", new Element(null)); + entity.elements.put("9", new Element(null)); + entity.elements.put("0", new Element(null)); + entity.elements.put("a", new Element(null)); + entity.elements.put("b", new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + this::getMapKey, // + 
DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactlyInAnyOrder( // + tuple(InsertRoot.class, MapContainer.class, null, "", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "1", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "2", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "3", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "4", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "5", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "6", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "7", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "8", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "9", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "0", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "a", "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, "b", "elements", IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-130 + public void newEntityWithEmptyListResultsInSingleInsert() { + + ListContainer entity = new ListContainer(null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)).extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, ListContainer.class, "", IdValueSource.GENERATED)); + } + + @Test // DATAJDBC-130 + public void newEntityWithListResultsInAdditionalInsertPerElement() { + + ListContainer entity = new ListContainer(null); + entity.elements.add(new Element(null)); + 
entity.elements.add(new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + this::getListKey, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, ListContainer.class, null, "", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, 0, "elements", IdValueSource.GENERATED), // + tuple(Insert.class, Element.class, 1, "elements", IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-131 + public void mapTriggersDeletePlusInsert() { + + MapContainer entity = new MapContainer(SOME_ENTITY_ID); + entity.elements.put("one", new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity, 1L); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + this::getMapKey, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(UpdateRoot.class, MapContainer.class, null, "", IdValueSource.PROVIDED), // + tuple(Delete.class, Element.class, null, "elements", null), // + tuple(Insert.class, Element.class, "one", "elements", IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-130 + public void listTriggersDeletePlusInsert() { + + ListContainer entity = new ListContainer(SOME_ENTITY_ID); + entity.elements.add(new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity, 1L); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, 
// + this::getListKey, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(UpdateRoot.class, ListContainer.class, null, "", IdValueSource.PROVIDED), // + tuple(Delete.class, Element.class, null, "elements", null), // + tuple(Insert.class, Element.class, 0, "elements", IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-223 + public void multiLevelQualifiedReferencesWithId() { + + ListMapContainer listMapContainer = new ListMapContainer(SOME_ENTITY_ID); + listMapContainer.maps.add(new MapContainer(SOME_ENTITY_ID)); + listMapContainer.maps.get(0).elements.put("one", new Element(null)); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(listMapContainer, 1L); + + new RelationalEntityWriter(context).write(listMapContainer, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + a -> getQualifier(a, listMapContainerMaps), // + a -> getQualifier(a, listMapContainerElements), // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(UpdateRoot.class, ListMapContainer.class, null, null, "", IdValueSource.PROVIDED), // + tuple(Delete.class, Element.class, null, null, "maps.elements", null), // + tuple(Delete.class, MapContainer.class, null, null, "maps", null), // + tuple(Insert.class, MapContainer.class, 0, null, "maps", IdValueSource.PROVIDED), // + tuple(Insert.class, Element.class, null, "one", "maps.elements", IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-223 + public void multiLevelQualifiedReferencesWithOutId() { + + NoIdListMapContainer listMapContainer = new NoIdListMapContainer(SOME_ENTITY_ID); + listMapContainer.maps.add(new NoIdMapContainer()); + listMapContainer.maps.get(0).elements.put("one", new NoIdElement()); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(listMapContainer, + 1L); + 
+ new RelationalEntityWriter(context).write(listMapContainer, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + a -> getQualifier(a, noIdListMapContainerMaps), // + a -> getQualifier(a, noIdListMapContainerElements), // + DbActionTestSupport::extractPath, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(UpdateRoot.class, NoIdListMapContainer.class, null, null, "", IdValueSource.PROVIDED), // + tuple(Delete.class, NoIdElement.class, null, null, "maps.elements", null), // + tuple(Delete.class, NoIdMapContainer.class, null, null, "maps", null), // + tuple(Insert.class, NoIdMapContainer.class, 0, null, "maps", IdValueSource.NONE), // + tuple(Insert.class, NoIdElement.class, 0, "one", "maps.elements", IdValueSource.NONE) // + ); + } + + @Test // DATAJDBC-417 + public void savingANullEmbeddedWithEntity() { + + EmbeddedReferenceChainEntity entity = new EmbeddedReferenceChainEntity(null); + // the embedded is null !!! + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, EmbeddedReferenceChainEntity.class, "", EmbeddedReferenceChainEntity.class, false, + IdValueSource.GENERATED) // + ); + } + + @Test // DATAJDBC-417 + public void savingInnerNullEmbeddedWithEntity() { + + RootWithEmbeddedReferenceChainEntity root = new RootWithEmbeddedReferenceChainEntity(null); + root.other = new EmbeddedReferenceChainEntity(null); + // the embedded is null !!! 
+ + RootAggregateChange aggregateChange = MutableAggregateChange + .forSave(root); + + new RelationalEntityWriter(context).write(root, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactly( // + tuple(InsertRoot.class, RootWithEmbeddedReferenceChainEntity.class, "", + RootWithEmbeddedReferenceChainEntity.class, false, IdValueSource.GENERATED), // + tuple(Insert.class, EmbeddedReferenceChainEntity.class, "other", EmbeddedReferenceChainEntity.class, true, + IdValueSource.GENERATED) // + ); + } + + @Test // GH-1159 + void newEntityWithCollection_whenElementHasPrimitiveId_doesNotIncludeId_whenIdValueIsZero() { + + EntityWithReferencesToPrimitiveIdEntity entity = new EntityWithReferencesToPrimitiveIdEntity(null); + entity.primitiveLongIdEntities.add(new PrimitiveLongIdEntity()); + entity.primitiveIntIdEntities.add(new PrimitiveIntIdEntity()); + + RootAggregateChange aggregateChange = MutableAggregateChange + .forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + List> actions = extractActions(aggregateChange); + assertThat(actions).extracting(DbAction::getClass, // + DbAction::getEntityType, // + DbActionTestSupport::extractPath, // + DbActionTestSupport::actualEntityType, // + DbActionTestSupport::isWithDependsOn, // + DbActionTestSupport::insertIdValueSource) // + .containsExactlyInAnyOrder( // + tuple(InsertRoot.class, EntityWithReferencesToPrimitiveIdEntity.class, "", + EntityWithReferencesToPrimitiveIdEntity.class, false, IdValueSource.GENERATED), // + tuple(Insert.class, PrimitiveLongIdEntity.class, "primitiveLongIdEntities", PrimitiveLongIdEntity.class, + true, IdValueSource.GENERATED), // + tuple(Insert.class, PrimitiveIntIdEntity.class, 
"primitiveIntIdEntities", PrimitiveIntIdEntity.class, true, + IdValueSource.GENERATED) // + ); + } + + @Test // GH-1249 + public void readOnlyReferenceDoesNotCreateInsertsOnCreation() { + + WithReadOnlyReference entity = new WithReadOnlyReference(null); + entity.readOnly = new Element(SOME_ENTITY_ID); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath, + DbActionTestSupport::actualEntityType, DbActionTestSupport::isWithDependsOn) // + .containsExactly( // + tuple(InsertRoot.class, WithReadOnlyReference.class, "", WithReadOnlyReference.class, false) // + // no insert for element + ); + + } + + @Test // GH-1249 + public void readOnlyReferenceDoesNotCreateDeletesOrInsertsDuringUpdate() { + + WithReadOnlyReference entity = new WithReadOnlyReference(SOME_ENTITY_ID); + entity.readOnly = new Element(SOME_ENTITY_ID); + + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(entity); + + new RelationalEntityWriter(context).write(entity, aggregateChange); + + assertThat(extractActions(aggregateChange)) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::extractPath, + DbActionTestSupport::actualEntityType, DbActionTestSupport::isWithDependsOn) // + .containsExactly( // + tuple(UpdateRoot.class, WithReadOnlyReference.class, "", WithReadOnlyReference.class, false) // + // no insert for element + ); + + } + + private List> extractActions(MutableAggregateChange aggregateChange) { + + List> actions = new ArrayList<>(); + aggregateChange.forEachAction(actions::add); + return actions; + } + + private CascadingReferenceMiddleElement createMiddleElement(Element first, Element second) { + + CascadingReferenceMiddleElement middleElement1 = new CascadingReferenceMiddleElement(null); + 
middleElement1.element.add(first); + middleElement1.element.add(second); + return middleElement1; + } + + private Object getMapKey(DbAction a) { + + PersistentPropertyPath path = this.mapContainerElements; + + return getQualifier(a, path); + } + + private Object getListKey(DbAction a) { + + PersistentPropertyPath path = this.listContainerElements; + + return getQualifier(a, path); + } + + @Nullable + private Object getQualifier(DbAction a, PersistentPropertyPath path) { + + return a instanceof DbAction.WithDependingOn // + ? ((DbAction.WithDependingOn) a).getQualifiers().get(path) // + : null; + } + + static PersistentPropertyPath toPath(String path, Class source, + RelationalMappingContext context) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(source, p -> true); + + return persistentPropertyPaths.filter(p -> p.toDotPath().equals(path)).stream().findFirst().orElse(null); + } + + static class EntityWithReferencesToPrimitiveIdEntity { + @Id final Long id; + PrimitiveLongIdEntity primitiveLongIdEntity; + List primitiveLongIdEntities = new ArrayList<>(); + PrimitiveIntIdEntity primitiveIntIdEntity; + List primitiveIntIdEntities = new ArrayList<>(); + + EntityWithReferencesToPrimitiveIdEntity(Long id) { + this.id = id; + } + } + + static class PrimitiveLongIdEntity { + @Id long id; + } + + static class PrimitiveIntIdEntity { + @Id int id; + } + + static class SingleReferenceEntity { + + @Id + final Long id; + Element other; + // should not trigger own DbAction + String name; + + public SingleReferenceEntity(Long id) { + this.id = id; + } + } + + static class EmbeddedReferenceEntity { + + @Id + final Long id; + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix_") + Element other; + + public EmbeddedReferenceEntity(Long id) { + this.id = id; + } + } + + static class EmbeddedReferenceChainEntity { + + @Id + final Long id; + @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix_") + ElementReference other; + + public 
EmbeddedReferenceChainEntity(Long id) { + this.id = id; + } + } + + static class RootWithEmbeddedReferenceChainEntity { + + @Id + final Long id; + EmbeddedReferenceChainEntity other; + + public RootWithEmbeddedReferenceChainEntity(Long id) { + this.id = id; + } + } + + static class ReferenceWoIdEntity { + + @Id + final Long id; + NoIdElement other; + // should not trigger own DbAction + String name; + + public ReferenceWoIdEntity(Long id) { + this.id = id; + } + } + + private static class CascadingReferenceMiddleElement { + + @Id + final Long id; + final Set element = new HashSet<>(); + + public CascadingReferenceMiddleElement(Long id) { + this.id = id; + } + } + + private static class CascadingReferenceEntity { + + @Id + final Long id; + final Set other = new HashSet<>(); + + public CascadingReferenceEntity(Long id) { + this.id = id; + } + } + + private static class SetContainer { + + @Id + final Long id; + Set elements = new HashSet<>(); + + public SetContainer(Long id) { + this.id = id; + } + } + + private static class ListMapContainer { + + @Id + final Long id; + List maps = new ArrayList<>(); + + public ListMapContainer(Long id) { + this.id = id; + } + } + + private static class MapContainer { + + @Id + final Long id; + Map elements = new HashMap<>(); + + public MapContainer(Long id) { + this.id = id; + } + } + + private static class ListContainer { + + @Id + final Long id; + List elements = new ArrayList<>(); + + public ListContainer(Long id) { + this.id = id; + } + } + + private static class Element { + @Id + final Long id; + + public Element(Long id) { + this.id = id; + } + } + + private static class ElementReference { + final Element element; + + public ElementReference(Element element) { + this.element = element; + } + } + + private static class NoIdListMapContainer { + + @Id + final Long id; + List maps = new ArrayList<>(); + + public NoIdListMapContainer(Long id) { + this.id = id; + } + } + + private static class NoIdMapContainer { + + Map elements = 
new HashMap<>(); + + public NoIdMapContainer() { + } + } + + private static class NoIdElement { + // empty classes feel weird. + String name; + + public NoIdElement() { + } + } + + private static class WithReadOnlyReference { + + @Id + final Long id; + @ReadOnlyProperty + Element readOnly; + + public WithReadOnlyReference(Long id) { + this.id = id; + } + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/SaveBatchingAggregateChangeTest.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/SaveBatchingAggregateChangeTest.java new file mode 100644 index 0000000000..92b719d962 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/conversion/SaveBatchingAggregateChangeTest.java @@ -0,0 +1,726 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.conversion; + +import org.assertj.core.groups.Tuple; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static java.util.Collections.*; +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for {@link SaveBatchingAggregateChange}. + * + * @author Chirag Tailor + */ +class SaveBatchingAggregateChangeTest { + + RelationalMappingContext context = new RelationalMappingContext(); + + @Test // GH-537 + void startsWithNoActions() { + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + + assertThat(extractActions(change)).isEmpty(); + } + + @Nested + class RootActionsTests { + @Test // GH-537 + void yieldsUpdateRoot() { + + Root root = new Root(1L, null); + DbAction.UpdateRoot rootUpdate = new DbAction.UpdateRoot<>(root, null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(root); + aggregateChange.setRootAction(rootUpdate); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(rootUpdate); + } + + @Test // GH-537 + void yieldsSingleInsertRoot_followedByUpdateRoot_asIndividualActions() { + + Root root1 = new Root(1L, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Root root2 = new Root(1L, null); + DbAction.UpdateRoot root2Update = new DbAction.UpdateRoot<>(root2, null); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Update); + + 
BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + + assertThat(extractActions(change)) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsExactly( // + Tuple.tuple(DbAction.InsertRoot.class, Root.class, IdValueSource.GENERATED), // + Tuple.tuple(DbAction.UpdateRoot.class, Root.class, IdValueSource.PROVIDED)); + } + + @Test // GH-537 + void yieldsMultipleMatchingInsertRoot_followedByUpdateRoot_asBatchInsertRootAction() { + + Root root1 = new Root(1L, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Root root2 = new Root(2L, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Insert); + + Root root3 = new Root(3L, null); + DbAction.UpdateRoot root3Update = new DbAction.UpdateRoot<>(root3, null); + RootAggregateChange aggregateChange3 = MutableAggregateChange.forSave(root3); + aggregateChange3.setRootAction(root3Update); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + change.add(aggregateChange3); + + List> actions = extractActions(change); + assertThat(actions) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsExactly( // + Tuple.tuple(DbAction.BatchInsertRoot.class, Root.class, IdValueSource.GENERATED), // + Tuple.tuple(DbAction.UpdateRoot.class, Root.class, IdValueSource.PROVIDED)); + assertThat(getBatchWithValueAction(actions, Root.class, DbAction.BatchInsertRoot.class).getActions()) + 
.containsExactly(root1Insert, root2Insert); + } + + @Test // GH-537 + void yieldsInsertRoot() { + + Root root = new Root(1L, null); + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(root); + aggregateChange.setRootAction(rootInsert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange); + + assertThat(extractActions(change)).containsExactly(rootInsert); + } + + @Test // GH-537 + void yieldsSingleInsertRoot_followedByNonMatchingInsertRoot_asIndividualActions() { + + Root root1 = new Root(1L, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Root root2 = new Root(2L, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.PROVIDED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Insert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + + assertThat(extractActions(change)).containsExactly(root1Insert, root2Insert); + } + + @Test // GH-537 + void yieldsMultipleMatchingInsertRoot_followedByNonMatchingInsertRoot_asBatchInsertRootAction() { + + Root root1 = new Root(1L, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Root root2 = new Root(2L, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + 
aggregateChange2.setRootAction(root2Insert); + + Root root3 = new Root(3L, null); + DbAction.InsertRoot root3Insert = new DbAction.InsertRoot<>(root3, IdValueSource.PROVIDED); + RootAggregateChange aggregateChange3 = MutableAggregateChange.forSave(root3); + aggregateChange3.setRootAction(root3Insert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + change.add(aggregateChange3); + + List> actions = extractActions(change); + assertThat(actions) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsExactly( // + Tuple.tuple(DbAction.BatchInsertRoot.class, Root.class, IdValueSource.GENERATED), // + Tuple.tuple(DbAction.InsertRoot.class, Root.class, IdValueSource.PROVIDED)); + assertThat(getBatchWithValueAction(actions, Root.class, DbAction.BatchInsertRoot.class).getActions()) + .containsExactly(root1Insert, root2Insert); + } + + @Test // GH-537 + void yieldsMultipleMatchingInsertRoot_asBatchInsertRootAction() { + + Root root1 = new Root(1L, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Root root2 = new Root(2L, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Insert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + + List> actions = extractActions(change); + assertThat(actions) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsExactly(Tuple.tuple(DbAction.BatchInsertRoot.class, Root.class, 
IdValueSource.GENERATED)); + assertThat(getBatchWithValueAction(actions, Root.class, DbAction.BatchInsertRoot.class).getActions()) + .containsExactly(root1Insert, root2Insert); + } + + @Test // GH-537 + void yieldsPreviouslyYieldedInsertRoot_asBatchInsertRootAction_whenAdditionalMatchingInsertRootIsAdded() { + + Root root1 = new Root(1L, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Root root2 = new Root(2L, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Insert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + + change.add(aggregateChange1); + + assertThat(extractActions(change)) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsExactly(Tuple.tuple(DbAction.InsertRoot.class, Root.class, IdValueSource.GENERATED)); + + change.add(aggregateChange2); + + List> actions = extractActions(change); + assertThat(actions) // + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsExactly(Tuple.tuple(DbAction.BatchInsertRoot.class, Root.class, IdValueSource.GENERATED)); + assertThat(getBatchWithValueAction(actions, Root.class, DbAction.BatchInsertRoot.class).getActions()) + .containsExactly(root1Insert, root2Insert); + } + } + + @Test // GH-537 + void yieldsRootActionsBeforeDeleteActions() { + + Root root1 = new Root(null, null); + DbAction.UpdateRoot root1Update = new DbAction.UpdateRoot<>(root1, null); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Update); + + DbAction.Delete 
root1IntermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange1.addAction(root1IntermediateDelete); + + Root root2 = new Root(null, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Insert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + + assertThat(extractActions(change)).extracting(DbAction::getClass, DbAction::getEntityType).containsExactly( // + Tuple.tuple(DbAction.UpdateRoot.class, Root.class), // + Tuple.tuple(DbAction.InsertRoot.class, Root.class), // + Tuple.tuple(DbAction.Delete.class, Intermediate.class)); + } + + @Test // GH-537 + void yieldsNestedDeleteActionsInTreeOrderFromLeavesToRoot() { + + Root root1 = new Root(1L, null); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(new DbAction.UpdateRoot<>(root1, null)); + DbAction.Delete root1IntermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange1.addAction(root1IntermediateDelete); + + Root root2 = new Root(2L, null); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(new DbAction.UpdateRoot<>(root2, null)); + + DbAction.Delete root2LeafDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate.leaf", Root.class)); + aggregateChange2.addAction(root2LeafDelete); + + DbAction.Delete root2IntermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange2.addAction(root2IntermediateDelete); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + 
change.add(aggregateChange1); + change.add(aggregateChange2); + + List> actions = extractActions(change); + assertThat(actions).extracting(DbAction::getClass, DbAction::getEntityType).containsSubsequence( + Tuple.tuple(DbAction.Delete.class, Leaf.class), // + Tuple.tuple(DbAction.BatchDelete.class, Intermediate.class)); + assertThat(getBatchWithValueAction(actions, Intermediate.class, DbAction.BatchDelete.class).getActions()) + .containsExactly(root1IntermediateDelete, root2IntermediateDelete); + } + + @Test // GH-537 + void yieldsDeleteActionsAsBatchDeletes_groupedByPath_whenGroupContainsMultipleDeletes() { + + Root root = new Root(1L, null); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(root); + DbAction.UpdateRoot updateRoot = new DbAction.UpdateRoot<>(root, null); + aggregateChange.setRootAction(updateRoot); + DbAction.Delete intermediateDelete1 = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + DbAction.Delete intermediateDelete2 = new DbAction.Delete<>(2L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange.addAction(intermediateDelete1); + aggregateChange.addAction(intermediateDelete2); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange); + + List> actions = extractActions(change); + assertThat(actions).extracting(DbAction::getClass, DbAction::getEntityType) // + .containsExactly( // + Tuple.tuple(DbAction.UpdateRoot.class, Root.class), // + Tuple.tuple(DbAction.BatchDelete.class, Intermediate.class)); + assertThat(getBatchWithValueAction(actions, Intermediate.class, DbAction.BatchDelete.class).getActions()) + .containsExactly(intermediateDelete1, intermediateDelete2); + } + + @Test // GH-537 + void yieldsDeleteActionsBeforeInsertActions() { + + Root root1 = new Root(null, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange 
aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + Intermediate root1Intermediate = new Intermediate(null, "root1Intermediate", null); + DbAction.Insert root1IntermediateInsert = new DbAction.Insert<>(root1Intermediate, + context.getPersistentPropertyPath("intermediate", Root.class), root1Insert, emptyMap(), + IdValueSource.GENERATED); + aggregateChange1.addAction(root1IntermediateInsert); + + Root root2 = new Root(1L, null); + DbAction.UpdateRoot root2Update = new DbAction.UpdateRoot<>(root2, null); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Update); + DbAction.Delete root2IntermediateDelete = new DbAction.Delete<>(1L, + context.getPersistentPropertyPath("intermediate", Root.class)); + aggregateChange2.addAction(root2IntermediateDelete); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + + assertThat(extractActions(change)).extracting(DbAction::getClass, DbAction::getEntityType).containsSubsequence( // + Tuple.tuple(DbAction.Delete.class, Intermediate.class), // + Tuple.tuple(DbAction.BatchInsert.class, Intermediate.class)); + } + + @Test // GH-537 + void yieldsInsertActionsAsBatchInserts_groupedByIdValueSource() { + + Root root = new Root(null, null); + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, IdValueSource.GENERATED); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(root); + aggregateChange.setRootAction(rootInsert); + + Intermediate intermediateGeneratedId = new Intermediate(null, "intermediateGeneratedId", null); + DbAction.Insert intermediateInsertGeneratedId = new DbAction.Insert<>(intermediateGeneratedId, + context.getPersistentPropertyPath("intermediate", Root.class), rootInsert, emptyMap(), IdValueSource.GENERATED); + aggregateChange.addAction(intermediateInsertGeneratedId); + + 
Intermediate intermediateProvidedId = new Intermediate(123L, "intermediateProvidedId", null); + DbAction.Insert intermediateInsertProvidedId = new DbAction.Insert<>(intermediateProvidedId, + context.getPersistentPropertyPath("intermediate", Root.class), rootInsert, emptyMap(), IdValueSource.PROVIDED); + aggregateChange.addAction(intermediateInsertProvidedId); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange); + + List> actions = extractActions(change); + assertThat(actions) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) // + .containsSubsequence( // + Tuple.tuple(DbAction.InsertRoot.class, Root.class, IdValueSource.GENERATED), // + Tuple.tuple(DbAction.BatchInsert.class, Intermediate.class, IdValueSource.PROVIDED)) // + .containsSubsequence( // + Tuple.tuple(DbAction.InsertRoot.class, Root.class, IdValueSource.GENERATED), // + Tuple.tuple(DbAction.BatchInsert.class, Intermediate.class, IdValueSource.GENERATED)) // + .doesNotContain(Tuple.tuple(DbAction.Insert.class, Intermediate.class)); + assertThat(getBatchWithValueAction(actions, Intermediate.class, DbAction.BatchInsert.class, IdValueSource.GENERATED) + .getActions()).containsExactly(intermediateInsertGeneratedId); + assertThat(getBatchWithValueAction(actions, Intermediate.class, DbAction.BatchInsert.class, IdValueSource.PROVIDED) + .getActions()).containsExactly(intermediateInsertProvidedId); + } + + @Test // GH-537 + void yieldsNestedInsertActionsInTreeOrderFromRootToLeaves() { + + Root root1 = new Root(null, null); + DbAction.InsertRoot root1Insert = new DbAction.InsertRoot<>(root1, IdValueSource.GENERATED); + RootAggregateChange aggregateChange1 = MutableAggregateChange.forSave(root1); + aggregateChange1.setRootAction(root1Insert); + + Intermediate root1Intermediate = new Intermediate(null, "root1Intermediate", null); + DbAction.Insert root1IntermediateInsert = new 
DbAction.Insert<>(root1Intermediate, + context.getPersistentPropertyPath("intermediate", Root.class), root1Insert, emptyMap(), + IdValueSource.GENERATED); + aggregateChange1.addAction(root1IntermediateInsert); + + Leaf root1Leaf = new Leaf(null, "root1Leaf"); + DbAction.Insert root1LeafInsert = new DbAction.Insert<>(root1Leaf, + context.getPersistentPropertyPath("intermediate.leaf", Root.class), root1IntermediateInsert, emptyMap(), + IdValueSource.GENERATED); + aggregateChange1.addAction(root1LeafInsert); + + Root root2 = new Root(null, null); + DbAction.InsertRoot root2Insert = new DbAction.InsertRoot<>(root2, IdValueSource.GENERATED); + RootAggregateChange aggregateChange2 = MutableAggregateChange.forSave(root2); + aggregateChange2.setRootAction(root2Insert); + + Intermediate root2Intermediate = new Intermediate(null, "root2Intermediate", null); + DbAction.Insert root2IntermediateInsert = new DbAction.Insert<>(root2Intermediate, + context.getPersistentPropertyPath("intermediate", Root.class), root2Insert, emptyMap(), + IdValueSource.GENERATED); + aggregateChange2.addAction(root2IntermediateInsert); + + BatchingAggregateChange> change = BatchingAggregateChange.forSave(Root.class); + change.add(aggregateChange1); + change.add(aggregateChange2); + + List> actions = extractActions(change); + assertThat(actions) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsSubsequence( // + Tuple.tuple(DbAction.BatchInsert.class, Intermediate.class, IdValueSource.GENERATED), + Tuple.tuple(DbAction.BatchInsert.class, Leaf.class, IdValueSource.GENERATED)); + assertThat(getBatchWithValueAction(actions, Intermediate.class, DbAction.BatchInsert.class).getActions()) // + .containsExactly(root1IntermediateInsert, root2IntermediateInsert); + assertThat(getBatchWithValueAction(actions, Leaf.class, DbAction.BatchInsert.class).getActions()) // + .containsExactly(root1LeafInsert); + } + + @Test // GH-537 + void 
yieldsInsertsWithSameLengthReferences_asSeparateInserts() { + + RootWithSameLengthReferences root = new RootWithSameLengthReferences(null, null, null); + DbAction.InsertRoot rootInsert = new DbAction.InsertRoot<>(root, + IdValueSource.GENERATED); + RootAggregateChange aggregateChange = MutableAggregateChange.forSave(root); + aggregateChange.setRootAction(rootInsert); + + Intermediate one = new Intermediate(null, "one", null); + DbAction.Insert oneInsert = new DbAction.Insert<>(one, + context.getPersistentPropertyPath("one", RootWithSameLengthReferences.class), rootInsert, emptyMap(), + IdValueSource.GENERATED); + aggregateChange.addAction(oneInsert); + + Intermediate two = new Intermediate(null, "two", null); + DbAction.Insert twoInsert = new DbAction.Insert<>(two, + context.getPersistentPropertyPath("two", RootWithSameLengthReferences.class), rootInsert, emptyMap(), + IdValueSource.GENERATED); + aggregateChange.addAction(twoInsert); + + BatchingAggregateChange> change = // + BatchingAggregateChange.forSave(RootWithSameLengthReferences.class); + change.add(aggregateChange); + + List> actions = extractActions(change); + assertThat(actions) + .extracting(DbAction::getClass, DbAction::getEntityType, DbActionTestSupport::insertIdValueSource) + .containsSubsequence( // + Tuple.tuple(DbAction.BatchInsert.class, Intermediate.class, IdValueSource.GENERATED), + Tuple.tuple(DbAction.BatchInsert.class, Intermediate.class, IdValueSource.GENERATED)); + List, Object>> batchInsertActions = getBatchWithValueActions( + actions, Intermediate.class, DbAction.BatchInsert.class); + assertThat(batchInsertActions).hasSize(2); + assertThat(batchInsertActions.get(0).getActions()).containsExactly(oneInsert); + assertThat(batchInsertActions.get(1).getActions()).containsExactly(twoInsert); + } + + private List> extractActions(BatchingAggregateChange> change) { + + List> actions = new ArrayList<>(); + change.forEachAction(actions::add); + return actions; + } + + private 
DbAction.BatchWithValue, Object> getBatchWithValueAction(List> actions, + Class entityType, Class batchActionType) { + + return getBatchWithValueActions(actions, entityType, batchActionType).stream().findFirst() + .orElseThrow(() -> new RuntimeException("No BatchWithValue action found")); + } + + private DbAction.BatchWithValue, Object> getBatchWithValueAction(List> actions, + Class entityType, Class batchActionType, Object batchValue) { + + return getBatchWithValueActions(actions, entityType, batchActionType).stream() + .filter(batchWithValue -> batchWithValue.getBatchValue() == batchValue).findFirst().orElseThrow( + () -> new RuntimeException(String.format("No BatchWithValue with batch value '%s' found", batchValue))); + } + + @SuppressWarnings("unchecked") + private List, Object>> getBatchWithValueActions( + List> actions, Class entityType, Class batchActionType) { + + return actions.stream() // + .filter(dbAction -> dbAction.getClass().equals(batchActionType)) // + .filter(dbAction -> dbAction.getEntityType().equals(entityType)) // + .map(dbAction -> (DbAction.BatchWithValue, Object>) dbAction).collect(Collectors.toList()); + } + + static final class RootWithSameLengthReferences { + + @Id + private final Long id; + private final Intermediate one; + private final Intermediate two; + + public RootWithSameLengthReferences(Long id, Intermediate one, Intermediate two) { + this.id = id; + this.one = one; + this.two = two; + } + + public Long getId() { + return this.id; + } + + public Intermediate getOne() { + return this.one; + } + + public Intermediate getTwo() { + return this.two; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof RootWithSameLengthReferences)) return false; + final RootWithSameLengthReferences other = (RootWithSameLengthReferences) o; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? 
other$id != null : !this$id.equals(other$id)) return false; + final Object this$one = this.getOne(); + final Object other$one = other.getOne(); + if (this$one == null ? other$one != null : !this$one.equals(other$one)) return false; + final Object this$two = this.getTwo(); + final Object other$two = other.getTwo(); + if (this$two == null ? other$two != null : !this$two.equals(other$two)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $one = this.getOne(); + result = result * PRIME + ($one == null ? 43 : $one.hashCode()); + final Object $two = this.getTwo(); + result = result * PRIME + ($two == null ? 43 : $two.hashCode()); + return result; + } + + public String toString() { + return "SaveBatchingAggregateChangeTest.RootWithSameLengthReferences(id=" + this.getId() + ", one=" + this.getOne() + ", two=" + this.getTwo() + ")"; + } + } + + static final class Root { + + @Id + private final Long id; + private final Intermediate intermediate; + + public Root(Long id, Intermediate intermediate) { + this.id = id; + this.intermediate = intermediate; + } + + public Long getId() { + return this.id; + } + + public Intermediate getIntermediate() { + return this.intermediate; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof Root)) return false; + final Root other = (Root) o; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? other$id != null : !this$id.equals(other$id)) return false; + final Object this$intermediate = this.getIntermediate(); + final Object other$intermediate = other.getIntermediate(); + if (this$intermediate == null ? 
other$intermediate != null : !this$intermediate.equals(other$intermediate)) + return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $intermediate = this.getIntermediate(); + result = result * PRIME + ($intermediate == null ? 43 : $intermediate.hashCode()); + return result; + } + + public String toString() { + return "SaveBatchingAggregateChangeTest.Root(id=" + this.getId() + ", intermediate=" + this.getIntermediate() + ")"; + } + } + + static final class Intermediate { + + @Id + private final Long id; + private final String name; + private final Leaf leaf; + + public Intermediate(Long id, String name, Leaf leaf) { + this.id = id; + this.name = name; + this.leaf = leaf; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Leaf getLeaf() { + return this.leaf; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof Intermediate)) return false; + final Intermediate other = (Intermediate) o; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? other$id != null : !this$id.equals(other$id)) return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (this$name == null ? other$name != null : !this$name.equals(other$name)) return false; + final Object this$leaf = this.getLeaf(); + final Object other$leaf = other.getLeaf(); + if (this$leaf == null ? other$leaf != null : !this$leaf.equals(other$leaf)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + final Object $leaf = this.getLeaf(); + result = result * PRIME + ($leaf == null ? 43 : $leaf.hashCode()); + return result; + } + + public String toString() { + return "SaveBatchingAggregateChangeTest.Intermediate(id=" + this.getId() + ", name=" + this.getName() + ", leaf=" + this.getLeaf() + ")"; + } + } + + static final class Leaf { + + @Id + private final Long id; + private final String name; + + public Leaf(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public boolean equals(final Object o) { + if (o == this) return true; + if (!(o instanceof Leaf)) return false; + final Leaf other = (Leaf) o; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? other$id != null : !this$id.equals(other$id)) return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (this$name == null ? other$name != null : !this$name.equals(other$name)) return false; + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 
43 : $name.hashCode()); + return result; + } + + public String toString() { + return "SaveBatchingAggregateChangeTest.Leaf(id=" + this.getId() + ", name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/EscaperUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/EscaperUnitTests.java new file mode 100644 index 0000000000..ce51101c30 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/EscaperUnitTests.java @@ -0,0 +1,74 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link Escaper}. 
+ * + * @author Roman Chigvintsev + * @author Mark Paluch + */ +public class EscaperUnitTests { + + @Test // DATAJDBC-514 + public void ignoresNulls() { + assertThat((Escaper.DEFAULT.escape(null))).isNull(); + } + + @Test // DATAJDBC-514 + public void ignoresEmptyString() { + assertThat(Escaper.DEFAULT.escape("")).isEmpty(); + } + + @Test // DATAJDBC-514 + public void ignoresBlankString() { + assertThat(Escaper.DEFAULT.escape(" ")).isEqualTo(" "); + } + + @Test // DATAJDBC-514 + public void throwsExceptionWhenEscapeCharacterIsUnderscore() { + assertThatIllegalArgumentException().isThrownBy(() -> Escaper.of('_')); + } + + @Test // DATAJDBC-514 + public void throwsExceptionWhenEscapeCharacterIsPercent() { + assertThatIllegalArgumentException().isThrownBy(() -> Escaper.of('%')); + } + + @Test // DATAJDBC-514 + public void escapesUnderscoresUsingDefaultEscapeCharacter() { + assertThat(Escaper.DEFAULT.escape("_test_")).isEqualTo("\\_test\\_"); + } + + @Test // DATAJDBC-514 + public void escapesPercentsUsingDefaultEscapeCharacter() { + assertThat(Escaper.DEFAULT.escape("%test%")).isEqualTo("\\%test\\%"); + } + + @Test // DATAJDBC-514 + public void escapesSpecialCharactersUsingCustomEscapeCharacter() { + assertThat(Escaper.of('$').escape("_%")).isEqualTo("$_$%"); + } + + @Test // DATAJDBC-514 + public void escapesAdditionalCharacters() { + assertThat(Escaper.DEFAULT.withRewriteFor("[", "]").escape("Hello Wo[Rr]ld")).isEqualTo("Hello Wo\\[Rr\\]ld"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/HsqlDbDialectUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/HsqlDbDialectUnitTests.java new file mode 100644 index 0000000000..b03eb034a0 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/HsqlDbDialectUnitTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.LockOptions; + +/** + * Unit tests for the {@link HsqlDbDialect}. 
+ * + * @author Jens Schauder + * @author Myeonghyeon Lee + */ +public class HsqlDbDialectUnitTests { + + @Test // DATAJDBC-386 + public void shouldNotSupportArrays() { + + ArrayColumns arrayColumns = new HsqlDbDialect().getArraySupport(); + + assertThat(arrayColumns.isSupported()).isFalse(); + } + + @Test // DATAJDBC-386 + public void shouldRenderLimit() { + + LimitClause limit = new HsqlDbDialect().limit(); + + assertThat(limit.getClausePosition()).isEqualTo(LimitClause.Position.AFTER_ORDER_BY); + assertThat(limit.getLimit(10)).isEqualTo("LIMIT 10"); + } + + @Test // DATAJDBC-386 + public void shouldRenderOffset() { + + LimitClause limit = new HsqlDbDialect().limit(); + + assertThat(limit.getOffset(10)).isEqualTo("OFFSET 10"); + } + + @Test // DATAJDBC-386 + public void shouldRenderLimitOffset() { + + LimitClause limit = new HsqlDbDialect().limit(); + + assertThat(limit.getLimitOffset(20, 10)).isEqualTo("OFFSET 10 LIMIT 20"); + } + + @Test // DATAJDBC-386 + public void shouldQuoteIdentifiersUsingBackticks() { + + String abcQuoted = new HsqlDbDialect().getIdentifierProcessing().quote("abc"); + + assertThat(abcQuoted).isEqualTo("\"abc\""); + } + + @Test // DATAJDBC-498 + public void shouldRenderLock() { + + LockClause limit = new HsqlDbDialect().lock(); + From from = mock(From.class); + LockOptions lockOptions = new LockOptions(LockMode.PESSIMISTIC_WRITE, from); + + assertThat(limit.getLock(lockOptions)).isEqualTo("FOR UPDATE"); + assertThat(limit.getClausePosition()).isEqualTo(LockClause.Position.AFTER_ORDER_BY); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/MySqlDialectRenderingUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/MySqlDialectRenderingUnitTests.java new file mode 100644 index 0000000000..2ad7733412 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/MySqlDialectRenderingUnitTests.java 
@@ -0,0 +1,126 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.render.NamingStrategies; +import org.springframework.data.relational.core.sql.render.SqlRenderer; + +/** + * Tests for {@link MySqlDialect}-specific rendering. 
+ * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + */ +public class MySqlDialectRenderingUnitTests { + + private final RenderContextFactory factory = new RenderContextFactory(new MySqlDialect()); + + @BeforeEach + public void before() { + factory.setNamingStrategy(NamingStrategies.asIs()); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithLimit() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithOffset() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).offset(10).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10, 18446744073709551615"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithLimitOffset() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).offset(20).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 20, 10"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLockWrite() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo FOR UPDATE"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = 
LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LOCK IN SHARE MODE"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitWithLockWrite() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10 FOR UPDATE"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10 LOCK IN SHARE MODE"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/MySqlDialectUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/MySqlDialectUnitTests.java new file mode 100644 index 0000000000..d9112a4dde --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/MySqlDialectUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.dialect; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.LockOptions; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +/** + * Unit tests for {@link MySqlDialect}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + */ +public class MySqlDialectUnitTests { + + @Test // DATAJDBC-278 + public void shouldNotSupportArrays() { + + ArrayColumns arrayColumns = new MySqlDialect().getArraySupport(); + + assertThat(arrayColumns.isSupported()).isFalse(); + } + + @Test // DATAJDBC-278 + public void shouldRenderLimit() { + + LimitClause limit = new MySqlDialect().limit(); + + assertThat(limit.getClausePosition()).isEqualTo(LimitClause.Position.AFTER_ORDER_BY); + assertThat(limit.getLimit(10)).isEqualTo("LIMIT 10"); + } + + @Test // DATAJDBC-278 + public void shouldRenderOffset() { + + LimitClause limit = new MySqlDialect().limit(); + + assertThat(limit.getOffset(10)).isEqualTo("LIMIT 10, 18446744073709551615"); + } + + @Test // DATAJDBC-278 + public void shouldRenderLimitOffset() { + + LimitClause limit = new MySqlDialect().limit(); + + assertThat(limit.getLimitOffset(20, 10)).isEqualTo("LIMIT 10, 20"); + } + + @Test // DATAJDBC-386 + public void shouldQuoteIdentifiersUsingBackticks() { + + String abcQuoted = new 
MySqlDialect().getIdentifierProcessing().quote("abc"); + + assertThat(abcQuoted).isEqualTo("`abc`"); + } + + @Test // DATAJDBC-498 + public void shouldRenderLock() { + + LockClause lock = new MySqlDialect().lock(); + From from = mock(From.class); + + assertThat(lock.getLock(new LockOptions(LockMode.PESSIMISTIC_WRITE, from))).isEqualTo("FOR UPDATE"); + assertThat(lock.getLock(new LockOptions(LockMode.PESSIMISTIC_READ, from))).isEqualTo("LOCK IN SHARE MODE"); + assertThat(lock.getClausePosition()).isEqualTo(LockClause.Position.AFTER_ORDER_BY); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/PostgresDialectRenderingUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/PostgresDialectRenderingUnitTests.java new file mode 100644 index 0000000000..2a5bbe98ed --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/PostgresDialectRenderingUnitTests.java @@ -0,0 +1,212 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.data.domain.Sort; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.OrderByField; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.render.NamingStrategies; +import org.springframework.data.relational.core.sql.render.SqlRenderer; + +/** + * Tests for {@link PostgresDialect}-specific rendering. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +public class PostgresDialectRenderingUnitTests { + + private final RenderContextFactory factory = new RenderContextFactory(PostgresDialect.INSTANCE); + + @BeforeEach + public void before() throws Exception { + factory.setNamingStrategy(NamingStrategies.asIs()); + } + + @Test // DATAJDBC-278 + public void shouldRenderSimpleSelect() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo"); + } + + @Test // DATAJDBC-278 + public void shouldApplyNamingStrategy() { + + factory.setNamingStrategy(NamingStrategies.toUpper()); + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT FOO.* FROM FOO"); + } + + @Test // DATAJDBC-278 + public void 
shouldRenderSelectWithLimit() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithOffset() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).offset(10).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo OFFSET 10"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithLimitOffset() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).offset(20).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10 OFFSET 20"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLockWrite() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo FOR UPDATE OF foo"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo FOR SHARE OF foo"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitWithLockWrite() { + + Table table = 
Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10 FOR UPDATE OF foo"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo LIMIT 10 FOR SHARE OF foo"); + } + + @Test // GH-821 + void shouldRenderSelectOrderByWithNoOptions() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()) + .from(table) + .orderBy(OrderByField.from(Column.create("bar", table))) + .build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo ORDER BY foo.bar"); + } + + @Test // GH-821 + void shouldRenderSelectOrderByWithDirection() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()) + .from(table) + .orderBy(OrderByField.from(Column.create("bar", table), Sort.Direction.ASC)) + .build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo ORDER BY foo.bar ASC"); + } + + @Test // GH-821 + void shouldRenderSelectOrderByWithNullPrecedence() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()) + .from(table) + .orderBy(OrderByField.from(Column.create("bar", table)) + .withNullHandling(Sort.NullHandling.NULLS_FIRST)) + .build(); + + String sql = 
SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo ORDER BY foo.bar NULLS FIRST"); + } + + @Test // GH-821 + void shouldRenderSelectOrderByWithDirectionAndNullHandling() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()) + .from(table) + .orderBy(OrderByField.from(Column.create("bar", table), Sort.Direction.DESC) + .withNullHandling(Sort.NullHandling.NULLS_FIRST)) + .build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo ORDER BY foo.bar DESC NULLS FIRST"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/PostgresDialectUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/PostgresDialectUnitTests.java new file mode 100644 index 0000000000..829eb1f677 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/PostgresDialectUnitTests.java @@ -0,0 +1,94 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.LockOptions; +import org.springframework.data.relational.core.sql.Table; + +import java.util.Collections; + +/** + * Unit tests for {@link PostgresDialect}. + * + * @author Mark Paluch + * @author Myeonghyeon Lee + */ +public class PostgresDialectUnitTests { + + @Test // DATAJDBC-278 + public void shouldSupportArrays() { + + ArrayColumns arrayColumns = PostgresDialect.INSTANCE.getArraySupport(); + + assertThat(arrayColumns.isSupported()).isTrue(); + } + + @Test // DATAJDBC-278 + public void shouldUseBoxedArrayTypesForPrimitiveTypes() { + + ArrayColumns arrayColumns = PostgresDialect.INSTANCE.getArraySupport(); + + assertSoftly(it -> { + it.assertThat(arrayColumns.getArrayType(int.class)).isEqualTo(Integer.class); + it.assertThat(arrayColumns.getArrayType(double.class)).isEqualTo(Double.class); + it.assertThat(arrayColumns.getArrayType(String.class)).isEqualTo(String.class); + }); + } + + @Test // DATAJDBC-278 + public void shouldRenderLimit() { + + LimitClause limit = PostgresDialect.INSTANCE.limit(); + + assertThat(limit.getClausePosition()).isEqualTo(LimitClause.Position.AFTER_ORDER_BY); + assertThat(limit.getLimit(10)).isEqualTo("LIMIT 10"); + } + + @Test // DATAJDBC-278 + public void shouldRenderOffset() { + + LimitClause limit = PostgresDialect.INSTANCE.limit(); + + assertThat(limit.getOffset(10)).isEqualTo("OFFSET 10"); + } + + @Test // DATAJDBC-278 + public void shouldRenderLimitOffset() { + + LimitClause limit = PostgresDialect.INSTANCE.limit(); + + assertThat(limit.getLimitOffset(20, 10)).isEqualTo("LIMIT 20 OFFSET 10"); + } + + @Test // 
DATAJDBC-498 + public void shouldRenderLock() { + + LockClause lock = PostgresDialect.INSTANCE.lock(); + From from = mock(From.class); + when(from.getTables()).thenReturn(Collections.singletonList(Table.create("dummy_table"))); + + assertThat(lock.getLock(new LockOptions(LockMode.PESSIMISTIC_WRITE, from))).isEqualTo("FOR UPDATE OF dummy_table"); + assertThat(lock.getLock(new LockOptions(LockMode.PESSIMISTIC_READ, from))).isEqualTo("FOR SHARE OF dummy_table"); + assertThat(lock.getClausePosition()).isEqualTo(LockClause.Position.AFTER_ORDER_BY); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/SqlServerDialectRenderingUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/SqlServerDialectRenderingUnitTests.java new file mode 100644 index 0000000000..484b133ef0 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/SqlServerDialectRenderingUnitTests.java @@ -0,0 +1,214 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.data.domain.Sort; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.OrderByField; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.render.NamingStrategies; +import org.springframework.data.relational.core.sql.render.SqlRenderer; + +/** + * Tests for {@link SqlServerDialect}-specific rendering. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Myeonghyeon Lee + * @author Chirag Tailor + */ +public class SqlServerDialectRenderingUnitTests { + + private final RenderContextFactory factory = new RenderContextFactory(SqlServerDialect.INSTANCE); + + @BeforeEach + public void before() { + factory.setNamingStrategy(NamingStrategies.asIs()); + } + + @Test // DATAJDBC-278 + public void shouldRenderSimpleSelect() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo"); + } + + @Test // DATAJDBC-278 + public void shouldApplyNamingStrategy() { + + factory.setNamingStrategy(NamingStrategies.toUpper()); + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT FOO.* FROM FOO"); + } + + @Test // DATAJDBC-278 + public void 
shouldRenderSelectWithLimit() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.*, ROW_NUMBER() over (ORDER BY (SELECT 1)) AS __relational_row_number__ FROM foo ORDER BY __relational_row_number__ OFFSET 0 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithOffset() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).offset(10).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.*, ROW_NUMBER() over (ORDER BY (SELECT 1)) AS __relational_row_number__ FROM foo ORDER BY __relational_row_number__ OFFSET 10 ROWS"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithLimitOffset() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).offset(20).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.*, ROW_NUMBER() over (ORDER BY (SELECT 1)) AS __relational_row_number__ FROM foo ORDER BY __relational_row_number__ OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // DATAJDBC-278 + public void shouldRenderSelectWithLimitOffsetAndOrderBy() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()).from(table).orderBy(table.column("column_1")).limit(10) + .offset(20).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo ORDER BY foo.column_1 OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLockWrite() { + + Table table = 
Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.* FROM foo WITH (UPDLOCK, ROWLOCK)"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.* FROM foo WITH (HOLDLOCK, ROWLOCK)"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitOffsetWithLockWrite() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).offset(20).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.*, ROW_NUMBER() over (ORDER BY (SELECT 1)) AS __relational_row_number__ FROM foo WITH (UPDLOCK, ROWLOCK) ORDER BY __relational_row_number__ OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitOffsetWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).limit(10).offset(20).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo( + "SELECT foo.*, ROW_NUMBER() over (ORDER BY (SELECT 1)) AS __relational_row_number__ FROM foo WITH (HOLDLOCK, ROWLOCK) ORDER BY __relational_row_number__ OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test 
// DATAJDBC-498 + public void shouldRenderSelectWithLimitOffsetAndOrderByWithLockWrite() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + Select select = StatementBuilder.select(table.asterisk()).from(table).orderBy(table.column("column_1")).limit(10) + .offset(20).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo WITH (UPDLOCK, ROWLOCK) ORDER BY foo.column_1 OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // DATAJDBC-498 + public void shouldRenderSelectWithLimitOffsetAndOrderByWithLockRead() { + + Table table = Table.create("foo"); + LockMode lockMode = LockMode.PESSIMISTIC_READ; + Select select = StatementBuilder.select(table.asterisk()).from(table).orderBy(table.column("column_1")).limit(10) + .offset(20).lock(lockMode).build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo WITH (HOLDLOCK, ROWLOCK) ORDER BY foo.column_1 OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // GH-821 + void shouldRenderSelectOrderByIgnoringNullHandling() { + + Table table = Table.create("foo"); + Select select = StatementBuilder.select(table.asterisk()) + .from(table) + .orderBy(OrderByField.from(Column.create("bar", table)) + .withNullHandling(Sort.NullHandling.NULLS_FIRST)) + .build(); + + String sql = SqlRenderer.create(factory.createRenderContext()).render(select); + + assertThat(sql).isEqualTo("SELECT foo.* FROM foo ORDER BY foo.bar"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/SqlServerDialectUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/SqlServerDialectUnitTests.java new file mode 100644 index 0000000000..eb1766d15b --- /dev/null +++ 
b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/SqlServerDialectUnitTests.java @@ -0,0 +1,79 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.LockMode; +import org.springframework.data.relational.core.sql.LockOptions; + +/** + * Unit tests for {@link SqlServerDialect}. 
+ * + * @author Mark Paluch + * @author Myeonghyeon Lee + */ +public class SqlServerDialectUnitTests { + + @Test // DATAJDBC-278 + public void shouldNotSupportArrays() { + + ArrayColumns arrayColumns = SqlServerDialect.INSTANCE.getArraySupport(); + + assertThat(arrayColumns.isSupported()).isFalse(); + assertThatThrownBy(() -> arrayColumns.getArrayType(String.class)).isInstanceOf(UnsupportedOperationException.class); + } + + @Test // DATAJDBC-278 + public void shouldRenderLimit() { + + LimitClause limit = SqlServerDialect.INSTANCE.limit(); + + assertThat(limit.getClausePosition()).isEqualTo(LimitClause.Position.AFTER_ORDER_BY); + assertThat(limit.getLimit(10)).isEqualTo("OFFSET 0 ROWS FETCH NEXT 10 ROWS ONLY"); + } + + @Test // DATAJDBC-278 + public void shouldRenderOffset() { + + LimitClause limit = SqlServerDialect.INSTANCE.limit(); + + assertThat(limit.getOffset(10)).isEqualTo("OFFSET 10 ROWS"); + } + + @Test // DATAJDBC-278 + public void shouldRenderLimitOffset() { + + LimitClause limit = SqlServerDialect.INSTANCE.limit(); + + assertThat(limit.getLimitOffset(20, 10)).isEqualTo("OFFSET 10 ROWS FETCH NEXT 20 ROWS ONLY"); + } + + @Test // DATAJDBC-498 + public void shouldRenderLock() { + + LockClause lock = SqlServerDialect.INSTANCE.lock(); + From from = mock(From.class); + + assertThat(lock.getLock(new LockOptions(LockMode.PESSIMISTIC_WRITE, from))).isEqualTo("WITH (UPDLOCK, ROWLOCK)"); + assertThat(lock.getLock(new LockOptions(LockMode.PESSIMISTIC_READ, from))).isEqualTo("WITH (HOLDLOCK, ROWLOCK)"); + assertThat(lock.getClausePosition()).isEqualTo(LockClause.Position.AFTER_FROM_TABLE); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/TimestampAtUtcToOffsetDateTimeConverterUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/TimestampAtUtcToOffsetDateTimeConverterUnitTests.java new file mode 100644 index 0000000000..dcd7802e59 --- /dev/null +++ 
b/spring-data-relational/src/test/java/org/springframework/data/relational/core/dialect/TimestampAtUtcToOffsetDateTimeConverterUnitTests.java @@ -0,0 +1,42 @@ +package org.springframework.data.relational.core.dialect; + +import static org.assertj.core.api.Assertions.*; + +import java.sql.Timestamp; +import java.time.Instant; +import java.time.OffsetDateTime; + +import org.junit.jupiter.api.Test; + +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Tests {@link TimestampAtUtcToOffsetDateTimeConverter}. 
+ * + * @author Jens Schauder + */ +class TimestampAtUtcToOffsetDateTimeConverterUnitTests { + + @Test + void conversionMaintainsInstant() { + + Timestamp timestamp = Timestamp.from(Instant.now()); + OffsetDateTime converted = TimestampAtUtcToOffsetDateTimeConverter.INSTANCE.convert(timestamp); + + assertThat(converted.toInstant()).isEqualTo(timestamp.toInstant()); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/BasicRelationalPersistentEntityUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/BasicRelationalPersistentEntityUnitTests.java new file mode 100644 index 0000000000..af7b19844c --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/BasicRelationalPersistentEntityUnitTests.java @@ -0,0 +1,258 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.mapping; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import java.util.Map; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.annotation.Id; +import org.springframework.data.relational.core.mapping.BasicRelationalPersistentEntityUnitTests.MyConfig; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.SqlIdentifier; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * Unit tests for {@link BasicRelationalPersistentEntity}. + * + * @author Oliver Gierke + * @author Kazuki Shimizu + * @author Bastian Wilhelm + * @author Mark Paluch + * @author Mikhail Polivakha + * @author Kurt Niemi + */ +@SpringJUnitConfig(classes = MyConfig.class) +class BasicRelationalPersistentEntityUnitTests { + + @Autowired ApplicationContext applicationContext; + private RelationalMappingContext mappingContext = new RelationalMappingContext(); + + @Test // DATAJDBC-106 + void discoversAnnotatedTableName() { + + RelationalPersistentEntity entity = mappingContext.getRequiredPersistentEntity(DummySubEntity.class); + + assertThat(entity.getTableName()).isEqualTo(quoted("dummy_sub_entity")); + assertThat(entity.getQualifiedTableName()).isEqualTo(quoted("dummy_sub_entity")); + assertThat(entity.getTableName()).isEqualTo(quoted("dummy_sub_entity")); + } + + @Test // DATAJDBC-294 + void considerIdColumnName() { + + RelationalPersistentEntity entity = mappingContext.getRequiredPersistentEntity(DummySubEntity.class); + + 
assertThat(entity.getIdColumn()).isEqualTo(quoted("renamedId")); + } + + @Test // DATAJDBC-296 + void emptyTableAnnotationFallsBackToNamingStrategy() { + + RelationalPersistentEntity entity = mappingContext + .getRequiredPersistentEntity(DummyEntityWithEmptyAnnotation.class); + + assertThat(entity.getTableName()).isEqualTo(quoted("DUMMY_ENTITY_WITH_EMPTY_ANNOTATION")); + assertThat(entity.getQualifiedTableName()).isEqualTo(quoted("DUMMY_ENTITY_WITH_EMPTY_ANNOTATION")); + assertThat(entity.getTableName()).isEqualTo(quoted("DUMMY_ENTITY_WITH_EMPTY_ANNOTATION")); + } + + @Test // DATAJDBC-491 + void namingStrategyWithSchemaReturnsCompositeTableName() { + + mappingContext = new RelationalMappingContext(NamingStrategyWithSchema.INSTANCE); + RelationalPersistentEntity entity = mappingContext + .getRequiredPersistentEntity(DummyEntityWithEmptyAnnotation.class); + + SqlIdentifier simpleExpected = quoted("DUMMY_ENTITY_WITH_EMPTY_ANNOTATION"); + SqlIdentifier fullExpected = SqlIdentifier.from(quoted("MY_SCHEMA"), simpleExpected); + + assertThat(entity.getQualifiedTableName()).isEqualTo(fullExpected); + assertThat(entity.getTableName()).isEqualTo(simpleExpected); + + assertThat(entity.getQualifiedTableName().toSql(IdentifierProcessing.ANSI)) + .isEqualTo("\"MY_SCHEMA\".\"DUMMY_ENTITY_WITH_EMPTY_ANNOTATION\""); + } + + @Test // GH-1099 + void testRelationalPersistentEntitySchemaNameChoice() { + + mappingContext = new RelationalMappingContext(NamingStrategyWithSchema.INSTANCE); + RelationalPersistentEntity entity = mappingContext.getRequiredPersistentEntity(EntityWithSchemaAndName.class); + + SqlIdentifier simpleExpected = quoted("I_AM_THE_SENATE"); + SqlIdentifier expected = SqlIdentifier.from(quoted("DART_VADER"), simpleExpected); + assertThat(entity.getQualifiedTableName()).isEqualTo(expected); + assertThat(entity.getTableName()).isEqualTo(simpleExpected); + } + + @Test // GH-1325 + void testRelationalPersistentEntitySpelExpression() { + + mappingContext = new 
RelationalMappingContext(NamingStrategyWithSchema.INSTANCE); + RelationalPersistentEntity entity = mappingContext + .getRequiredPersistentEntity(EntityWithSchemaAndTableSpelExpression.class); + + SqlIdentifier simpleExpected = quoted("USE_THE_FORCE"); + SqlIdentifier expected = SqlIdentifier.from(quoted("HELP_ME_OBI_WON"), simpleExpected); + assertThat(entity.getQualifiedTableName()).isEqualTo(expected); + assertThat(entity.getTableName()).isEqualTo(simpleExpected); + } + + @Test // GH-1325 + void testRelationalPersistentEntitySpelExpression_Sanitized() { + + mappingContext = new RelationalMappingContext(NamingStrategyWithSchema.INSTANCE); + RelationalPersistentEntity entity = mappingContext.getRequiredPersistentEntity(LittleBobbyTables.class); + + SqlIdentifier simpleExpected = quoted("RobertDROPTABLEstudents"); + SqlIdentifier expected = SqlIdentifier.from(quoted("RandomSQLToExecute"), simpleExpected); + assertThat(entity.getQualifiedTableName()).isEqualTo(expected); + assertThat(entity.getTableName()).isEqualTo(simpleExpected); + } + + @Test // GH-1325 + void testRelationalPersistentEntitySpelExpression_NonSpelExpression() { + + mappingContext = new RelationalMappingContext(NamingStrategyWithSchema.INSTANCE); + RelationalPersistentEntity entity = mappingContext.getRequiredPersistentEntity(EntityWithSchemaAndName.class); + + SqlIdentifier simpleExpected = quoted("I_AM_THE_SENATE"); + SqlIdentifier expected = SqlIdentifier.from(quoted("DART_VADER"), simpleExpected); + assertThat(entity.getQualifiedTableName()).isEqualTo(expected); + assertThat(entity.getTableName()).isEqualTo(simpleExpected); + } + + @Test // GH-1099 + void specifiedSchemaGetsCombinedWithNameFromNamingStrategy() { + + RelationalPersistentEntity entity = mappingContext.getRequiredPersistentEntity(EntityWithSchema.class); + + SqlIdentifier simpleExpected = quoted("ENTITY_WITH_SCHEMA"); + SqlIdentifier expected = SqlIdentifier.from(quoted("ANAKYN_SKYWALKER"), simpleExpected); + 
assertThat(entity.getQualifiedTableName()).isEqualTo(expected); + assertThat(entity.getTableName()).isEqualTo(simpleExpected); + } + + @Test // GH-1325 + void considersSpelExtensions() { + + mappingContext.setApplicationContext(applicationContext); + RelationalPersistentEntity entity = mappingContext + .getRequiredPersistentEntity(WithConfiguredSqlIdentifiers.class); + + assertThat(entity.getTableName()).isEqualTo(SqlIdentifier.quoted("my_table")); + assertThat(entity.getIdColumn()).isEqualTo(SqlIdentifier.quoted("my_column")); + } + + @Table(schema = "ANAKYN_SKYWALKER") + private static class EntityWithSchema { + @Id private Long id; + } + + @Table(schema = "DART_VADER", name = "I_AM_THE_SENATE") + private static class EntityWithSchemaAndName { + @Id private Long id; + } + + @Table( + schema = "#{T(org.springframework.data.relational.core.mapping." + + "BasicRelationalPersistentEntityUnitTests$EntityWithSchemaAndTableSpelExpression).desiredSchemaName}", + name = "#{T(org.springframework.data.relational.core.mapping." + + "BasicRelationalPersistentEntityUnitTests$EntityWithSchemaAndTableSpelExpression).desiredTableName}") + private static class EntityWithSchemaAndTableSpelExpression { + @Id private Long id; + public static String desiredTableName = "USE_THE_FORCE"; + public static String desiredSchemaName = "HELP_ME_OBI_WON"; + } + + @Table( + schema = "#{T(org.springframework.data.relational.core.mapping." + + "BasicRelationalPersistentEntityUnitTests$LittleBobbyTables).desiredSchemaName}", + name = "#{T(org.springframework.data.relational.core.mapping." 
+ + "BasicRelationalPersistentEntityUnitTests$LittleBobbyTables).desiredTableName}") + private static class LittleBobbyTables { + @Id private Long id; + public static String desiredTableName = "Robert'); DROP TABLE students;--"; + public static String desiredSchemaName = "Random SQL To Execute;"; + } + + @Table("dummy_sub_entity") + static class DummySubEntity { + @Id + @Column("renamedId") Long id; + } + + @Table() + static class DummyEntityWithEmptyAnnotation { + @Id + @Column() Long id; + } + + enum NamingStrategyWithSchema implements NamingStrategy { + INSTANCE; + + @Override + public String getSchema() { + return "my_schema"; + } + } + + @Table("#{myExtension.getTableName()}") + static class WithConfiguredSqlIdentifiers { + @Id + @Column("#{myExtension.getColumnName()}") Long id; + } + + @Configuration + public static class MyConfig { + + @Bean + public MyExtension extension() { + return new MyExtension(); + } + + } + + public static class MyExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "my"; + } + + public String getTableName() { + return "my_table"; + } + + public String getColumnName() { + return "my_column"; + } + + @Override + public Map getProperties() { + return Map.of("myExtension", this); + } + + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/BasicRelationalPersistentPropertyUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/BasicRelationalPersistentPropertyUnitTests.java new file mode 100644 index 0000000000..86f3c53858 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/BasicRelationalPersistentPropertyUnitTests.java @@ -0,0 +1,547 @@ +/* + * Copyright 2017-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import junit.framework.AssertionFailedError; + +import java.time.LocalDateTime; +import java.time.ZonedDateTime; +import java.util.Date; +import java.util.List; +import java.util.function.BiConsumer; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.mapping.Embedded.OnEmpty; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Unit tests for the {@link BasicRelationalPersistentProperty}. 
+ * + * @author Jens Schauder + * @author Oliver Gierke + * @author Florian Lüdiger + * @author Bastian Wilhelm + * @author Kurt Niemi + * @author Mark Paluch + */ +class BasicRelationalPersistentPropertyUnitTests { + + private RelationalMappingContext context = new RelationalMappingContext(); + private RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + @Test // DATAJDBC-106 + void detectsAnnotatedColumnName() { + + assertThat(entity.getRequiredPersistentProperty("name").getColumnName()).isEqualTo(quoted("dummy_name")); + assertThat(entity.getRequiredPersistentProperty("localDateTime").getColumnName()) + .isEqualTo(quoted("dummy_last_updated_at")); + } + + @Test // DATAJDBC-218 + void detectsAnnotatedColumnAndKeyName() { + + RelationalPersistentProperty listProperty = entity.getRequiredPersistentProperty("someList"); + + PersistentPropertyPath path = context + .findPersistentPropertyPaths(DummyEntity.class, p -> p.getName().equals("someList")).getFirst() + .orElseThrow(() -> new AssertionFailedError("Couldn't find path for 'someList'")); + + assertThat(listProperty.getReverseColumnName(path.getLeafProperty().getOwner())) + .isEqualTo(quoted("dummy_column_name")); + assertThat(listProperty.getKeyColumn()).isEqualTo(quoted("dummy_key_column_name")); + } + + @Test // GH-1325 + void testRelationalPersistentEntitySpelExpressions() { + + assertThat(entity.getRequiredPersistentProperty("spelExpression1").getColumnName()) + .isEqualTo(quoted("THE_FORCE_IS_WITH_YOU")); + assertThat(entity.getRequiredPersistentProperty("littleBobbyTables").getColumnName()) + .isEqualTo(quoted("DROPALLTABLES")); + + // Test that sanitizer does affect non-spel expressions + assertThat(entity.getRequiredPersistentProperty("poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot") + .getColumnName()).isEqualTo(quoted("--; DROP ALL TABLES;--")); + } + + @Test // GH-1325 + void shouldEvaluateMappedCollectionExpressions() { + + 
RelationalPersistentEntity entity = context.getRequiredPersistentEntity(WithMappedCollection.class); + RelationalPersistentProperty property = entity.getRequiredPersistentProperty("someList"); + + assertThat(property.getKeyColumn()).isEqualTo(quoted("key_col")); + } + + @Test // DATAJDBC-111 + void detectsEmbeddedEntity() { + + final RelationalPersistentEntity requiredPersistentEntity = context + .getRequiredPersistentEntity(DummyEntity.class); + + SoftAssertions softly = new SoftAssertions(); + + BiConsumer checkEmbedded = (name, prefix) -> { + + RelationalPersistentProperty property = requiredPersistentEntity.getRequiredPersistentProperty(name); + + softly.assertThat(property.isEmbedded()) // + .describedAs(name + " is embedded") // + .isEqualTo(prefix != null); + + softly.assertThat(property.getEmbeddedPrefix()) // + .describedAs(name + " prefix") // + .isEqualTo(prefix); + }; + + checkEmbedded.accept("someList", null); + checkEmbedded.accept("id", null); + checkEmbedded.accept("embeddableEntity", ""); + checkEmbedded.accept("prefixedEmbeddableEntity", "prefix"); + + softly.assertAll(); + } + + @Test // DATAJDBC-259 + void classificationOfCollectionLikeProperties() { + + RelationalPersistentProperty listOfString = entity.getRequiredPersistentProperty("listOfString"); + RelationalPersistentProperty arrayOfString = entity.getRequiredPersistentProperty("arrayOfString"); + RelationalPersistentProperty listOfEntity = entity.getRequiredPersistentProperty("listOfEntity"); + RelationalPersistentProperty arrayOfEntity = entity.getRequiredPersistentProperty("arrayOfEntity"); + + SoftAssertions softly = new SoftAssertions(); + + softly.assertThat(listOfString.isCollectionLike() && !listOfString.isEntity()) + .describedAs("listOfString is a Collection of a simple type.").isEqualTo(true); + softly.assertThat(arrayOfString.isCollectionLike() && !arrayOfString.isEntity()) + .describedAs("arrayOfString is a Collection of a simple type.").isTrue(); + 
softly.assertThat(listOfEntity.isCollectionLike() && !listOfEntity.isEntity()) + .describedAs("listOfEntity is a Collection of a simple type.").isFalse(); + softly.assertThat(arrayOfEntity.isCollectionLike() && !arrayOfEntity.isEntity()) + .describedAs("arrayOfEntity is a Collection of a simple type.").isFalse(); + + BiConsumer checkEitherOr = (p, s) -> softly + .assertThat(p.isCollectionLike() && !p.isEntity()).describedAs(s + " contains either simple types or entities") + .isNotEqualTo(p.isCollectionLike() && p.isEntity()); + + checkEitherOr.accept(listOfString, "listOfString"); + checkEitherOr.accept(arrayOfString, "arrayOfString"); + checkEitherOr.accept(listOfEntity, "listOfEntity"); + checkEitherOr.accept(arrayOfEntity, "arrayOfEntity"); + + softly.assertAll(); + } + + @Test // GH-1923 + void entityWithNoSequence() { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + assertThat(entity.getRequiredIdProperty().getSequence()).isNull(); + } + + @Test // GH-1923 + void determineSequenceName() { + + RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(EntityWithSequence.class); + + assertThat(persistentEntity.getRequiredIdProperty().getSequence()).isEqualTo(SqlIdentifier.quoted("my_seq")); + } + + @Test // GH-1923 + void determineSequenceNameFromValue() { + + RelationalPersistentEntity persistentEntity = context + .getRequiredPersistentEntity(EntityWithSequenceValueAlias.class); + + assertThat(persistentEntity.getRequiredIdProperty().getSequence()).isEqualTo(SqlIdentifier.quoted("my_seq")); + } + + @Test // GH-1923 + void determineSequenceNameWithSchemaSpecified() { + + RelationalPersistentEntity persistentEntity = context + .getRequiredPersistentEntity(EntityWithSequenceAndSchema.class); + + assertThat(persistentEntity.getRequiredIdProperty().getSequence()) + .isEqualTo(SqlIdentifier.from(SqlIdentifier.quoted("public"), SqlIdentifier.quoted("my_seq"))); + } + + 
@SuppressWarnings("unused") + static class DummyEntity { + + @Id private final Long id; + private final SomeEnum someEnum; + private final LocalDateTime localDateTime; + private final ZonedDateTime zonedDateTime; + + // DATAJDBC-259 + private final List listOfString; + private final String[] arrayOfString; + private final List listOfEntity; + private final OtherEntity[] arrayOfEntity; + + @MappedCollection(idColumn = "dummy_column_name", + keyColumn = "dummy_key_column_name") private List someList; + + // DATACMNS-106 + private @Column("dummy_name") String name; + + public static String spelExpression1Value = "THE_FORCE_IS_WITH_YOU"; + + public static String littleBobbyTablesValue = "--; DROP ALL TABLES;--"; + @Column(value = "#{T(org.springframework.data.relational.core.mapping." + + "BasicRelationalPersistentPropertyUnitTests$DummyEntity" + + ").spelExpression1Value}") private String spelExpression1; + + @Column(value = "#{T(org.springframework.data.relational.core.mapping." + + "BasicRelationalPersistentPropertyUnitTests$DummyEntity" + + ").littleBobbyTablesValue}") private String littleBobbyTables; + + @Column( + value = "--; DROP ALL TABLES;--") private String poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot; + + // DATAJDBC-111 + private @Embedded(onEmpty = OnEmpty.USE_NULL) EmbeddableEntity embeddableEntity; + + // DATAJDBC-111 + private @Embedded(onEmpty = OnEmpty.USE_NULL, prefix = "prefix") EmbeddableEntity prefixedEmbeddableEntity; + + public DummyEntity(Long id, SomeEnum someEnum, LocalDateTime localDateTime, ZonedDateTime zonedDateTime, + List listOfString, String[] arrayOfString, List listOfEntity, + OtherEntity[] arrayOfEntity) { + this.id = id; + this.someEnum = someEnum; + this.localDateTime = localDateTime; + this.zonedDateTime = zonedDateTime; + this.listOfString = listOfString; + this.arrayOfString = arrayOfString; + this.listOfEntity = listOfEntity; + this.arrayOfEntity = arrayOfEntity; + } + + @Column("dummy_last_updated_at") + 
public LocalDateTime getLocalDateTime() { + return localDateTime; + } + + public void setListSetter(Integer integer) { + + } + + public List getListGetter() { + return null; + } + + Long getId() { + return this.id; + } + + SomeEnum getSomeEnum() { + return this.someEnum; + } + + ZonedDateTime getZonedDateTime() { + return this.zonedDateTime; + } + + List getListOfString() { + return this.listOfString; + } + + String[] getArrayOfString() { + return this.arrayOfString; + } + + List getListOfEntity() { + return this.listOfEntity; + } + + OtherEntity[] getArrayOfEntity() { + return this.arrayOfEntity; + } + + List getSomeList() { + return this.someList; + } + + String getName() { + return this.name; + } + + String getSpelExpression1() { + return this.spelExpression1; + } + + String getLittleBobbyTables() { + return this.littleBobbyTables; + } + + String getPoorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot() { + return this.poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot; + } + + EmbeddableEntity getEmbeddableEntity() { + return this.embeddableEntity; + } + + EmbeddableEntity getPrefixedEmbeddableEntity() { + return this.prefixedEmbeddableEntity; + } + + public void setSomeList(List someList) { + this.someList = someList; + } + + public void setName(String name) { + this.name = name; + } + + public void setSpelExpression1(String spelExpression1) { + this.spelExpression1 = spelExpression1; + } + + public void setLittleBobbyTables(String littleBobbyTables) { + this.littleBobbyTables = littleBobbyTables; + } + + public void setPoorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot( + String poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot) { + this.poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot = poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot; + } + + public void setEmbeddableEntity(EmbeddableEntity embeddableEntity) { + this.embeddableEntity = embeddableEntity; + } + + public void 
setPrefixedEmbeddableEntity(EmbeddableEntity prefixedEmbeddableEntity) { + this.prefixedEmbeddableEntity = prefixedEmbeddableEntity; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof DummyEntity)) + return false; + final DummyEntity other = (DummyEntity) o; + if (!other.canEqual((Object) this)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? other$id != null : !this$id.equals(other$id)) + return false; + final Object this$someEnum = this.getSomeEnum(); + final Object other$someEnum = other.getSomeEnum(); + if (this$someEnum == null ? other$someEnum != null : !this$someEnum.equals(other$someEnum)) + return false; + final Object this$localDateTime = this.getLocalDateTime(); + final Object other$localDateTime = other.getLocalDateTime(); + if (this$localDateTime == null ? other$localDateTime != null : !this$localDateTime.equals(other$localDateTime)) + return false; + final Object this$zonedDateTime = this.getZonedDateTime(); + final Object other$zonedDateTime = other.getZonedDateTime(); + if (this$zonedDateTime == null ? other$zonedDateTime != null : !this$zonedDateTime.equals(other$zonedDateTime)) + return false; + final Object this$listOfString = this.getListOfString(); + final Object other$listOfString = other.getListOfString(); + if (this$listOfString == null ? other$listOfString != null : !this$listOfString.equals(other$listOfString)) + return false; + if (!java.util.Arrays.deepEquals(this.getArrayOfString(), other.getArrayOfString())) + return false; + final Object this$listOfEntity = this.getListOfEntity(); + final Object other$listOfEntity = other.getListOfEntity(); + if (this$listOfEntity == null ? 
other$listOfEntity != null : !this$listOfEntity.equals(other$listOfEntity)) + return false; + if (!java.util.Arrays.deepEquals(this.getArrayOfEntity(), other.getArrayOfEntity())) + return false; + final Object this$someList = this.getSomeList(); + final Object other$someList = other.getSomeList(); + if (this$someList == null ? other$someList != null : !this$someList.equals(other$someList)) + return false; + final Object this$name = this.getName(); + final Object other$name = other.getName(); + if (this$name == null ? other$name != null : !this$name.equals(other$name)) + return false; + final Object this$spelExpression1 = this.getSpelExpression1(); + final Object other$spelExpression1 = other.getSpelExpression1(); + if (this$spelExpression1 == null ? other$spelExpression1 != null + : !this$spelExpression1.equals(other$spelExpression1)) + return false; + final Object this$littleBobbyTables = this.getLittleBobbyTables(); + final Object other$littleBobbyTables = other.getLittleBobbyTables(); + if (this$littleBobbyTables == null ? other$littleBobbyTables != null + : !this$littleBobbyTables.equals(other$littleBobbyTables)) + return false; + final Object this$poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot = this + .getPoorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot(); + final Object other$poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot = other + .getPoorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot(); + if (this$poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot == null + ? other$poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot != null + : !this$poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot + .equals(other$poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot)) + return false; + final Object this$embeddableEntity = this.getEmbeddableEntity(); + final Object other$embeddableEntity = other.getEmbeddableEntity(); + if (this$embeddableEntity == null ? 
other$embeddableEntity != null + : !this$embeddableEntity.equals(other$embeddableEntity)) + return false; + final Object this$prefixedEmbeddableEntity = this.getPrefixedEmbeddableEntity(); + final Object other$prefixedEmbeddableEntity = other.getPrefixedEmbeddableEntity(); + if (this$prefixedEmbeddableEntity == null ? other$prefixedEmbeddableEntity != null + : !this$prefixedEmbeddableEntity.equals(other$prefixedEmbeddableEntity)) + return false; + return true; + } + + boolean canEqual(final Object other) { + return other instanceof DummyEntity; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $someEnum = this.getSomeEnum(); + result = result * PRIME + ($someEnum == null ? 43 : $someEnum.hashCode()); + final Object $localDateTime = this.getLocalDateTime(); + result = result * PRIME + ($localDateTime == null ? 43 : $localDateTime.hashCode()); + final Object $zonedDateTime = this.getZonedDateTime(); + result = result * PRIME + ($zonedDateTime == null ? 43 : $zonedDateTime.hashCode()); + final Object $listOfString = this.getListOfString(); + result = result * PRIME + ($listOfString == null ? 43 : $listOfString.hashCode()); + result = result * PRIME + java.util.Arrays.deepHashCode(this.getArrayOfString()); + final Object $listOfEntity = this.getListOfEntity(); + result = result * PRIME + ($listOfEntity == null ? 43 : $listOfEntity.hashCode()); + result = result * PRIME + java.util.Arrays.deepHashCode(this.getArrayOfEntity()); + final Object $someList = this.getSomeList(); + result = result * PRIME + ($someList == null ? 43 : $someList.hashCode()); + final Object $name = this.getName(); + result = result * PRIME + ($name == null ? 43 : $name.hashCode()); + final Object $spelExpression1 = this.getSpelExpression1(); + result = result * PRIME + ($spelExpression1 == null ? 
43 : $spelExpression1.hashCode()); + final Object $littleBobbyTables = this.getLittleBobbyTables(); + result = result * PRIME + ($littleBobbyTables == null ? 43 : $littleBobbyTables.hashCode()); + final Object $poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot = this + .getPoorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot(); + result = result * PRIME + ($poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot == null ? 43 + : $poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot.hashCode()); + final Object $embeddableEntity = this.getEmbeddableEntity(); + result = result * PRIME + ($embeddableEntity == null ? 43 : $embeddableEntity.hashCode()); + final Object $prefixedEmbeddableEntity = this.getPrefixedEmbeddableEntity(); + result = result * PRIME + ($prefixedEmbeddableEntity == null ? 43 : $prefixedEmbeddableEntity.hashCode()); + return result; + } + + public String toString() { + return "BasicRelationalPersistentPropertyUnitTests.DummyEntity(id=" + this.getId() + ", someEnum=" + + this.getSomeEnum() + ", localDateTime=" + this.getLocalDateTime() + ", zonedDateTime=" + + this.getZonedDateTime() + ", listOfString=" + this.getListOfString() + ", arrayOfString=" + + java.util.Arrays.deepToString(this.getArrayOfString()) + ", listOfEntity=" + this.getListOfEntity() + + ", arrayOfEntity=" + java.util.Arrays.deepToString(this.getArrayOfEntity()) + ", someList=" + + this.getSomeList() + ", name=" + this.getName() + ", spelExpression1=" + this.getSpelExpression1() + + ", littleBobbyTables=" + this.getLittleBobbyTables() + + ", poorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot=" + + this.getPoorDeveloperProgrammaticallyAskingToShootThemselvesInTheFoot() + ", embeddableEntity=" + + this.getEmbeddableEntity() + ", prefixedEmbeddableEntity=" + this.getPrefixedEmbeddableEntity() + ")"; + } + } + + private static class WithMappedCollection { + + @MappedCollection(idColumn = "#{'id_col'}", keyColumn = "#{'key_col'}") private 
List someList; + } + + @SuppressWarnings("unused") + private enum SomeEnum { + ALPHA + } + + // DATAJDBC-111 + private static class EmbeddableEntity { + private final String embeddedTest; + + public EmbeddableEntity(String embeddedTest) { + this.embeddedTest = embeddedTest; + } + + String getEmbeddedTest() { + return this.embeddedTest; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof EmbeddableEntity)) + return false; + final EmbeddableEntity other = (EmbeddableEntity) o; + if (!other.canEqual((Object) this)) + return false; + final Object this$embeddedTest = this.getEmbeddedTest(); + final Object other$embeddedTest = other.getEmbeddedTest(); + if (this$embeddedTest == null ? other$embeddedTest != null : !this$embeddedTest.equals(other$embeddedTest)) + return false; + return true; + } + + boolean canEqual(final Object other) { + return other instanceof EmbeddableEntity; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $embeddedTest = this.getEmbeddedTest(); + result = result * PRIME + ($embeddedTest == null ? 
43 : $embeddedTest.hashCode()); + return result; + } + + public String toString() { + return "BasicRelationalPersistentPropertyUnitTests.EmbeddableEntity(embeddedTest=" + this.getEmbeddedTest() + ")"; + } + } + + @SuppressWarnings("unused") + private static class OtherEntity {} + + @Table("entity_with_sequence") + static class EntityWithSequence { + @Id + @Sequence(sequence = "my_seq") Long id; + } + + @Table("entity_with_sequence_value_alias") + static class EntityWithSequenceValueAlias { + @Id + @Column("myId") + @Sequence(value = "my_seq") Long id; + } + + @Table("entity_with_sequence_and_schema") + static class EntityWithSequenceAndSchema { + @Id + @Column("myId") + @Sequence(sequence = "my_seq", schema = "public") Long id; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DefaultAggregatePathUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DefaultAggregatePathUnitTests.java new file mode 100644 index 0000000000..c173d0294f --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DefaultAggregatePathUnitTests.java @@ -0,0 +1,523 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.relational.core.mapping; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Tests for {@link AggregatePath}. + * + * @author Jens Schauder + * @author Mark Paluch + */ +class DefaultAggregatePathUnitTests { + RelationalMappingContext context = new RelationalMappingContext(); + + private RelationalPersistentEntity entity = context.getRequiredPersistentEntity(DummyEntity.class); + + @Test // GH-1525 + void isNotRootForNonRootPath() { + + AggregatePath path = context.getAggregatePath(context.getPersistentPropertyPath("entityId", DummyEntity.class)); + + assertThat(path.isRoot()).isFalse(); + } + + @Test // GH-1525 + void isRootForRootPath() { + + AggregatePath path = context.getAggregatePath(entity); + + assertThat(path.isRoot()).isTrue(); + } + + @Test // GH-1525 + void getParentPath() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2.value").getParentPath()).isEqualTo(path("second.third2")); + softly.assertThat(path("second.third2").getParentPath()).isEqualTo(path("second")); + softly.assertThat(path("second").getParentPath()).isEqualTo(path()); + + softly.assertThatThrownBy(() -> path().getParentPath()).isInstanceOf(IllegalStateException.class); + }); + } + + @Test // GH-1525 + void getRequiredLeafEntity() { + + assertSoftly(softly -> { + + softly.assertThat(path().getRequiredLeafEntity()).isEqualTo(entity); + softly.assertThat(path("second").getRequiredLeafEntity()) + 
.isEqualTo(context.getRequiredPersistentEntity(Second.class)); + softly.assertThat(path("second.third").getRequiredLeafEntity()) + .isEqualTo(context.getRequiredPersistentEntity(Third.class)); + softly.assertThat(path("secondList").getRequiredLeafEntity()) + .isEqualTo(context.getRequiredPersistentEntity(Second.class)); + + softly.assertThatThrownBy(() -> path("secondList.third.value").getRequiredLeafEntity()) + .isInstanceOf(IllegalStateException.class); + + }); + } + + @Test // GH-1525 + void idDefiningPath() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2.value").getIdDefiningParentPath()).isEqualTo(path()); + softly.assertThat(path("second.third.value").getIdDefiningParentPath()).isEqualTo(path()); + softly.assertThat(path("secondList.third2.value").getIdDefiningParentPath()).isEqualTo(path()); + softly.assertThat(path("secondList.third.value").getIdDefiningParentPath()).isEqualTo(path()); + softly.assertThat(path("second2.third2.value").getIdDefiningParentPath()).isEqualTo(path()); + softly.assertThat(path("second2.third.value").getIdDefiningParentPath()).isEqualTo(path()); + softly.assertThat(path("withId.second.third2.value").getIdDefiningParentPath()).isEqualTo(path("withId")); + softly.assertThat(path("withId.second.third.value").getIdDefiningParentPath()).isEqualTo(path("withId")); + }); + } + + @Test // GH-1525 + void getRequiredIdProperty() { + + assertSoftly(softly -> { + + softly.assertThat(path().getRequiredIdProperty().getName()).isEqualTo("entityId"); + softly.assertThat(path("withId").getRequiredIdProperty().getName()).isEqualTo("withIdId"); + softly.assertThatThrownBy(() -> path("second").getRequiredIdProperty()).isInstanceOf(IllegalStateException.class); + }); + } + + @Test // GH-1525 + void reverseColumnName() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("DUMMY_ENTITY")); + 
softly.assertThat(path("second.third").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("DUMMY_ENTITY")); + softly.assertThat(path("secondList.third2").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("DUMMY_ENTITY")); + softly.assertThat(path("secondList.third").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("DUMMY_ENTITY")); + softly.assertThat(path("second2.third").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("DUMMY_ENTITY")); + softly.assertThat(path("withId.second.third2.value").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("WITH_ID")); + softly.assertThat(path("withId.second.third").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("WITH_ID")); + softly.assertThat(path("withId.second2.third").getTableInfo().reverseColumnInfo().name()) + .isEqualTo(quoted("WITH_ID")); + }); + } + + @Test // GH-1525 + void getQualifierColumn() { + + assertSoftly(softly -> { + + softly.assertThat(path().getTableInfo().qualifierColumnInfo()).isEqualTo(null); + softly.assertThat(path("second.third").getTableInfo().qualifierColumnInfo()).isEqualTo(null); + softly.assertThat(path("secondList.third2").getTableInfo().qualifierColumnInfo()).isEqualTo(null); + softly.assertThat(path("secondList").getTableInfo().qualifierColumnInfo().name()) + .isEqualTo(SqlIdentifier.quoted("DUMMY_ENTITY_KEY")); + + }); + } + + @Test // GH-1525 + void getQualifierColumnType() { + + assertSoftly(softly -> { + + softly.assertThat(path().getTableInfo().qualifierColumnType()).isEqualTo(null); + softly.assertThat(path("second.third").getTableInfo().qualifierColumnType()).isEqualTo(null); + softly.assertThat(path("secondList.third2").getTableInfo().qualifierColumnType()).isEqualTo(null); + softly.assertThat(path("secondList").getTableInfo().qualifierColumnType()).isEqualTo(Integer.class); + + }); + } + + @Test // GH-1525 + void extendBy() { + + assertSoftly(softly -> { + + 
softly.assertThat(path().append(entity.getRequiredPersistentProperty("withId"))).isEqualTo(path("withId")); + softly.assertThat(path("withId").append(path("withId").getRequiredIdProperty())) + .isEqualTo(path("withId.withIdId")); + }); + } + + @Test // GH-1525 + void isWritable() { + + assertSoftly(softly -> { + softly.assertThat(context.getAggregatePath(createSimplePath("withId")).isWritable()) + .describedAs("simple path is writable").isTrue(); + softly.assertThat(context.getAggregatePath(createSimplePath("secondList.third2")).isWritable()) + .describedAs("long path is writable").isTrue(); + softly.assertThat(context.getAggregatePath(createSimplePath("second")).isWritable()) + .describedAs("simple read only path is not writable").isFalse(); + softly.assertThat(context.getAggregatePath(createSimplePath("second.third")).isWritable()) + .describedAs("long path containing read only element is not writable").isFalse(); + }); + } + + @Test // GH-1525 + void isEmbedded() { + + assertSoftly(softly -> { + softly.assertThat(path().isEmbedded()).isFalse(); + softly.assertThat(path("withId").isEmbedded()).isFalse(); + softly.assertThat(path("second2.third").isEmbedded()).isFalse(); + softly.assertThat(path("second2.third2").isEmbedded()).isTrue(); + softly.assertThat(path("second2").isEmbedded()).isTrue(); + }); + } + + @Test // GH-1525 + void isEntity() { + + assertSoftly(softly -> { + + softly.assertThat(path().isEntity()).isTrue(); + softly.assertThat(path("second").isEntity()).isTrue(); + softly.assertThat(path("second.third2").isEntity()).isTrue(); + softly.assertThat(path("secondList.third2").isEntity()).isTrue(); + softly.assertThat(path("secondList").isEntity()).isTrue(); + softly.assertThat(path("second.third2.value").isEntity()).isFalse(); + softly.assertThat(path("secondList.third2.value").isEntity()).isFalse(); + }); + } + + @Test // GH-1525 + void isMultiValued() { + + assertSoftly(softly -> { + + softly.assertThat(path().isMultiValued()).isFalse(); + 
softly.assertThat(path("second").isMultiValued()).isFalse(); + softly.assertThat(path("second.third2").isMultiValued()).isFalse(); + softly.assertThat(path("secondList.third2").isMultiValued()).isTrue(); // this seems wrong as third2 is an + // embedded path into Second, held by + // List (so the parent is + // multi-valued but not third2). + // TODO: This test fails because MultiValued considers parents. + // softly.assertThat(path("secondList.third.value").isMultiValued()).isFalse(); + softly.assertThat(path("secondList").isMultiValued()).isTrue(); + }); + } + + @Test // GH-1525 + void isQualified() { + + assertSoftly(softly -> { + + softly.assertThat(path().isQualified()).isFalse(); + softly.assertThat(path("second").isQualified()).isFalse(); + softly.assertThat(path("second.third2").isQualified()).isFalse(); + softly.assertThat(path("secondList.third2").isQualified()).isFalse(); + softly.assertThat(path("secondList").isQualified()).isTrue(); + }); + } + + @Test // GH-1525 + void isMap() { + + assertSoftly(softly -> { + + softly.assertThat(path().isMap()).isFalse(); + softly.assertThat(path("second").isMap()).isFalse(); + softly.assertThat(path("second.third2").isMap()).isFalse(); + softly.assertThat(path("secondList.third2").isMap()).isFalse(); + softly.assertThat(path("secondList").isMap()).isFalse(); + softly.assertThat(path("secondMap.third2").isMap()).isFalse(); + softly.assertThat(path("secondMap").isMap()).isTrue(); + }); + } + + @Test // GH-1525 + void isCollectionLike() { + + assertSoftly(softly -> { + + softly.assertThat(path().isCollectionLike()).isFalse(); + softly.assertThat(path("second").isCollectionLike()).isFalse(); + softly.assertThat(path("second.third2").isCollectionLike()).isFalse(); + softly.assertThat(path("secondList.third2").isCollectionLike()).isFalse(); + softly.assertThat(path("secondMap.third2").isCollectionLike()).isFalse(); + softly.assertThat(path("secondMap").isCollectionLike()).isFalse(); + 
softly.assertThat(path("secondList").isCollectionLike()).isTrue(); + }); + } + + @Test // GH-1525 + void isOrdered() { + + assertSoftly(softly -> { + + softly.assertThat(path().isOrdered()).isFalse(); + softly.assertThat(path("second").isOrdered()).isFalse(); + softly.assertThat(path("second.third2").isOrdered()).isFalse(); + softly.assertThat(path("secondList.third2").isOrdered()).isFalse(); + softly.assertThat(path("secondMap.third2").isOrdered()).isFalse(); + softly.assertThat(path("secondMap").isOrdered()).isFalse(); + softly.assertThat(path("secondList").isOrdered()).isTrue(); + }); + } + + @Test // GH-1525 + void getTableAlias() { + + assertSoftly(softly -> { + + softly.assertThat(path().getTableInfo().tableAlias()).isEqualTo(null); + softly.assertThat(path("second").getTableInfo().tableAlias()).isEqualTo(quoted("second")); + softly.assertThat(path("second.third2").getTableInfo().tableAlias()).isEqualTo(quoted("second")); + softly.assertThat(path("second.third2.value").getTableInfo().tableAlias()).isEqualTo(quoted("second")); + softly.assertThat(path("second.third").getTableInfo().tableAlias()).isEqualTo(quoted("second_third")); // missing + // _ + softly.assertThat(path("second.third.value").getTableInfo().tableAlias()).isEqualTo(quoted("second_third")); // missing + // _ + softly.assertThat(path("secondList.third2").getTableInfo().tableAlias()).isEqualTo(quoted("secondList")); + softly.assertThat(path("secondList.third2.value").getTableInfo().tableAlias()).isEqualTo(quoted("secondList")); + softly.assertThat(path("secondList.third").getTableInfo().tableAlias()).isEqualTo(quoted("secondList_third")); // missing + // _ + softly.assertThat(path("secondList.third.value").getTableInfo().tableAlias()) + .isEqualTo(quoted("secondList_third")); // missing _ + softly.assertThat(path("secondList").getTableInfo().tableAlias()).isEqualTo(quoted("secondList")); + softly.assertThat(path("second2.third").getTableInfo().tableAlias()).isEqualTo(quoted("secthird")); + 
softly.assertThat(path("second3.third").getTableInfo().tableAlias()).isEqualTo(quoted("third")); + }); + } + + @Test // GH-1525 + void getTableName() { + + assertSoftly(softly -> { + + softly.assertThat(path().getTableInfo().qualifiedTableName()).isEqualTo(quoted("DUMMY_ENTITY")); + softly.assertThat(path("second").getTableInfo().qualifiedTableName()).isEqualTo(quoted("SECOND")); + softly.assertThat(path("second.third2").getTableInfo().qualifiedTableName()).isEqualTo(quoted("SECOND")); + softly.assertThat(path("second.third2.value").getTableInfo().qualifiedTableName()).isEqualTo(quoted("SECOND")); + softly.assertThat(path("secondList.third2").getTableInfo().qualifiedTableName()).isEqualTo(quoted("SECOND")); + softly.assertThat(path("secondList.third2.value").getTableInfo().qualifiedTableName()) + .isEqualTo(quoted("SECOND")); + softly.assertThat(path("secondList").getTableInfo().qualifiedTableName()).isEqualTo(quoted("SECOND")); + }); + } + + @Test // GH-1525 + void getColumnName() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2.value").getColumnInfo().name()).isEqualTo(quoted("THRDVALUE")); + softly.assertThat(path("second.third.value").getColumnInfo().name()).isEqualTo(quoted("VALUE")); + softly.assertThat(path("secondList.third2.value").getColumnInfo().name()).isEqualTo(quoted("THRDVALUE")); + softly.assertThat(path("secondList.third.value").getColumnInfo().name()).isEqualTo(quoted("VALUE")); + softly.assertThat(path("second2.third2.value").getColumnInfo().name()).isEqualTo(quoted("SECTHRDVALUE")); + softly.assertThat(path("second2.third.value").getColumnInfo().name()).isEqualTo(quoted("VALUE")); + }); + } + + @Test // GH-1525 + void getColumnAlias() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2.value").getColumnInfo().alias()).isEqualTo(quoted("SECOND_THRDVALUE")); + softly.assertThat(path("second.third.value").getColumnInfo().alias()).isEqualTo(quoted("SECOND_THIRD_VALUE")); + 
softly.assertThat(path("secondList.third2.value").getColumnInfo().alias()) + .isEqualTo(quoted("SECONDLIST_THRDVALUE")); + softly.assertThat(path("secondList.third.value").getColumnInfo().alias()) + .isEqualTo(quoted("SECONDLIST_THIRD_VALUE")); + softly.assertThat(path("second2.third2.value").getColumnInfo().alias()).isEqualTo(quoted("SECTHRDVALUE")); + softly.assertThat(path("second2.third.value").getColumnInfo().alias()).isEqualTo(quoted("SECTHIRD_VALUE")); + }); + } + + @Test // GH-1525 + void getReverseColumnAlias() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2.value").getTableInfo().reverseColumnInfo().alias()) + .isEqualTo(quoted("SECOND_DUMMY_ENTITY")); + softly.assertThat(path("second.third.value").getTableInfo().reverseColumnInfo().alias()) + .isEqualTo(quoted("SECOND_THIRD_DUMMY_ENTITY")); + softly.assertThat(path("secondList.third2.value").getTableInfo().reverseColumnInfo().alias()) + .isEqualTo(quoted("SECONDLIST_DUMMY_ENTITY")); + softly.assertThat(path("secondList.third.value").getTableInfo().reverseColumnInfo().alias()) + .isEqualTo(quoted("SECONDLIST_THIRD_DUMMY_ENTITY")); + softly.assertThat(path("second2.third.value").getTableInfo().reverseColumnInfo().alias()) + .isEqualTo(quoted("SECTHIRD_DUMMY_ENTITY")); + }); + } + + @Test // GH-1525 + void getRequiredLeafProperty() { + + assertSoftly(softly -> { + + RelationalPersistentProperty prop = path("second.third2.value").getRequiredLeafProperty(); + softly.assertThat(prop.getName()).isEqualTo("value"); + softly.assertThat(prop.getOwner().getType()).isEqualTo(Third.class); + softly.assertThat(path("second.third").getRequiredLeafProperty()) + .isEqualTo(context.getRequiredPersistentEntity(Second.class).getPersistentProperty("third")); + softly.assertThat(path("secondList").getRequiredLeafProperty()) + .isEqualTo(entity.getPersistentProperty("secondList")); + softly.assertThatThrownBy(() -> path().getRequiredLeafProperty()).isInstanceOf(IllegalStateException.class); + }); + } + 
+ @Test // GH-1525 + void getBaseProperty() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third2.value").getRequiredBaseProperty()) + .isEqualTo(entity.getPersistentProperty("second")); + softly.assertThat(path("second.third.value").getRequiredBaseProperty()) + .isEqualTo(entity.getPersistentProperty("second")); + softly.assertThat(path("secondList.third2.value").getRequiredBaseProperty()) + .isEqualTo(entity.getPersistentProperty("secondList")); + softly.assertThatThrownBy(() -> path().getRequiredBaseProperty()).isInstanceOf(IllegalStateException.class); + }); + } + + @Test // GH-1525 + void getIdColumnName() { + + assertSoftly(softly -> { + + softly.assertThat(path().getTableInfo().idColumnName()).isEqualTo(quoted("ENTITY_ID")); + softly.assertThat(path("withId").getTableInfo().idColumnName()).isEqualTo(quoted("WITH_ID_ID")); + + softly.assertThat(path("second").getTableInfo().idColumnName()).isNull(); + softly.assertThat(path("second.third2").getTableInfo().idColumnName()).isNull(); + softly.assertThat(path("withId.second").getTableInfo().idColumnName()).isNull(); + }); + } + + @Test // GH-1525 + void toDotPath() { + + assertSoftly(softly -> { + + softly.assertThat(path().toDotPath()).isEqualTo(""); + softly.assertThat(path("second.third.value").toDotPath()).isEqualTo("second.third.value"); + }); + } + + @Test // GH-1525 + void getRequiredPersistentPropertyPath() { + + assertSoftly(softly -> { + + softly.assertThat(path("second.third.value").getRequiredPersistentPropertyPath()) + .isEqualTo(createSimplePath("second.third.value")); + softly.assertThatThrownBy(() -> path().getRequiredPersistentPropertyPath()) + .isInstanceOf(IllegalStateException.class); + }); + } + + @Test // GH-1525 + void getEffectiveIdColumnName() { + + assertSoftly(softly -> { + + softly.assertThat(path().getTableInfo().effectiveIdColumnName()).isEqualTo(quoted("ENTITY_ID")); + 
softly.assertThat(path("second.third2").getTableInfo().effectiveIdColumnName()).isEqualTo(quoted("DUMMY_ENTITY")); + softly.assertThat(path("withId.second.third").getTableInfo().effectiveIdColumnName()) + .isEqualTo(quoted("WITH_ID")); + softly.assertThat(path("withId.second.third2.value").getTableInfo().effectiveIdColumnName()) + .isEqualTo(quoted("WITH_ID")); + }); + } + + @Test // GH-1525 + void getLength() { + + assertThat(path().getLength()).isEqualTo(1); + assertThat(path().stream().collect(Collectors.toList())).hasSize(1); + + assertThat(path("second.third2").getLength()).isEqualTo(3); + assertThat(path("second.third2").stream().collect(Collectors.toList())).hasSize(3); + + assertThat(path("withId.second.third").getLength()).isEqualTo(4); + assertThat(path("withId.second.third2.value").getLength()).isEqualTo(5); + } + + private AggregatePath path() { + return context.getAggregatePath(entity); + } + + private AggregatePath path(String path) { + return context.getAggregatePath(createSimplePath(path)); + } + + PersistentPropertyPath createSimplePath(String path) { + return PersistentPropertyPathTestUtils.getPath(context, path, DummyEntity.class); + } + + @SuppressWarnings("unused") + static class DummyEntity { + @Id Long entityId; + @ReadOnlyProperty Second second; + @Embedded(onEmpty = Embedded.OnEmpty.USE_NULL, prefix = "sec") Second second2; + @Embedded(onEmpty = Embedded.OnEmpty.USE_NULL) Second second3; + List secondList; + Map secondMap; + WithId withId; + } + + @SuppressWarnings("unused") + static class Second { + Third third; + @Embedded(onEmpty = Embedded.OnEmpty.USE_NULL, prefix = "thrd") Third third2; + } + + @SuppressWarnings("unused") + static class Third { + String value; + } + + @SuppressWarnings("unused") + static class WithId { + @Id Long withIdId; + Second second; + @Embedded(onEmpty = Embedded.OnEmpty.USE_NULL, prefix = "sec") Second second2; + } + +} diff --git 
a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DefaultNamingStrategyUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DefaultNamingStrategyUnitTests.java new file mode 100644 index 0000000000..417c1ace49 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DefaultNamingStrategyUnitTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping; + +import static org.assertj.core.api.Assertions.*; + +import java.time.LocalDateTime; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.relational.core.mapping.BasicRelationalPersistentEntityUnitTests.DummySubEntity; + +/** + * Unit tests for the {@link NamingStrategy}. 
+ * + * @author Kazuki Shimizu + * @author Oliver Gierke + * @author Jens Schauder + */ +public class DefaultNamingStrategyUnitTests { + + private final NamingStrategy target = DefaultNamingStrategy.INSTANCE; + private final RelationalMappingContext context = new RelationalMappingContext(target); + private final RelationalPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); + + @Test + public void getTableName() { + + assertThat(target.getTableName(persistentEntity.getType())).isEqualTo("dummy_entity"); + assertThat(target.getTableName(DummySubEntity.class)).isEqualTo("dummy_sub_entity"); + } + + @Test + public void getColumnName() { + + assertThat(target.getColumnName(persistentEntity.getPersistentProperty("id"))).isEqualTo("id"); + assertThat(target.getColumnName(persistentEntity.getPersistentProperty("createdAt"))).isEqualTo("created_at"); + assertThat(target.getColumnName(persistentEntity.getPersistentProperty("dummySubEntities"))) + .isEqualTo("dummy_sub_entities"); + } + + @Test + public void getReverseColumnName() { + + assertThat(target.getReverseColumnName(persistentEntity.getPersistentProperty("dummySubEntities"))) + .isEqualTo("dummy_entity"); + } + + @Test + public void getKeyColumn() { + + assertThat(target.getKeyColumn(persistentEntity.getPersistentProperty("dummySubEntities"))) + .isEqualTo("dummy_entity_key"); + } + + @Test + public void getSchema() { + assertThat(target.getSchema()).isEqualTo(""); + } + + static class DummyEntity { + + @Id int id; + LocalDateTime createdAt, lastUpdatedAt; + List dummySubEntities; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DerivedSqlIdentifierUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DerivedSqlIdentifierUnitTests.java new file mode 100644 index 0000000000..6148f8ac85 --- /dev/null +++ 
b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/DerivedSqlIdentifierUnitTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.IdentifierProcessing; +import org.springframework.data.relational.core.sql.IdentifierProcessing.LetterCasing; +import org.springframework.data.relational.core.sql.IdentifierProcessing.Quoting; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Unit tests for {@link DerivedSqlIdentifier}. 
+ * + * @author Jens Schauder + * @author Mark Paluch + * @author Kurt Niemi + */ +public class DerivedSqlIdentifierUnitTests { + + public static final IdentifierProcessing BRACKETS_LOWER_CASE = IdentifierProcessing.create(new Quoting("[", "]"), + LetterCasing.LOWER_CASE); + + @Test // DATAJDBC-386 + public void quotedSimpleObjectIdentifierWithAdjustableLetterCasing() { + + SqlIdentifier identifier = new DerivedSqlIdentifier("someName", true); + + assertThat(identifier.toSql(BRACKETS_LOWER_CASE)).isEqualTo("[somename]"); + assertThat(identifier.getReference()).isEqualTo("someName"); + } + + @Test // DATAJDBC-386 + public void unquotedSimpleObjectIdentifierWithAdjustableLetterCasing() { + + SqlIdentifier identifier = new DerivedSqlIdentifier("someName", false); + String sql = identifier.toSql(BRACKETS_LOWER_CASE); + + assertThat(sql).isEqualTo("somename"); + assertThat(identifier.getReference()).isEqualTo("someName"); + } + + @Test // DATAJDBC-386 + public void quotedMultipartObjectIdentifierWithAdjustableLetterCase() { + + SqlIdentifier identifier = SqlIdentifier.from(new DerivedSqlIdentifier("some", true), + new DerivedSqlIdentifier("name", true)); + String sql = identifier.toSql(IdentifierProcessing.ANSI); + + assertThat(sql).isEqualTo("\"SOME\".\"NAME\""); + } + + @Test // DATAJDBC-386 + public void equality() { + + SqlIdentifier basis = new DerivedSqlIdentifier("simple", false); + SqlIdentifier equal = new DerivedSqlIdentifier("simple", false); + SqlIdentifier quoted = new DerivedSqlIdentifier("simple", true); + SqlIdentifier notSimple = SqlIdentifier.from(new DerivedSqlIdentifier("simple", false), + new DerivedSqlIdentifier("not", false)); + + assertThat(basis).isEqualTo(equal).isEqualTo(SqlIdentifier.unquoted("simple")) + .hasSameHashCodeAs(SqlIdentifier.unquoted("simple")); + assertThat(equal).isEqualTo(basis); + assertThat(basis).isNotEqualTo(quoted); + assertThat(basis).isNotEqualTo(notSimple); + + 
assertThat(quoted).isEqualTo(SqlIdentifier.quoted("SIMPLE")).hasSameHashCodeAs(SqlIdentifier.quoted("SIMPLE")); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/EmbeddedRelationalPersistentPropertyUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/EmbeddedRelationalPersistentPropertyUnitTests.java new file mode 100644 index 0000000000..f9f4f39a5d --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/EmbeddedRelationalPersistentPropertyUnitTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.relational.core.mapping; + +import static org.mockito.Mockito.*; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; + +class EmbeddedRelationalPersistentPropertyUnitTests { + + @Test // GH-1694 + void testEquals() { + + RelationalPersistentProperty delegate = mock(RelationalPersistentProperty.class); + EmbeddedRelationalPersistentProperty embeddedProperty = new EmbeddedRelationalPersistentProperty(delegate, mock(EmbeddedContext.class)); + + RelationalPersistentProperty otherDelegate = mock(RelationalPersistentProperty.class); + EmbeddedRelationalPersistentProperty otherEmbeddedProperty = new EmbeddedRelationalPersistentProperty(otherDelegate, mock(EmbeddedContext.class)); + + SoftAssertions.assertSoftly(softly -> { + softly.assertThat(embeddedProperty).isEqualTo(embeddedProperty); + softly.assertThat(embeddedProperty).isEqualTo(delegate); + + softly.assertThat(embeddedProperty).isNotEqualTo(otherEmbeddedProperty); + softly.assertThat(embeddedProperty).isNotEqualTo(otherDelegate); + }); + } + +} \ No newline at end of file diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/PersistentPropertyPathTestUtils.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/PersistentPropertyPathTestUtils.java new file mode 100644 index 0000000000..0a522fa4a5 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/PersistentPropertyPathTestUtils.java @@ -0,0 +1,45 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping; + +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPaths; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; + +/** + * @author Jens Schauder + */ +public final class PersistentPropertyPathTestUtils { + + private PersistentPropertyPathTestUtils() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + public static PersistentPropertyPath getPath(RelationalMappingContext context, + String path, Class baseType) { + + PersistentPropertyPaths persistentPropertyPaths = context + .findPersistentPropertyPaths(baseType, p -> true); + + return persistentPropertyPaths + .filter(p -> p.toDotPath().equals(path)) + .stream() + .findFirst() + .orElse(null); + + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/RelationalMappingContextUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/RelationalMappingContextUnitTests.java new file mode 100644 index 0000000000..4af641fb13 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/RelationalMappingContextUnitTests.java @@ -0,0 +1,148 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping; + +import static org.assertj.core.api.Assertions.*; + +import java.util.HashSet; +import java.util.List; +import java.util.UUID; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Unit tests for {@link RelationalMappingContext}. 
+ * + * @author Toshiaki Maki + * @author Jens Schauder + */ +public class RelationalMappingContextUnitTests { + + RelationalMappingContext context = new RelationalMappingContext(); + SimpleTypeHolder holder = new SimpleTypeHolder(new HashSet<>(List.of(UUID.class)), true); + + @BeforeEach + void setup() { + context.setSimpleTypeHolder(holder); + } + + @Test // DATAJDBC-229 + public void uuidPropertyIsNotEntity() { + + RelationalPersistentEntity entity = context.getPersistentEntity(EntityWithUuid.class); + RelationalPersistentProperty uuidProperty = entity.getRequiredPersistentProperty("uuid"); + + assertThat(uuidProperty.isEntity()).isFalse(); + } + + @Test // GH-1525 + public void canObtainAggregatePath() { + + PersistentPropertyPath path = context.getPersistentPropertyPath("uuid", + EntityWithUuid.class); + AggregatePath aggregatePath = context.getAggregatePath(path); + + assertThat(aggregatePath).isNotNull(); + } + + @Test // GH-1525 + public void innerAggregatePathsGetCached() { + + context = new RelationalMappingContext(); + context.setSimpleTypeHolder(holder); + + PersistentPropertyPath path = context.getPersistentPropertyPath("uuid", + EntityWithUuid.class); + + AggregatePath one = context.getAggregatePath(path); + AggregatePath two = context.getAggregatePath(path); + + assertThat(one).isSameAs(two); + } + + @Test // GH-1525 + public void rootAggregatePathsGetCached() { + + context = new RelationalMappingContext(); + context.setSimpleTypeHolder(holder); + + AggregatePath one = context.getAggregatePath(context.getRequiredPersistentEntity(EntityWithUuid.class)); + AggregatePath two = context.getAggregatePath(context.getRequiredPersistentEntity(EntityWithUuid.class)); + + assertThat(one).isSameAs(two); + } + + @Test // GH-1586 + void correctlyCascadesPrefix() { + + RelationalPersistentEntity entity = context.getRequiredPersistentEntity(WithEmbedded.class); + + RelationalPersistentProperty parent = entity.getRequiredPersistentProperty("parent"); + 
RelationalPersistentEntity parentEntity = context.getRequiredPersistentEntity(parent); + RelationalPersistentProperty child = parentEntity.getRequiredPersistentProperty("child"); + RelationalPersistentEntity childEntity = context.getRequiredPersistentEntity(child); + RelationalPersistentProperty name = childEntity.getRequiredPersistentProperty("name"); + + assertThat(parent.getEmbeddedPrefix()).isEqualTo("prnt_"); + assertThat(child.getEmbeddedPrefix()).isEqualTo("prnt_chld_"); + assertThat(name.getColumnName()).isEqualTo(SqlIdentifier.quoted("PRNT_CHLD_NAME")); + } + + @Test // GH-1657 + void aggregatePathsOfBasePropertyForDifferentInheritedEntitiesAreDifferent() { + + PersistentPropertyPath path1 = context.getPersistentPropertyPath("name", + Inherit1.class); + PersistentPropertyPath path2 = context.getPersistentPropertyPath("name", + Inherit2.class); + + AggregatePath aggregatePath1 = context.getAggregatePath(path1); + AggregatePath aggregatePath2 = context.getAggregatePath(path2); + + assertThat(aggregatePath1).isNotEqualTo(aggregatePath2); + } + + static class EntityWithUuid { + @Id UUID uuid; + } + + static class WithEmbedded { + @Embedded.Empty(prefix = "prnt_") Parent parent; + } + + static class Parent { + + @Embedded.Empty(prefix = "chld_") Child child; + } + + static class Child { + String name; + } + + static class Base { + String name; + } + + static class Inherit1 extends Base {} + + static class Inherit2 extends Base {} + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/event/AbstractRelationalEventListenerUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/event/AbstractRelationalEventListenerUnitTests.java new file mode 100644 index 0000000000..4f22cd2b00 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/event/AbstractRelationalEventListenerUnitTests.java @@ -0,0 +1,135 @@ +/* + * Copyright 
2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping.event; + +import static org.assertj.core.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.conversion.MutableAggregateChange; + +/** + * Unit tests for {@link AbstractRelationalEventListener}. + * + * @author Mark Paluch + * @author Jens Schauder + */ +public class AbstractRelationalEventListenerUnitTests { + + List events = new ArrayList<>(); + EventListenerUnderTest listener = new EventListenerUnderTest(); + DummyEntity dummyEntity = new DummyEntity(); + + @Test // GH-1053 + public void afterConvert() { + + listener.onApplicationEvent(new AfterConvertEvent<>(dummyEntity)); + + assertThat(events).containsExactly("afterConvert"); + } + + @Test // DATAJDBC-454 + public void beforeConvert() { + + listener.onApplicationEvent(new BeforeConvertEvent<>(dummyEntity)); + + assertThat(events).containsExactly("beforeConvert"); + } + + @Test // DATAJDBC-454 + public void beforeSave() { + + listener.onApplicationEvent(new BeforeSaveEvent<>(dummyEntity, MutableAggregateChange.forSave(dummyEntity))); + + assertThat(events).containsExactly("beforeSave"); + } + + @Test // DATAJDBC-454 + public void afterSave() { + + listener.onApplicationEvent(new AfterSaveEvent<>(dummyEntity, 
MutableAggregateChange.forDelete(dummyEntity))); + + assertThat(events).containsExactly("afterSave"); + } + + @Test // DATAJDBC-454 + public void beforeDelete() { + + listener.onApplicationEvent( + new BeforeDeleteEvent<>(Identifier.of(23), dummyEntity, MutableAggregateChange.forDelete(dummyEntity))); + + assertThat(events).containsExactly("beforeDelete"); + } + + @Test // DATAJDBC-454 + public void afterDelete() { + + listener.onApplicationEvent( + new AfterDeleteEvent<>(Identifier.of(23), dummyEntity, MutableAggregateChange.forDelete(dummyEntity))); + + assertThat(events).containsExactly("afterDelete"); + } + + @Test // DATAJDBC-454 + public void eventWithNonMatchingDomainType() { + + String notADummyEntity = "I'm not a dummy entity"; + + listener.onApplicationEvent( + new AfterDeleteEvent<>(Identifier.of(23), String.class, MutableAggregateChange.forDelete(notADummyEntity))); + + assertThat(events).isEmpty(); + } + + static class DummyEntity { + + } + + private class EventListenerUnderTest extends AbstractRelationalEventListener { + + @Override + protected void onBeforeConvert(BeforeConvertEvent event) { + events.add("beforeConvert"); + } + + @Override + protected void onBeforeSave(BeforeSaveEvent event) { + events.add("beforeSave"); + } + + @Override + protected void onAfterSave(AfterSaveEvent event) { + events.add("afterSave"); + } + + @Override + protected void onAfterConvert(AfterConvertEvent event) { + events.add("afterConvert"); + } + + @Override + protected void onAfterDelete(AfterDeleteEvent event) { + events.add("afterDelete"); + } + + @Override + protected void onBeforeDelete(BeforeDeleteEvent event) { + events.add("beforeDelete"); + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/event/RelationalEventUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/event/RelationalEventUnitTests.java new file mode 100644 index 0000000000..9e87e14ea8 --- 
/dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/mapping/event/RelationalEventUnitTests.java @@ -0,0 +1,37 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.mapping.event; + +import static org.assertj.core.api.AssertionsForClassTypes.*; + +import org.junit.jupiter.api.Test; +import org.springframework.core.ResolvableType; + +/** + * Unit tests for {@link RelationalEvent}. + * + * @author Mark Paluch + */ +class RelationalEventUnitTests { + + @Test // GH-1539 + void shouldReportCorrectGenericType() { + assertThat(new BeforeConvertEvent<>(new MyAggregate()).getResolvableType()) + .isEqualTo(ResolvableType.forClassWithGenerics(BeforeConvertEvent.class, MyAggregate.class)); + } + + static class MyAggregate {} +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/CriteriaUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/CriteriaUnitTests.java new file mode 100644 index 0000000000..d107c67e72 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/CriteriaUnitTests.java @@ -0,0 +1,326 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.query; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.springframework.data.relational.core.query.Criteria.*; + +import java.util.Arrays; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Unit tests for {@link Criteria}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Roman Chigvintsev + */ +class CriteriaUnitTests { + + @Test // DATAJDBC-513 + void fromCriteria() { + + Criteria nested1 = where("foo").isNotNull(); + Criteria nested2 = where("foo").isNull(); + CriteriaDefinition criteria = Criteria.from(nested1, nested2); + + assertThat(criteria.isGroup()).isTrue(); + assertThat(criteria.getGroup()).containsExactly(nested1, nested2); + assertThat(criteria.getPrevious()).isEqualTo(Criteria.empty()); + assertThat(criteria).hasToString("(foo IS NOT NULL AND foo IS NULL)"); + } + + @Test // DATAJDBC-513 + void fromCriteriaOptimized() { + + Criteria nested = where("foo").is("bar").and("baz").isNotNull(); + CriteriaDefinition criteria = Criteria.from(nested); + + assertThat(criteria).isSameAs(nested).hasToString("foo = 'bar' AND baz IS NOT NULL"); + } + + @Test // DATAJDBC-513 + void isEmpty() { + + assertSoftly(softly -> { + + Criteria empty = empty(); + Criteria notEmpty = where("foo").is("bar"); + + 
assertThat(empty.isEmpty()).isTrue(); + assertThat(notEmpty.isEmpty()).isFalse(); + + assertThat(Criteria.from(notEmpty).isEmpty()).isFalse(); + assertThat(Criteria.from(notEmpty, notEmpty).isEmpty()).isFalse(); + + assertThat(Criteria.from(empty).isEmpty()).isTrue(); + assertThat(Criteria.from(empty, empty).isEmpty()).isTrue(); + + assertThat(Criteria.from(empty, notEmpty).isEmpty()).isFalse(); + assertThat(Criteria.from(notEmpty, empty).isEmpty()).isFalse(); + }); + } + + @Test // DATAJDBC-513 + void andChainedCriteria() { + + Criteria criteria = where("foo").is("bar").and("baz").isNotNull(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("baz")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.IS_NOT_NULL); + assertThat(criteria.getValue()).isNull(); + assertThat(criteria.getPrevious()).isNotNull(); + assertThat(criteria.getCombinator()).isEqualTo(Criteria.Combinator.AND); + + criteria = criteria.getPrevious(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // DATAJDBC-513 + void andGroupedCriteria() { + + Criteria grouped = where("foo").is("bar").and(where("foo").is("baz").or("bar").isNotNull()); + Criteria criteria = grouped; + + assertThat(criteria.isGroup()).isTrue(); + assertThat(criteria.getGroup()).hasSize(1); + assertThat(criteria.getGroup().get(0).getColumn()).isEqualTo(SqlIdentifier.unquoted("bar")); + assertThat(criteria.getCombinator()).isEqualTo(Criteria.Combinator.AND); + + criteria = criteria.getPrevious(); + + assertThat(criteria).isNotNull(); + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + + assertThat(grouped).hasToString("foo = 'bar' AND (foo = 
'baz' OR bar IS NOT NULL)"); + } + + @Test // DATAJDBC-513 + void orChainedCriteria() { + + Criteria criteria = where("foo").is("bar").or("baz").isNotNull(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("baz")); + assertThat(criteria.getCombinator()).isEqualTo(Criteria.Combinator.OR); + + criteria = criteria.getPrevious(); + + assertThat(criteria).isNotNull(); + assertThat(criteria.getPrevious()).isNull(); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // DATAJDBC-513 + void orGroupedCriteria() { + + Criteria criteria = where("foo").is("bar").or(where("foo").is("baz")); + + assertThat(criteria.isGroup()).isTrue(); + assertThat(criteria.getGroup()).hasSize(1); + assertThat(criteria.getGroup().get(0).getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getCombinator()).isEqualTo(Criteria.Combinator.OR); + + criteria = criteria.getPrevious(); + + assertThat(criteria).isNotNull(); + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // DATAJDBC-513 + void shouldBuildEqualsCriteria() { + + Criteria criteria = where("foo").is("bar"); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test + void shouldBuildEqualsIgnoreCaseCriteria() { + Criteria criteria = where("foo").is("bar").ignoreCase(true); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + assertThat(criteria.isIgnoreCase()).isTrue(); + } + + @Test // DATAJDBC-513 + void shouldBuildNotEqualsCriteria() { + + Criteria criteria = 
where("foo").not("bar"); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.NEQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // DATAJDBC-513 + void shouldBuildInCriteria() { + + Criteria criteria = where("foo").in("bar", "baz"); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.IN); + assertThat(criteria.getValue()).isEqualTo(Arrays.asList("bar", "baz")); + assertThat(criteria).hasToString("foo IN ('bar', 'baz')"); + } + + @Test // DATAJDBC-513 + void shouldBuildNotInCriteria() { + + Criteria criteria = where("foo").notIn("bar", "baz"); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.NOT_IN); + assertThat(criteria.getValue()).isEqualTo(Arrays.asList("bar", "baz")); + } + + @Test // DATAJDBC-513 + void shouldBuildGtCriteria() { + + Criteria criteria = where("foo").greaterThan(1); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.GT); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // DATAJDBC-513 + void shouldBuildGteCriteria() { + + Criteria criteria = where("foo").greaterThanOrEquals(1); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.GTE); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // DATAJDBC-513 + void shouldBuildLtCriteria() { + + Criteria criteria = where("foo").lessThan(1); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.LT); + 
assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // DATAJDBC-513 + void shouldBuildLteCriteria() { + + Criteria criteria = where("foo").lessThanOrEquals(1); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.LTE); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // DATAJDBC-513 + void shouldBuildLikeCriteria() { + + Criteria criteria = where("foo").like("hello%"); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.LIKE); + assertThat(criteria.getValue()).isEqualTo("hello%"); + } + + @Test + void shouldBuildNotLikeCriteria() { + Criteria criteria = where("foo").notLike("hello%"); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.NOT_LIKE); + assertThat(criteria.getValue()).isEqualTo("hello%"); + } + + @Test // DATAJDBC-513 + void shouldBuildIsNullCriteria() { + + Criteria criteria = where("foo").isNull(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.IS_NULL); + } + + @Test // DATAJDBC-513 + void shouldBuildIsNotNullCriteria() { + + Criteria criteria = where("foo").isNotNull(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.IS_NOT_NULL); + } + + @Test // DATAJDBC-513 + void shouldBuildIsTrueCriteria() { + + Criteria criteria = where("foo").isTrue(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.IS_TRUE); + assertThat(criteria.getValue()).isEqualTo(true); + } + + @Test // DATAJDBC-513 + void 
shouldBuildIsFalseCriteria() { + + Criteria criteria = where("foo").isFalse(); + + assertThat(criteria.getColumn()).isEqualTo(SqlIdentifier.unquoted("foo")); + assertThat(criteria.getComparator()).isEqualTo(CriteriaDefinition.Comparator.IS_FALSE); + assertThat(criteria.getValue()).isEqualTo(false); + } + + @Test // GH-1960 + void identicallyCreatedCriteriaAreEqual() { + + Criteria c1 = Criteria.where("status").in("PUBLISHED", "DRAFT"); + Criteria c2 = Criteria.where("status").in("PUBLISHED", "DRAFT"); + + assertThat(c1).isEqualTo(c2); + assertThat(c1.hashCode()).isEqualTo(c2.hashCode()); + } + + @Test // GH-1960 + void notIdenticallyCreatedCriteriaAreNotEqual() { + + Criteria[] criteria = new Criteria[] { Criteria.where("status").is("PUBLISHED"), // + Criteria.where("statusx").is("PUBLISHED"), // + Criteria.where("status").greaterThan("PUBLISHED"), // + Criteria.where("status").is("PUBLISHEDx") }; + + for (int i = 0; i < criteria.length - 1; i++) { + for (int j = i + 1; j < criteria.length; j++) { + + assertThat(criteria[i]).isNotEqualTo(criteria[j]); + assertThat(criteria[i].hashCode()).isNotEqualTo(criteria[j].hashCode()); + } + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/QueryUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/QueryUnitTests.java new file mode 100644 index 0000000000..530dd437be --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/QueryUnitTests.java @@ -0,0 +1,80 @@ +/* +* Copyright 2020-2025 the original author or authors. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package org.springframework.data.relational.core.query; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; + +/** + * Tests the {@link Query} class. + * + * @author Jens Schauder + * @author Mark Paluch + */ +class QueryUnitTests { + + @Test // DATAJDBC-614 + void withCombinesSortAndPaging() { + + Query query = Query.empty() // + .sort(Sort.by("alpha")) // + .with(PageRequest.of(2, 20, Sort.by("beta"))); + + assertThat(query.getSort().get()) // + .extracting(Sort.Order::getProperty) // + .containsExactly("alpha", "beta"); + } + + @Test // DATAJDBC-614 + void withCombinesEmptySortAndPaging() { + + Query query = Query.empty() // + .with(PageRequest.of(2, 20, Sort.by("beta"))); + + assertThat(query.getSort().get()) // + .extracting(Sort.Order::getProperty) // + .containsExactly("beta"); + } + + @Test // DATAJDBC-614 + void withCombinesSortAndUnsortedPaging() { + + Query query = Query.empty() // + .sort(Sort.by("alpha")) // + .with(PageRequest.of(2, 20)); + + assertThat(query.getSort().get()) // + .extracting(Sort.Order::getProperty) // + .containsExactly("alpha"); + } + + @Test // GH-1939 + void withCombinesUnpagedWithSort() { + + Query query = Query.empty() // + .with(Pageable.unpaged(Sort.by("beta"))); + + assertThat(query.getSort().get()) // + .extracting(Sort.Order::getProperty) // + .containsExactly("beta"); + assertThat(query.getLimit()).isEqualTo(-1); + 
assertThat(query.getOffset()).isEqualTo(-1); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/UpdateUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/UpdateUnitTests.java new file mode 100644 index 0000000000..7ac7997724 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/query/UpdateUnitTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.query; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for {@link Update}. 
+ * + * @author Mark Paluch + */ +public class UpdateUnitTests { + + @Test // DATAJDBC-513 + public void shouldRenderUpdateToString() { + + assertThat(Update.update("foo", "baz").set("bar", 42)).hasToString("SET foo = 'baz', bar = 42"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/AbstractSegmentTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/AbstractSegmentTests.java new file mode 100644 index 0000000000..3139732e03 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/AbstractSegmentTests.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link AbstractSegment}. 
+ * + * @author Mark Paluch + */ +class AbstractSegmentTests { + + @Test // GH-1066 + void shouldReportToStringCorrectly() { + + Table table = Table.create("foo"); + AbstractSegment segment = new AbstractTestSegment(table.column("col1"), table.column("col2")); + + assertThat(segment).hasToString("AbstractTestSegment(foo.col1, foo.col2)"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/AbstractTestSegment.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/AbstractTestSegment.java new file mode 100644 index 0000000000..75d224e06c --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/AbstractTestSegment.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Public {@link AbstractSegment} for usage in tests in other packages. + * + * @author Jens Schauder + */ +public class AbstractTestSegment extends AbstractSegment { + protected AbstractTestSegment(Segment... 
children) { + super(children); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/CapturingVisitor.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/CapturingVisitor.java new file mode 100644 index 0000000000..3861b62d13 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/CapturingVisitor.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import java.util.ArrayList; +import java.util.List; + +/** + * @author Mark Paluch + */ +class CapturingVisitor implements Visitor { + + final List enter = new ArrayList<>(); + + @Override + public void enter(Visitable segment) { + enter.add(segment); + } + + @Override + public void leave(Visitable segment) { + leave.add(segment); + } + + final List leave = new ArrayList<>(); +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/ConditionsUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/ConditionsUnitTests.java new file mode 100644 index 0000000000..74c84b37a5 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/ConditionsUnitTests.java @@ -0,0 +1,42 @@ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +class ConditionsUnitTests { + + @Test // GH-916 + void notInOfColumnAndExpression() { + + Table table = Table.create("t"); + Column column = Column.create("col", table); + Expression expression = new Literal<>("expression"); + + In notIn = Conditions.notIn(column, expression); + + List segments = new ArrayList<>(); + notIn.visit(segments::add); + + assertThat(notIn.isNotIn()).isTrue(); + assertThat(segments).containsExactly(notIn, column, table, expression); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DefaultIdentifierProcessingUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DefaultIdentifierProcessingUnitTests.java new file mode 100644 index 0000000000..b04ccd4e28 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DefaultIdentifierProcessingUnitTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.IdentifierProcessing.LetterCasing; +import org.springframework.data.relational.core.sql.IdentifierProcessing.Quoting; + +/** + * unit tests for {@link DefaultIdentifierProcessing}. 
+ * + * @author Jens Schauder + */ +public class DefaultIdentifierProcessingUnitTests { + + @Test // DATAJDBC-386 + public void ansiConformProcessing() { + + DefaultIdentifierProcessing processing = IdentifierProcessing.create(Quoting.ANSI, LetterCasing.UPPER_CASE); + + assertThat(processing.quote("something")).isEqualTo("\"something\""); + assertThat(processing.standardizeLetterCase("aBc")).isEqualTo("ABC"); + } + + @Test // DATAJDBC-386 + public void twoCharacterAsIs() { + + DefaultIdentifierProcessing processing = IdentifierProcessing.create(new Quoting("[", "]"), LetterCasing.AS_IS); + + assertThat(processing.quote("something")).isEqualTo("[something]"); + assertThat(processing.standardizeLetterCase("aBc")).isEqualTo("aBc"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DeleteBuilderUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DeleteBuilderUnitTests.java new file mode 100644 index 0000000000..e9c4a13feb --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DeleteBuilderUnitTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link DeleteBuilder}. + * + * @author Mark Paluch + */ +public class DeleteBuilderUnitTests { + + @Test // DATAJDBC-335 + public void simpleDelete() { + + DeleteBuilder builder = StatementBuilder.delete(); + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + + Delete delete = builder.from(table).where(foo.isEqualTo(bar)).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + delete.visit(visitor); + + assertThat(visitor.enter).containsSequence(new From(table), table, new Where(foo.isEqualTo(bar)), + foo.isEqualTo(bar), foo, table, bar, table); + + assertThat(delete.toString()).isEqualTo("DELETE FROM mytable WHERE mytable.foo = mytable.bar"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DeleteValidatorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DeleteValidatorUnitTests.java new file mode 100644 index 0000000000..b82df68bc5 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/DeleteValidatorUnitTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link DeleteValidator}. + * + * @author Mark Paluch + * @author Jens Schauder + */ +public class DeleteValidatorUnitTests { + + @Test // DATAJDBC-335 + public void shouldReportMissingTableForDeleteViaWhere() { + + Column column = SQL.table("table").column("foo"); + Table bar = SQL.table("bar"); + + assertThatThrownBy(() -> { + StatementBuilder.delete() // + .from(bar) // + .where(column.isEqualTo(SQL.literalOf("foo"))) // + .build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [table] by a WHERE predicate not imported by FROM [bar]"); + } + + @Test // DATAJDBC-335 + public void shouldIgnoreImportsFromSubselectsInWhereClause() { + + Table foo = SQL.table("foo"); + Column bar = foo.column("bar"); + + Table floo = SQL.table("floo"); + Column bah = floo.column("bah"); + + Select subselect = Select.builder().select(bah).from(floo).build(); + + assertThat(Delete.builder().from(foo).where(Conditions.in(bar, subselect)).build()).isNotNull(); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/InTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/InTests.java new file mode 100644 index 0000000000..088128d4e6 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/InTests.java @@ -0,0 +1,46 @@ +/* +* Copyright 2020-2025 the original author or authors. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link In}. + * + * @author Mark Paluch + */ +public class InTests { + + @Test // DATAJDBC-604 + void shouldRenderToString() { + + Table table = Table.create("table"); + + assertThat(In.create(table.column("col"), SQL.bindMarker())).hasToString("table.col IN (?)"); + assertThat(In.create(table.column("col"), SQL.bindMarker()).not()).hasToString("table.col NOT IN (?)"); + } + + @Test // DATAJDBC-604 + void shouldRenderEmptyExpressionToString() { + + Table table = Table.create("table"); + + assertThat(In.create(table.column("col"))).hasToString("1 = 0"); + assertThat(In.create(table.column("col")).not()).hasToString("1 = 1"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/InsertBuilderUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/InsertBuilderUnitTests.java new file mode 100644 index 0000000000..9b235ff15f --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/InsertBuilderUnitTests.java @@ -0,0 +1,46 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for {@link InsertBuilder}. + * + * @author Mark Paluch + */ +public class InsertBuilderUnitTests { + + @Test // DATAJDBC-335 + public void shouldCreateSimpleInsert() { + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + + Insert insert = StatementBuilder.insert().into(table).column(foo).column(bar).value(SQL.bindMarker()).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + insert.visit(visitor); + + assertThat(visitor.enter).containsSequence(insert, new Into(table), table, foo, table, bar, table, + new Values(SQL.bindMarker())); + + assertThat(insert.toString()).isEqualTo("INSERT INTO mytable (mytable.foo, mytable.bar) VALUES(?)"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SelectBuilderUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SelectBuilderUnitTests.java new file mode 100644 index 0000000000..409489cecd --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SelectBuilderUnitTests.java @@ -0,0 +1,210 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; + +import java.util.OptionalLong; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.relational.core.sql.Join.JoinType; + +/** + * Unit tests for {@link SelectBuilder}. + * + * @author Mark Paluch + * @author Myeonghyeon Lee + */ +public class SelectBuilderUnitTests { + + @Test // DATAJDBC-309 + public void simpleSelect() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + + Select select = builder.select(foo, bar).from(table).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, bar, table, new From(table), table); + } + + @Test // DATAJDBC-309 + public void selectTop() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + + Select select = builder.top(10).select(foo).from(table).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, new From(table), table); + assertThat(select.getLimit()).isEqualTo(OptionalLong.of(10)); + } + + @Test // DATAJDBC-347 + public void selectWithWhere() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + + Comparison 
condition = foo.isEqualTo(SQL.literalOf("bar")); + Select select = builder.select(foo).from(table).where(condition).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, new From(table), table, new Where(condition)); + } + + @Test // DATAJDBC-309 + public void moreAdvancedSelect() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table1 = SQL.table("mytable1"); + Table table2 = SQL.table("mytable2"); + + Column foo = SQL.column("foo", table1).as("foo_from_table1"); + Column bar = SQL.column("foo", table2).as("foo_from_table1"); + + Select select = builder.select(foo, bar).from(table1, table2).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table1, bar, table2, new From(table1, table2), table1, table2); + } + + @Test // DATAJDBC-309 + public void orderBy() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table = SQL.table("mytable"); + + Column foo = SQL.column("foo", table).as("foo"); + + OrderByField orderByField = OrderByField.from(foo).asc(); + Select select = builder.select(foo).from(table).orderBy(orderByField).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, new From(table), table, orderByField, foo); + } + + @Test // DATAJDBC-309 + public void joins() { + + SelectBuilder builder = StatementBuilder.select(); + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Column name = employee.column("name").as("emp_name"); + Column department_name = employee.column("name").as("department_name"); + + Select select = builder.select(name, department_name).from(employee).join(department) + .on(SQL.column("department_id", employee)).equals(SQL.column("id", department)) + .and(SQL.column("tenant", 
employee)).equals(SQL.column("tenant", department)) + .orderBy(OrderByField.from(name).asc()).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).filteredOn(Join.class::isInstance).hasSize(1); + + Join join = visitor.enter.stream().filter(Join.class::isInstance).map(Join.class::cast).findFirst().get(); + + assertThat(join.getJoinTable()).isEqualTo(department); + assertThat(join.getOn().toString()).isEqualTo( + new SimpleSegment("employee.department_id = department.id AND employee.tenant = department.tenant").toString()); + assertThat(join.getType()).isEqualTo(JoinType.JOIN); + } + + @Test // DATAJDBC-498 + public void selectWithLock() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + + Select select = builder.select(foo, bar).from(table).lock(lockMode).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, bar, table, new From(table), table); + assertThat(select.getLockMode()).isEqualTo(lockMode); + } + + @Test // DATAJDBC-498 + public void selectWithWhereWithLock() { + + SelectBuilder builder = StatementBuilder.select(); + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + + Comparison condition = foo.isEqualTo(SQL.literalOf("bar")); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + + Select select = builder.select(foo).from(table).where(condition).lock(lockMode).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, new From(table), table, new Where(condition)); + assertThat(select.getLockMode()).isEqualTo(lockMode); + } + + @Test // DATAJDBC-498 + public void orderByWithLock() { + + SelectBuilder builder = 
StatementBuilder.select(); + + Table table = SQL.table("mytable"); + + Column foo = SQL.column("foo", table).as("foo"); + + OrderByField orderByField = OrderByField.from(foo).asc(); + LockMode lockMode = LockMode.PESSIMISTIC_WRITE; + + Select select = builder.select(foo).from(table).orderBy(orderByField).lock(lockMode).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + select.visit(visitor); + + assertThat(visitor.enter).containsSequence(foo, table, new From(table), table, orderByField, foo); + assertThat(select.getLockMode()).isEqualTo(lockMode); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SelectValidatorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SelectValidatorUnitTests.java new file mode 100644 index 0000000000..ef2a5505f6 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SelectValidatorUnitTests.java @@ -0,0 +1,110 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for {@link SelectValidator}. 
+ * + * @author Mark Paluch + * @author Jens Schauder + */ +public class SelectValidatorUnitTests { + + @Test // DATAJDBC-309 + public void shouldReportMissingTableViaSelectlist() { + + Column column = SQL.table("table").column("foo"); + + assertThatThrownBy(() -> { + StatementBuilder.select(column).from(SQL.table("bar")).build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [table] by a SELECT column not imported by FROM [bar] or JOIN []"); + } + + @Test // DATAJDBC-309 + public void shouldReportMissingTableViaSelectlistCount() { + + Column column = SQL.table("table").column("foo"); + + assertThatThrownBy(() -> { + StatementBuilder.select(Functions.count(column)).from(SQL.table("bar")).build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [table] by a SELECT column not imported by FROM [bar] or JOIN []"); + } + + @Test // DATAJDBC-309 + public void shouldReportMissingTableViaSelectlistDistinct() { + + Column column = SQL.table("table").column("foo"); + + assertThatThrownBy(() -> { + StatementBuilder.select(column).distinct().from(SQL.table("bar")).build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [table] by a SELECT column not imported by FROM [bar] or JOIN []"); + } + + @Test // DATAJDBC-309 + public void shouldReportMissingTableViaOrderBy() { + + Column foo = SQL.table("table").column("foo"); + Table bar = SQL.table("bar"); + + assertThatThrownBy(() -> { + StatementBuilder.select(bar.column("foo")) // + .from(bar) // + .orderBy(foo) // + .build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [table] by a ORDER BY column not imported by FROM [bar] or JOIN []"); + } + + @Test // DATAJDBC-309 + public void shouldReportMissingTableViaWhere() { + + Column column = SQL.table("table").column("foo"); + Table bar = SQL.table("bar"); + + assertThatThrownBy(() -> { + 
StatementBuilder.select(bar.column("foo")) // + .from(bar) // + .where(column.isEqualTo(SQL.literalOf("foo"))) // + .build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [table] by a WHERE predicate not imported by FROM [bar] or JOIN []"); + } + + @Test // DATAJDBC-309 + public void shouldIgnoreImportsFromSubselectsInWhereClause() { + + Table foo = SQL.table("foo"); + Column bar = foo.column("bar"); + + Table floo = SQL.table("floo"); + Column bah = floo.column("bah"); + + Select subselect = Select.builder().select(bah).from(floo).build(); + + assertThatThrownBy(() -> { + Select.builder().select(bah).from(foo).where(Conditions.in(bar, subselect)).build(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Required table [floo] by a SELECT column not imported by FROM [foo] or JOIN []"); + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SqlIdentifierUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SqlIdentifierUnitTests.java new file mode 100644 index 0000000000..e035df66e4 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SqlIdentifierUnitTests.java @@ -0,0 +1,91 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.SoftAssertions.*; +import static org.springframework.data.relational.core.sql.SqlIdentifier.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.IdentifierProcessing.LetterCasing; +import org.springframework.data.relational.core.sql.IdentifierProcessing.Quoting; + +/** + * Unit tests for {@link SqlIdentifier}. + * + * @author Jens Schauder + * @author Mark Paluch + * @author Kurt Niemi + */ +public class SqlIdentifierUnitTests { + + public static final IdentifierProcessing BRACKETS_LOWER_CASE = IdentifierProcessing.create(new Quoting("[", "]"), + LetterCasing.LOWER_CASE); + + @Test // DATAJDBC-386 + public void quotedSimpleObjectIdentifier() { + + SqlIdentifier identifier = quoted("someName"); + + assertThat(identifier.toSql(BRACKETS_LOWER_CASE)).isEqualTo("[someName]"); + assertThat(identifier.getReference()).isEqualTo("someName"); + } + + @Test // DATAJDBC-386 + public void unquotedSimpleObjectIdentifier() { + + SqlIdentifier identifier = unquoted("someName"); + String sql = identifier.toSql(BRACKETS_LOWER_CASE); + + assertThat(sql).isEqualTo("someName"); + assertThat(identifier.getReference()).isEqualTo("someName"); + } + + @Test // DATAJDBC-386 + public void quotedMultipartObjectIdentifier() { + + SqlIdentifier identifier = SqlIdentifier.from(quoted("some"), quoted("name")); + String sql = identifier.toSql(IdentifierProcessing.ANSI); + + assertThat(sql).isEqualTo("\"some\".\"name\""); + } + + @Test // DATAJDBC-386 + public void unquotedMultipartObjectIdentifier() { + + SqlIdentifier identifier = SqlIdentifier.from(unquoted("some"), unquoted("name")); + String sql = identifier.toSql(IdentifierProcessing.ANSI); + + assertThat(sql).isEqualTo("some.name"); + } + + @Test // DATAJDBC-386 + public void equality() { + + SqlIdentifier basis = SqlIdentifier.unquoted("simple"); + 
SqlIdentifier equal = SqlIdentifier.unquoted("simple"); + SqlIdentifier quoted = quoted("simple"); + SqlIdentifier notSimple = SqlIdentifier.from(unquoted("simple"), unquoted("not")); + + assertSoftly(softly -> { + + softly.assertThat(basis).isEqualTo(equal); + softly.assertThat(equal).isEqualTo(basis); + softly.assertThat(basis).isNotEqualTo(quoted); + softly.assertThat(basis).isNotEqualTo(notSimple); + }); + } +} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/ConversionCustomizer.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/TestFrom.java similarity index 62% rename from src/main/java/org/springframework/data/jdbc/mapping/model/ConversionCustomizer.java rename to spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/TestFrom.java index 80ba336ed5..d7e27eea78 100644 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/ConversionCustomizer.java +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/TestFrom.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2021-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,14 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.springframework.data.jdbc.mapping.model; - -import org.springframework.core.convert.support.GenericConversionService; +package org.springframework.data.relational.core.sql; /** + * A variant of {@link From} that can be used in tests in other packages. + * * @author Jens Schauder */ -public interface ConversionCustomizer { +public class TestFrom extends From { - void customize(GenericConversionService conversions); + public TestFrom(TableLike... tables) { + super(tables); + } } diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/TestJoin.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/TestJoin.java new file mode 100644 index 0000000000..25a4bf9fbb --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/TestJoin.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +/** + * Public {@link Join} with public constructor for tests in other packages. 
+ * + * @author Jens Schauder + */ +public class TestJoin extends Join { + public TestJoin(JoinType type, TableLike joinTable, Condition on) { + super(type, joinTable, on); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/UpdateBuilderUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/UpdateBuilderUnitTests.java new file mode 100644 index 0000000000..1da7609efb --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/UpdateBuilderUnitTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.*; + +/** + * Unit tests for {@link UpdateBuilder}. 
+ * + * @author Mark Paluch + */ +public class UpdateBuilderUnitTests { + + @Test // DATAJDBC-335 + public void shouldCreateSimpleUpdate() { + + Table table = SQL.table("mytable"); + Column column = table.column("foo"); + + Update update = StatementBuilder.update(table).set(column.set(SQL.bindMarker())).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + update.visit(visitor); + + assertThat(visitor.enter).containsSequence(update, table, Assignments.value(column, SQL.bindMarker()), column, + table, SQL.bindMarker()); + + assertThat(update.toString()).isEqualTo("UPDATE mytable SET mytable.foo = ?"); + } + + @Test // DATAJDBC-335 + public void shouldCreateUpdateWIthCondition() { + + Table table = SQL.table("mytable"); + Column column = table.column("foo"); + + Update update = StatementBuilder.update(table).set(column.set(SQL.bindMarker())).where(column.isNull()).build(); + + CapturingVisitor visitor = new CapturingVisitor(); + update.visit(visitor); + + assertThat(visitor.enter).containsSequence(update, table, Assignments.value(column, SQL.bindMarker()), column, + table, SQL.bindMarker(), new Where(column.isNull())); + + assertThat(update.toString()).isEqualTo("UPDATE mytable SET mytable.foo = ? WHERE mytable.foo IS NULL"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/ConditionRendererUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/ConditionRendererUnitTests.java new file mode 100644 index 0000000000..8def3c61bc --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/ConditionRendererUnitTests.java @@ -0,0 +1,255 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Conditions; +import org.springframework.data.relational.core.sql.Functions; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; + +/** + * Unit tests for rendered {@link org.springframework.data.relational.core.sql.Conditions}. 
+ * + * @author Mark Paluch + * @author Daniele Canteri + */ +public class ConditionRendererUnitTests { + + Table table = Table.create("my_table"); + Column left = table.column("left"); + Column right = table.column("right"); + Column other = table.column("other"); + + @Test // DATAJDBC-309 + public void shouldRenderEquals() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isEqualTo(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left = my_table.right"); + } + + @Test // DATAJDBC-514 + public void shouldRenderEqualsCaseInsensitive() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table) + .where(Conditions.isEqual(Functions.upper(left), Functions.upper(right))).build()); + + assertThat(sql).endsWith("WHERE UPPER(my_table.left) = UPPER(my_table.right)"); + } + + @Test // DATAJDBC-490 + public void shouldRenderEqualsNested() { + + String sql = SqlRenderer + .toString(StatementBuilder.select(left).from(table).where(Conditions.nest(left.isEqualTo(right))).build()); + + assertThat(sql).endsWith("WHERE (my_table.left = my_table.right)"); + } + + @Test // DATAJDBC-490 + public void shouldRenderAndNest() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table) + .where(Conditions.nest(left.isEqualTo(right).and(left.isGreater(right)))).build()); + + assertThat(sql).endsWith("WHERE (my_table.left = my_table.right AND my_table.left > my_table.right)"); + } + + @Test // DATAJDBC-490 + public void shouldRenderAndGroupOr() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table) + .where(Conditions.nest(left.isEqualTo(right).and(left.isGreater(right))).or(left.like(right))).build()); + + assertThat(sql).endsWith( + "WHERE (my_table.left = my_table.right AND my_table.left > my_table.right) OR my_table.left LIKE my_table.right"); + } + + @Test // DATAJDBC-490 + public void shouldRenderAndGroupOrAndNested() { + + String sql = 
SqlRenderer.toString(StatementBuilder.select(left).from(table) + .where(Conditions.nest(left.isEqualTo(right).and(left.isGreater(right))) + .or(Conditions.nest(left.like(right).and(right.like(left))))) + .build()); + + assertThat(sql).endsWith( + "WHERE (my_table.left = my_table.right AND my_table.left > my_table.right) OR (my_table.left LIKE my_table.right AND my_table.right LIKE my_table.left)"); + } + + @Test // DATAJDBC-309 + public void shouldRenderNotEquals() { + + String sql = SqlRenderer + .toString(StatementBuilder.select(left).from(table).where(left.isNotEqualTo(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left != my_table.right"); + + sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isEqualTo(right).not()).build()); + + assertThat(sql).endsWith("WHERE my_table.left != my_table.right"); + } + + @Test // DATAJDBC-309 + public void shouldRenderIsLess() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isLess(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left < my_table.right"); + } + + @Test // DATAJDBC-513 + public void shouldRenderBetween() { + + String sql = SqlRenderer + .toString(StatementBuilder.select(left).from(table).where(left.between(right, other)).build()); + + assertThat(sql).endsWith("WHERE my_table.left BETWEEN my_table.right AND my_table.other"); + } + + @Test // DATAJDBC-513 + public void shouldRenderNotBetween() { + + String sql = SqlRenderer + .toString(StatementBuilder.select(left).from(table).where(left.notBetween(right, other)).build()); + + assertThat(sql).endsWith("WHERE my_table.left NOT BETWEEN my_table.right AND my_table.other"); + } + + @Test // DATAJDBC-309 + public void shouldRenderIsLessOrEqualTo() { + + String sql = SqlRenderer + .toString(StatementBuilder.select(left).from(table).where(left.isLessOrEqualTo(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left <= my_table.right"); + } + + @Test // 
DATAJDBC-309 + public void shouldRenderIsGreater() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isGreater(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left > my_table.right"); + } + + @Test // DATAJDBC-309 + public void shouldRenderIsGreaterOrEqualTo() { + + String sql = SqlRenderer + .toString(StatementBuilder.select(left).from(table).where(left.isGreaterOrEqualTo(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left >= my_table.right"); + } + + @Test // DATAJDBC-309 + public void shouldRenderIn() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.in(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left IN (my_table.right)"); + } + + @Test // DATAJDBC-604 + public void shouldRenderEmptyIn() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.in()).build()); + + assertThat(sql).endsWith("WHERE 1 = 0"); + } + + @Test // DATAJDBC-604 + public void shouldRenderEmptyNotIn() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.notIn()).build()); + + assertThat(sql).endsWith("WHERE 1 = 1"); + } + + @Test // DATAJDBC-309 + public void shouldRenderLike() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.like(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left LIKE my_table.right"); + } + + @Test // DATAJDBC-513 + public void shouldRenderNotLike() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.notLike(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left NOT LIKE my_table.right"); + } + + @Test // DATAJDBC-309 + public void shouldRenderIsNull() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isNull()).build()); + + assertThat(sql).endsWith("WHERE my_table.left IS NULL"); + } + + @Test // DATAJDBC-309 
+ public void shouldRenderIsNotNull() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isNotNull()).build()); + + assertThat(sql).endsWith("WHERE my_table.left IS NOT NULL"); + + sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.isNull().not()).build()); + + assertThat(sql).endsWith("WHERE my_table.left IS NOT NULL"); + } + + @Test // DATAJDBC-410 + public void shouldRenderNotIn() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.in(right).not()).build()); + + assertThat(sql).endsWith("WHERE my_table.left NOT IN (my_table.right)"); + + sql = SqlRenderer.toString(StatementBuilder.select(left).from(table).where(left.notIn(right)).build()); + + assertThat(sql).endsWith("WHERE my_table.left NOT IN (my_table.right)"); + } + + @Test // GH-907 + public void shouldRenderJust() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table) + .where(Conditions.just("sql")) + .build()); + + assertThat(sql).endsWith("WHERE sql"); + } + + @Test // GH-907 + public void shouldRenderMultipleJust() { + + String sql = SqlRenderer.toString(StatementBuilder.select(left).from(table) + .where( Conditions.just("sql1").and(Conditions.just("sql2"))) + .build()); + + assertThat(sql).endsWith("WHERE sql1 AND sql2"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/DeleteRendererUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/DeleteRendererUnitTests.java new file mode 100644 index 0000000000..b451fea90b --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/DeleteRendererUnitTests.java @@ -0,0 +1,65 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.Delete; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.Table; + +/** + * Unit tests for {@link SqlRenderer}. + * + * @author Mark Paluch + */ +public class DeleteRendererUnitTests { + + @Test // DATAJDBC-335 + public void shouldRenderWithoutWhere() { + + Table bar = SQL.table("bar"); + + Delete delete = Delete.builder().from(bar).build(); + + assertThat(SqlRenderer.toString(delete)).isEqualTo("DELETE FROM bar"); + } + + @Test // DATAJDBC-335 + public void shouldRenderWithCondition() { + + Table table = Table.create("bar"); + + Delete delete = Delete.builder().from(table) // + .where(table.column("foo").isEqualTo(table.column("baz"))) // + .and(table.column("doe").isNull()).build(); + + assertThat(SqlRenderer.toString(delete)).isEqualTo("DELETE FROM bar WHERE bar.foo = bar.baz AND bar.doe IS NULL"); + } + + @Test // DATAJDBC-335 + public void shouldConsiderTableAlias() { + + Table table = Table.create("bar").as("my_bar"); + + Delete delete = Delete.builder().from(table) // + .where(table.column("foo").isEqualTo(table.column("baz"))) // + .build(); + + assertThat(SqlRenderer.toString(delete)).isEqualTo("DELETE FROM bar my_bar WHERE my_bar.foo 
= my_bar.baz"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/ExpressionVisitorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/ExpressionVisitorUnitTests.java new file mode 100644 index 0000000000..685f28f790 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/ExpressionVisitorUnitTests.java @@ -0,0 +1,142 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.Expressions; +import org.springframework.data.relational.core.sql.Functions; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.SimpleFunction; +import org.springframework.data.relational.core.sql.Table; + +/** + * Tests for the {@link ExpressionVisitor}. 
+ * + * @author Jens Schauder + */ +public class ExpressionVisitorUnitTests { + + static SimpleRenderContext simpleRenderContext = new SimpleRenderContext(NamingStrategies.asIs()); + + @ParameterizedTest // GH-1003 + @MethodSource + void expressionsWithOutAliasGetRendered(Fixture f) { + + ExpressionVisitor visitor = new ExpressionVisitor(simpleRenderContext); + + f.expression.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).as(f.comment).isEqualTo(f.renderResult); + } + + static List expressionsWithOutAliasGetRendered() { + + // final Select select = Select.builder().select(Functions.count(Expressions.asterisk()), + // SQL.nullLiteral()).build(); + + return asList( // + fixture("String literal", SQL.literalOf("one"), "'one'"), // + fixture("Numeric literal", SQL.literalOf(23L), "23"), // + fixture("Boolean literal", SQL.literalOf(true), "TRUE"), // + fixture("Just", SQL.literalOf(Expressions.just("just an arbitrary String")), "just an arbitrary String"), // + fixture("Column", Column.create("col", Table.create("tab")), "tab.col"), // + fixture("*", Expressions.asterisk(), "*"), // + fixture("tab.*", Expressions.asterisk(Table.create("tab")), "tab.*"), // + fixture("Count 1", Functions.count(SQL.literalOf(1)), "COUNT(1)"), // + fixture("Count *", Functions.count(Expressions.asterisk()), "COUNT(*)"), // + fixture("Function", SimpleFunction.create("Function", asList(SQL.literalOf("one"), SQL.literalOf("two"))), // + "Function('one', 'two')"), // + fixture("Null", SQL.nullLiteral(), "NULL"), // + fixture("Cast", Expressions.cast(Column.create("col", Table.create("tab")), "JSON"), "CAST(tab.col AS JSON)"), // + fixture("Cast with alias", Expressions.cast(Column.create("col", Table.create("tab")).as("alias"), "JSON"), + "CAST(tab.col AS JSON)")); // + } + + @Test // GH-1003 + void renderAliasedExpressionWithAliasHandlingUse() { + + ExpressionVisitor visitor = new ExpressionVisitor(simpleRenderContext, ExpressionVisitor.AliasHandling.USE); + + Column 
expression = Column.aliased("col", Table.create("tab"), "col_alias"); + expression.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("col_alias"); + } + + @Test // GH-1003 + void renderAliasedExpressionWithAliasHandlingDeclare() { + + ExpressionVisitor visitor = new ExpressionVisitor(simpleRenderContext, ExpressionVisitor.AliasHandling.IGNORE); + + Column expression = Column.aliased("col", Table.create("tab"), "col_alias"); + expression.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("tab.col"); + } + + @Test // GH-1003 + void considersNamingStrategy() { + + ExpressionVisitor visitor = new ExpressionVisitor(new SimpleRenderContext(NamingStrategies.toUpper())); + + Column expression = Column.create("col", Table.create("tab")); + expression.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("TAB.COL"); + } + + @Test // GH-1003 + void considerNamingStrategyForTableAsterisk() { + + ExpressionVisitor visitor = new ExpressionVisitor(new SimpleRenderContext(NamingStrategies.toUpper())); + + Expression expression = Table.create("tab").asterisk(); + expression.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("TAB.*"); + } + + static Fixture fixture(String comment, Expression expression, String renderResult) { + + Fixture f = new Fixture(); + f.comment = comment; + f.expression = expression; + f.renderResult = renderResult; + + return f; + } + + static class Fixture { + + String comment; + Expression expression; + String renderResult; + + @Override + public String toString() { + return comment; + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/FromClauseVisitorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/FromClauseVisitorUnitTests.java new file mode 100644 index 0000000000..1178db550f --- /dev/null +++ 
b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/FromClauseVisitorUnitTests.java @@ -0,0 +1,93 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.From; +import org.springframework.data.relational.core.sql.InlineQuery; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.TestFrom; + +/** + * Unit tests for the {@link FromClauseVisitor}. 
+ * + * @author Jens Schauder + */ +class FromClauseVisitorUnitTests { + + StringBuilder renderResult = new StringBuilder(); + FromClauseVisitor visitor = new FromClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs()), renderResult::append); + + @ParameterizedTest + @MethodSource + void testRendering(Fixture f) { + + From from = f.from; + + from.visit(visitor); + + assertThat(renderResult).hasToString(f.renderResult); + } + + static List testRendering() { + + Table tabOne = Table.create("tabOne"); + Table tabTwo = Table.create("tabTwo"); + Select selectOne = Select.builder().select(Column.create("oneId", tabOne)).from(tabOne).build(); + Select selectTwo = Select.builder().select(Column.create("twoId", tabTwo)).from(tabTwo).build(); + + return Arrays.asList( + fixture("single table", new TestFrom(Table.create("one")), "one"), + fixture("single table with alias", new TestFrom(Table.aliased("one", "one_alias")), "one one_alias"), + fixture("multiple tables", new TestFrom(Table.create("one"),Table.create("two")), "one, two"), + fixture("multiple tables with alias", new TestFrom(Table.aliased("one", "one_alias"),Table.aliased("two", "two_alias")), "one one_alias, two two_alias"), + fixture("single inline query", new TestFrom(InlineQuery.create(selectOne, "ilAlias")), "(SELECT tabOne.oneId FROM tabOne) ilAlias"), + fixture("inline query with table", new TestFrom(InlineQuery.create(selectOne, "ilAlias"), tabTwo), "(SELECT tabOne.oneId FROM tabOne) ilAlias, tabTwo"), + fixture("table with inline query", new TestFrom(tabTwo,InlineQuery.create(selectOne, "ilAlias")), "tabTwo, (SELECT tabOne.oneId FROM tabOne) ilAlias"), + fixture("two inline queries", new TestFrom(InlineQuery.create(selectOne, "aliasOne"),InlineQuery.create(selectTwo, "aliasTwo")), "(SELECT tabOne.oneId FROM tabOne) aliasOne, (SELECT tabTwo.twoId FROM tabTwo) aliasTwo") + ); + } + + private static Fixture fixture(String comment, From from, String renderResult) { + + Fixture fixture = new Fixture(); + 
fixture.comment = comment; + fixture.from = from; + fixture.renderResult = renderResult; + return fixture; + } + + static class Fixture { + + String comment; + From from; + String renderResult; + + @Override + public String toString() { + return comment; + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/InsertRendererUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/InsertRendererUnitTests.java new file mode 100644 index 0000000000..1d26ca38ea --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/InsertRendererUnitTests.java @@ -0,0 +1,74 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.Insert; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.Table; + +/** + * Unit tests for {@link SqlRenderer}. 
+ * + * @author Mark Paluch + * @author Jens Schauder + */ +public class InsertRendererUnitTests { + + @Test // DATAJDBC-335 + public void shouldRenderInsert() { + + Table bar = SQL.table("bar"); + + Insert insert = Insert.builder().into(bar).values(SQL.bindMarker()).build(); + + assertThat(SqlRenderer.toString(insert)).isEqualTo("INSERT INTO bar VALUES (?)"); + } + + @Test // DATAJDBC-335 + public void shouldRenderInsertColumn() { + + Table bar = SQL.table("bar"); + + Insert insert = Insert.builder().into(bar).column(bar.column("foo")).values(SQL.bindMarker()).build(); + + assertThat(SqlRenderer.toString(insert)).isEqualTo("INSERT INTO bar (foo) VALUES (?)"); + } + + @Test // DATAJDBC-335 + public void shouldRenderInsertMultipleColumns() { + + Table bar = SQL.table("bar"); + + Insert insert = Insert.builder().into(bar).columns(bar.columns("foo", "baz")).value(SQL.bindMarker()) + .value(SQL.literalOf("foo")).build(); + + assertThat(SqlRenderer.toString(insert)).isEqualTo("INSERT INTO bar (foo, baz) VALUES (?, 'foo')"); + } + + @Test // DATAJDBC-340 + public void shouldRenderInsertWithZeroColumns() { + + Table bar = SQL.table("bar"); + + Insert insert = Insert.builder().into(bar).build(); + + assertThat(SqlRenderer.toString(insert)).isEqualTo("INSERT INTO bar VALUES (DEFAULT)"); + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/JoinVisitorTestsUnitTest.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/JoinVisitorTestsUnitTest.java new file mode 100644 index 0000000000..5e67fe8755 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/JoinVisitorTestsUnitTest.java @@ -0,0 +1,93 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.InlineQuery; +import org.springframework.data.relational.core.sql.Join; +import org.springframework.data.relational.core.sql.Select; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.TestJoin; +import org.springframework.data.relational.core.sql.Visitor; + +/** + * Unit tests for {@link JoinVisitor}. 
+ * + * @author Jens Schauder + */ +public class JoinVisitorTestsUnitTest { + + final StringBuilder builder = new StringBuilder(); + Visitor visitor = new JoinVisitor(new SimpleRenderContext(NamingStrategies.asIs()), builder::append); + + @ParameterizedTest + @MethodSource + void renderJoins(Fixture f) { + + Join join = f.join; + + join.visit(visitor); + + assertThat(builder).hasToString(f.renderResult); + } + + static List renderJoins() { + + Column colOne = Column.create("colOne", Table.create("tabOne")); + Table tabTwo = Table.create("tabTwo"); + Column colTwo = Column.create("colTwo", tabTwo); + Column renamed = colOne.as("renamed"); + Select select = Select.builder().select(renamed).from(colOne.getTable()).build(); + InlineQuery inlineQuery = InlineQuery.create(select, "inline"); + + return Arrays.asList( + fixture("simple join", new TestJoin(Join.JoinType.JOIN, tabTwo, colOne.isEqualTo(colTwo)), + "JOIN tabTwo ON tabOne.colOne = tabTwo.colTwo"), + fixture("inlineQuery", + new TestJoin(Join.JoinType.JOIN, inlineQuery, colTwo.isEqualTo(inlineQuery.column("renamed"))), + "JOIN (SELECT tabOne.colOne AS renamed FROM tabOne) inline ON tabTwo.colTwo = inline.renamed")); + } + + private static Fixture fixture(String comment, Join join, String renderResult) { + + Fixture fixture = new Fixture(); + fixture.comment = comment; + fixture.join = join; + fixture.renderResult = renderResult; + + return fixture; + } + + static class Fixture { + + String comment; + Join join; + String renderResult; + + @Override + public String toString() { + return comment; + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/NameRendererUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/NameRendererUnitTests.java new file mode 100644 index 0000000000..e4d0c0846e --- /dev/null +++ 
b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/NameRendererUnitTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Table; + +/** + * Unit tests for the {@link NameRenderer}. 
+ * + * @author Jens Schauder + */ +class NameRendererUnitTests { + + RenderContext context = new SimpleRenderContext(NamingStrategies.asIs()); + + @Test // GH-1003 + void rendersColumnWithoutTableName() { + + Column column = Column.create("column", Table.create("table")); + + CharSequence rendered = NameRenderer.render(context, column); + + assertThat(rendered).isEqualTo("column"); + } + + @Test // GH-1003, GH-968 + void fullyQualifiedReferenceWithAlias() { + + Column column = Column.aliased("col", Table.aliased("table", "tab_alias"), "col_alias"); + + CharSequence rendered = NameRenderer.fullyQualifiedReference(context, column); + + assertThat(rendered).isEqualTo("col_alias"); + } + + @Test // GH-1003, GH-968 + void fullyQualifiedReference() { + + Column column = Table.aliased("table", "tab_alias").column("col"); + + CharSequence rendered = NameRenderer.fullyQualifiedReference(context, column); + + assertThat(rendered).isEqualTo("tab_alias.col"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/OrderByClauseVisitorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/OrderByClauseVisitorUnitTests.java new file mode 100644 index 0000000000..e093f8ec62 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/OrderByClauseVisitorUnitTests.java @@ -0,0 +1,141 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.*; + +import java.util.Arrays; +import java.util.List; + +/** + * Unit tests for {@link OrderByClauseVisitor}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Koen Punt + * @author Sven Rienstra + */ +class OrderByClauseVisitorUnitTests { + + @Test // DATAJDBC-309 + void shouldRenderOrderByAlias() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name").as("emp_name"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(column).asc()).build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs())); + select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("emp_name ASC"); + } + + @Test // DATAJDBC-309 + void shouldApplyNamingStrategy() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name").as("emp_name"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(column).asc()).build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.toUpper())); + select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("EMP_NAME ASC"); + } + + @Test // GH-968 + void shouldRenderOrderByFullyQualifiedName() { + + Table employee = SQL.table("employee"); + Column column = employee.column("name"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(column).asc()).build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs())); + 
select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("employee.name ASC"); + } + + @Test // GH-968 + void shouldRenderOrderByFullyQualifiedNameWithTableAlias() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(column).asc()).build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs())); + select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("emp.name ASC"); + } + + @Test // GH-1348 + void shouldRenderOrderBySimpleFunction() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name"); + List columns = Arrays.asList(employee.column("id"), column); + + SimpleFunction simpleFunction = SimpleFunction.create("GREATEST", columns); + + Select select = Select.builder().select(column).from(employee) + .orderBy(OrderByField.from(simpleFunction).asc(), OrderByField.from(column).asc()).build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs())); + select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("GREATEST(emp.id, emp.name) ASC, emp.name ASC"); + } + + @Test // GH-1348 + void shouldRenderOrderBySimpleExpression() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name"); + + Expression simpleExpression = Expressions.just("1"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(simpleExpression).asc()) + .build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs())); + select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("1 ASC"); + } + + @Test + void shouldRenderOrderByCase() { + + Table employee = 
SQL.table("employee").as("emp"); + Column column = employee.column("name"); + + CaseExpression caseExpression = CaseExpression.create(When.when(column.isNull(), SQL.literalOf(1))).elseExpression(SQL.literalOf(column)); + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(caseExpression).asc()).build(); + + OrderByClauseVisitor visitor = new OrderByClauseVisitor(new SimpleRenderContext(NamingStrategies.asIs())); + select.visit(visitor); + + assertThat(visitor.getRenderedPart().toString()).isEqualTo("CASE WHEN emp.name IS NULL THEN 1 ELSE emp.name END ASC"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/SelectRendererUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/SelectRendererUnitTests.java new file mode 100644 index 0000000000..4f2121656e --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/SelectRendererUnitTests.java @@ -0,0 +1,834 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.dialect.PostgresDialect; +import org.springframework.data.relational.core.dialect.RenderContextFactory; +import org.springframework.data.relational.core.sql.*; +import org.springframework.util.StringUtils; + +import java.util.List; + +/** + * Unit tests for {@link SqlRenderer}. + * + * @author Mark Paluch + * @author Jens Schauder + * @author Sven Rienstra + */ +class SelectRendererUnitTests { + + @Test // DATAJDBC-309, DATAJDBC-278 + void shouldRenderSingleColumn() { + + Table bar = SQL.table("bar"); + Column foo = bar.column("foo"); + + Select select = Select.builder().select(foo).from(bar).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT bar.foo FROM bar"); + } + + @Test + void honorsNamingStrategy() { + + Table bar = SQL.table("bar"); + Column foo = bar.column("foo"); + + Select select = Select.builder().select(foo).from(bar).build(); + + assertThat(SqlRenderer.create(new SimpleRenderContext(NamingStrategies.toUpper())).render(select)) + .isEqualTo("SELECT BAR.FOO FROM BAR"); + } + + @Test // DATAJDBC-309 + void shouldRenderAliasedColumnAndFrom() { + + Table table = Table.create("bar").as("my_bar"); + + Select select = Select.builder().select(table.column("foo").as("my_foo")).from(table).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT my_bar.foo AS my_foo FROM bar my_bar"); + } + + @Test // DATAJDBC-309 + void shouldRenderMultipleColumnsFromTables() { + + Table table1 = Table.create("table1"); + Table table2 = Table.create("table2"); + + Select select = Select.builder().select(table1.column("col1")).select(table2.column("col2")).from(table1) + .from(table2).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT table1.col1, table2.col2 FROM table1, table2"); 
+ } + + @Test // DATAJDBC-309 + void shouldRenderDistinct() { + + Table table = SQL.table("bar"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().distinct().select(foo, bar).from(table).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT DISTINCT bar.foo, bar.bar FROM bar"); + } + + @Test // DATAJDBC-309 + void shouldRenderCountFunction() { + + Table table = SQL.table("bar"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().select(Functions.count(foo), bar).from(table).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT COUNT(bar.foo), bar.bar FROM bar"); + } + + @Test // DATAJDBC-340 + void shouldRenderCountFunctionWithAliasedColumn() { + + Table table = SQL.table("bar"); + Column foo = table.column("foo").as("foo_bar"); + + Select select = Select.builder().select(Functions.count(foo), foo).from(table).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT COUNT(bar.foo), bar.foo AS foo_bar FROM bar"); + } + + @Test // DATAJDBC-309 + void shouldRenderSimpleJoin() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select select = Select.builder().select(employee.column("id"), department.column("name")).from(employee) // + .join(department).on(employee.column("department_id")).equals(department.column("id")) // + .build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT employee.id, department.name FROM employee " + + "JOIN department ON employee.department_id = department.id"); + } + + @Test // DATAJDBC-340 + void shouldRenderOuterJoin() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select select = Select.builder().select(employee.column("id"), department.column("name")) // + .from(employee) // + 
.leftOuterJoin(department).on(employee.column("department_id")).equals(department.column("id")) // + .build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT employee.id, department.name FROM employee " + + "LEFT OUTER JOIN department ON employee.department_id = department.id"); + } + + @Test // GH-1421 + void shouldRenderFullOuterJoin() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select select = Select.builder().select(employee.column("id"), department.column("name")) // + .from(employee) // + .join(department, Join.JoinType.FULL_OUTER_JOIN).on(employee.column("department_id")) + .equals(department.column("id")) // + .build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT employee.id, department.name FROM employee " + + "FULL OUTER JOIN department ON employee.department_id = department.id"); + } + + @Test // DATAJDBC-309 + void shouldRenderSimpleJoinWithAnd() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select select = Select.builder().select(employee.column("id"), department.column("name")).from(employee) // + .join(department).on(employee.column("department_id")).equals(department.column("id")) // + .and(employee.column("tenant")).equals(department.column("tenant")) // + .build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT employee.id, department.name FROM employee " // + + "JOIN department ON employee.department_id = department.id " // + + "AND employee.tenant = department.tenant"); + } + + @Test // #995 + void shouldRenderArbitraryJoinCondition() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select select = Select.builder() // + .select(employee.column("id"), department.column("name")) // + .from(employee) // + .join(department) // + .on(Conditions.isEqual(employee.column("department_id"), department.column("id")) // + 
.or(Conditions.isNotEqual(employee.column("tenant"), department.column("tenant")) // + )).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT employee.id, department.name FROM employee " // + + "JOIN department ON employee.department_id = department.id " // + + "OR employee.tenant != department.tenant"); + } + + @Test // #1009 + void shouldRenderJoinWithJustExpression() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select select = Select.builder().select(employee.column("id"), department.column("name")).from(employee) // + .join(department).on(Expressions.just("alpha")).equals(Expressions.just("beta")) // + .build(); + + assertThat(SqlRenderer.toString(select)) + .isEqualTo("SELECT employee.id, department.name FROM employee " + "JOIN department ON alpha = beta"); + } + + @Test // DATAJDBC-309 + void shouldRenderMultipleJoinWithAnd() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + Table tenant = SQL.table("tenant").as("tenant_base"); + + Select select = Select.builder().select(employee.column("id"), department.column("name")).from(employee) // + .join(department).on(employee.column("department_id")).equals(department.column("id")) // + .and(employee.column("tenant")).equals(department.column("tenant")) // + .join(tenant).on(tenant.column("tenant_id")).equals(department.column("tenant")) // + .build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT employee.id, department.name FROM employee " // + + "JOIN department ON employee.department_id = department.id " // + + "AND employee.tenant = department.tenant " // + + "JOIN tenant tenant_base ON tenant_base.tenant_id = department.tenant"); + } + + @Test // GH-1003 + void shouldRenderJoinWithInlineQuery() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select innerSelect = Select.builder() + .select(employee.column("id"), 
employee.column("department_Id"), employee.column("name")).from(employee) + .build(); + + InlineQuery one = InlineQuery.create(innerSelect, "one"); + + Select select = Select.builder().select(one.column("id"), department.column("name")).from(department) // + .join(one).on(one.column("department_id")).equals(department.column("id")) // + .build(); + + String sql = SqlRenderer.toString(select); + + assertThat(sql).isEqualTo("SELECT one.id, department.name FROM department " // + + "JOIN (SELECT employee.id, employee.department_Id, employee.name FROM employee) one " // + + "ON one.department_id = department.id"); + } + + @Test // GH-1362 + void shouldRenderNestedJoins() { + + Table merchantCustomers = Table.create("merchants_customers"); + Table customerDetails = Table.create("customer_details"); + + Select innerSelect = Select.builder().select(customerDetails.column("cd_user_id")).from(customerDetails) + .join(merchantCustomers) + .on(merchantCustomers.column("mc_user_id").isEqualTo(customerDetails.column("cd_user_id"))).build(); + + InlineQuery innerTable = InlineQuery.create(innerSelect, "inner"); + + Select select = Select.builder().select(merchantCustomers.asterisk()) // + .from(merchantCustomers) // + .join(innerTable).on(innerTable.column("i_user_id").isEqualTo(merchantCustomers.column("mc_user_id"))) // + .build(); + + String sql = SqlRenderer.toString(select); + + assertThat(sql).isEqualTo("SELECT merchants_customers.* FROM merchants_customers " + // + "JOIN (" + // + "SELECT customer_details.cd_user_id " + // + "FROM customer_details " + // + "JOIN merchants_customers ON merchants_customers.mc_user_id = customer_details.cd_user_id" + // + ") inner " + // + "ON inner.i_user_id = merchants_customers.mc_user_id"); + } + + @Test // GH-1003 + void shouldRenderJoinWithTwoInlineQueries() { + + Table employee = SQL.table("employee"); + Table department = SQL.table("department"); + + Select innerSelectOne = Select.builder() + .select(employee.column("id").as("empId"), 
employee.column("department_Id"), employee.column("name")) + .from(employee).build(); + Select innerSelectTwo = Select.builder().select(department.column("id"), department.column("name")).from(department) + .build(); + + InlineQuery one = InlineQuery.create(innerSelectOne, "one"); + InlineQuery two = InlineQuery.create(innerSelectTwo, "two"); + + Select select = Select.builder().select(one.column("empId"), two.column("name")).from(one) // + .join(two).on(two.column("department_id")).equals(one.column("empId")) // + .build(); + + String sql = SqlRenderer.toString(select); + assertThat(sql).isEqualTo("SELECT one.empId, two.name FROM (" // + + "SELECT employee.id AS empId, employee.department_Id, employee.name FROM employee) one " // + + "JOIN (SELECT department.id, department.name FROM department) two " // + + "ON two.department_id = one.empId"); + } + + @Test // DATAJDBC-309 + void shouldRenderOrderByName() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(column).asc()).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT emp.name FROM employee emp ORDER BY emp.name ASC"); + } + + @Test // GH-968 + void shouldRenderOrderByAlias() { + + Table employee = SQL.table("employee").as("emp"); + Column column = employee.column("name").as("my_emp_name"); + + Select select = Select.builder().select(column).from(employee).orderBy(OrderByField.from(column).asc()).build(); + + assertThat(SqlRenderer.toString(select)) + .isEqualTo("SELECT emp.name AS my_emp_name FROM employee emp ORDER BY my_emp_name ASC"); + } + + @Test // DATAJDBC-309 + void shouldRenderIsNull() { + + Table table = SQL.table("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().select(bar).from(table).where(Conditions.isNull(bar)).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT foo.bar FROM foo WHERE 
foo.bar IS NULL"); + } + + @Test // DATAJDBC-309 + void shouldRenderNotNull() { + + Table table = SQL.table("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().select(bar).from(table).where(Conditions.isNull(bar).not()).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar IS NOT NULL"); + } + + @Test // DATAJDBC-309 + void shouldRenderEqualityCondition() { + + Table table = SQL.table("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().select(bar).from(table).where(Conditions.isEqual(bar, SQL.bindMarker(":name"))) + .build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar = :name"); + } + + @Test // DATAJDBC-309 + void shouldRendersAndOrConditionWithProperParentheses() { + + Table table = SQL.table("foo"); + Column bar = table.column("bar"); + Column baz = table.column("baz"); + + Select select = Select.builder().select(bar).from(table).where(Conditions.isEqual(bar, SQL.bindMarker(":name")) + .or(Conditions.isEqual(bar, SQL.bindMarker(":name2"))).and(Conditions.isNull(baz))).build(); + + assertThat(SqlRenderer.toString(select)) + .isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar = :name OR foo.bar = :name2 AND foo.baz IS NULL"); + } + + @Test // DATAJDBC-309 + void shouldInWithNamedParameter() { + + Table table = SQL.table("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().select(bar).from(table).where(Conditions.in(bar, SQL.bindMarker(":name"))).build(); + + assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar IN (:name)"); + } + + @Test // DATAJDBC-309 + void shouldInWithNamedParameters() { + + Table table = SQL.table("foo"); + Column bar = table.column("bar"); + + Select select = Select.builder().select(bar).from(table) + .where(Conditions.in(bar, SQL.bindMarker(":name"), SQL.bindMarker(":name2"))).build(); + + 
assertThat(SqlRenderer.toString(select)).isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar IN (:name, :name2)"); + } + + @Test // DATAJDBC-309 + void shouldRenderInSubselect() { + + Table foo = SQL.table("foo"); + Column bar = foo.column("bar"); + + Table floo = SQL.table("floo"); + Column bah = floo.column("bah"); + + Select subselect = Select.builder().select(bah).from(floo).build(); + + Select select = Select.builder().select(bar).from(foo).where(bar.in(subselect)).build(); + + assertThat(SqlRenderer.toString(select)) + .isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar IN (SELECT floo.bah FROM floo)"); + } + + @Test // GH-1831 + void shouldRenderSimpleFunctionWithSubselect() { + + Table foo = SQL.table("foo"); + + Table floo = SQL.table("floo"); + Column bah = floo.column("bah"); + + + Select subselect = Select.builder().select(bah).from(floo).build(); + + SimpleFunction func = SimpleFunction.create("func", List.of(SubselectExpression.of(subselect))); + + Select select = Select.builder() // + .select(func.as("alias")) // + .from(foo) // + .where(Conditions.isEqual(func, SQL.literalOf(23))) // + .build(); + + assertThat(SqlRenderer.toString(select)) + .isEqualTo("SELECT func(SELECT floo.bah FROM floo) AS alias FROM foo WHERE func(SELECT floo.bah FROM floo) = 23"); + } + + @Test // DATAJDBC-309 + void shouldConsiderNamingStrategy() { + + Table foo = SQL.table("Foo"); + Column bar = foo.column("BaR"); + Column baz = foo.column("BaZ"); + + Select select = Select.builder().select(bar).from(foo).where(bar.isEqualTo(baz)).build(); + + String upper = SqlRenderer.create(new SimpleRenderContext(NamingStrategies.toUpper())).render(select); + assertThat(upper).isEqualTo("SELECT FOO.BAR FROM FOO WHERE FOO.BAR = FOO.BAZ"); + + String lower = SqlRenderer.create(new SimpleRenderContext(NamingStrategies.toLower())).render(select); + assertThat(lower).isEqualTo("SELECT foo.bar FROM foo WHERE foo.bar = foo.baz"); + + String mapped = SqlRenderer.create(new 
SimpleRenderContext(NamingStrategies.mapWith(StringUtils::uncapitalize))) + .render(select); + assertThat(mapped).isEqualTo("SELECT foo.baR FROM foo WHERE foo.baR = foo.baZ"); + } + + @Test // DATAJDBC-340 + void shouldRenderCountStar() { + + Select select = Select.builder() // + .select(Functions.count(Expressions.asterisk())) // + .from(SQL.table("foo")) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT COUNT(*) FROM foo"); + } + + @Test // DATAJDBC-340 + void shouldRenderCountTableStar() { + + Table foo = SQL.table("foo"); + Select select = Select.builder() // + .select(Functions.count(foo.asterisk())) // + .from(foo) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT COUNT(foo.*) FROM foo"); + } + + @Test // DATAJDBC-340 + void shouldRenderFunctionWithAlias() { + + Table foo = SQL.table("foo"); + Select select = Select.builder() // + .select(Functions.count(foo.asterisk()).as("counter")) // + .from(foo) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT COUNT(foo.*) AS counter FROM foo"); + } + + @Test // DATAJDBC-479 + void shouldRenderWithRenderContext() { + + Table table = Table.create(SqlIdentifier.quoted("my_table")); + Table join_table = Table.create(SqlIdentifier.quoted("join_table")); + Select select = Select.builder() // + .select(Functions.count(table.asterisk()).as("counter"), table.column(SqlIdentifier.quoted("reserved_keyword"))) // + .from(table) // + .join(join_table).on(table.column("source")).equals(join_table.column("target")).build(); + + String rendered = SqlRenderer.create(new RenderContextFactory(PostgresDialect.INSTANCE).createRenderContext()) + .render(select); + + assertThat(rendered).isEqualTo( + "SELECT COUNT(\"my_table\".*) AS counter, \"my_table\".\"reserved_keyword\" FROM \"my_table\" JOIN \"join_table\" ON \"my_table\".source = \"join_table\".target"); + } 
+ + @Test // GH-1034 + void simpleComparisonWithStringArguments() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name"), table_user.column("age")).from(table_user) + .where(Comparison.create("age", ">", 20)).build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.name, User.age FROM User WHERE age > 20"); + } + + @Test // GH-1034 + void simpleComparison() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name"), table_user.column("age")).from(table_user) + .where(Comparison.create(table_user.column("age"), ">", SQL.literalOf(20))).build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.name, User.age FROM User WHERE User.age > 20"); + } + + @Test // GH-1066 + void shouldRenderCast() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(Expressions.cast(table_user.column("name"), "VARCHAR2")).from(table_user) + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT CAST(User.name AS VARCHAR2) FROM User"); + } + + @Test // GH-1076 + void rendersLimitAndOffset() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name")).from(table_user).limitOffset(10, 5).build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.name FROM User OFFSET 5 ROWS FETCH FIRST 10 ROWS ONLY"); + } + + @Test // GH-1076 + void rendersLimit() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name")).from(table_user) // + .limit(3) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.name FROM User FETCH FIRST 3 ROWS ONLY"); + } + + @Test // GH-1076 + void rendersLock() { + + Table table_user = 
SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name")).from(table_user) // + .lock(LockMode.PESSIMISTIC_READ) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.name FROM User FOR UPDATE"); + } + + @Test // GH-1076 + void rendersLockAndOffset() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name")).from(table_user).offset(3) // + .lock(LockMode.PESSIMISTIC_WRITE) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.name FROM User FOR UPDATE OFFSET 3 ROWS"); + } + + @Test // GH-1076 + void rendersLockAndOffsetUsingDialect() { + + Table table_user = SQL.table("User"); + Select select = StatementBuilder.select(table_user.column("name")).from(table_user).limitOffset(3, 6) // + .lock(LockMode.PESSIMISTIC_WRITE) // + .build(); + + String rendered = SqlRenderer.create(new RenderContextFactory(PostgresDialect.INSTANCE).createRenderContext()) + .render(select); + assertThat(rendered).isEqualTo("SELECT User.name FROM User LIMIT 3 OFFSET 6 FOR UPDATE OF User"); + } + + @Test // GH-1007 + void shouldRenderConditionAsExpression() { + + Table table = SQL.table("User"); + Select select = StatementBuilder.select( // + Conditions.isGreater(table.column("age"), SQL.literalOf(18))) // + .from(table) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT User.age > 18 FROM User"); + } + + @Test // GH-968 + void rendersFullyQualifiedNamesInOrderBy() { + + Table tableA = SQL.table("tableA"); + Column tableAName = tableA.column("name"); + Column tableAId = tableA.column("id"); + + Table tableB = SQL.table("tableB"); + Column tableBId = tableB.column("id"); + Column tableBName = tableB.column("name"); + + Select select = StatementBuilder.select(Expressions.asterisk()) // + .from(tableA) // + 
.join(tableB).on(tableAId.isEqualTo(tableBId)) // + .orderBy(tableAName, tableBName) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered) + .isEqualTo("SELECT * FROM tableA JOIN tableB ON tableA.id = tableB.id ORDER BY tableA.name, tableB.name"); + } + + @Test // GH-1446 + void rendersAliasedExpression() { + + Table table = SQL.table("table"); + Column tableName = table.column("name"); + + Select select = StatementBuilder.select(new AliasedExpression(tableName, "alias")) // + .from(table) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT table.name AS alias FROM table"); + } + + @Test // GH-1653 + void notOfNested() { + + Table table = SQL.table("atable"); + + Select select = StatementBuilder.select(table.asterisk()).from(table).where(Conditions.nest( + table.column("id").isEqualTo(Expressions.just("1")).and(table.column("id").isEqualTo(Expressions.just("2")))) + .not()).build(); + String sql = SqlRenderer.toString(select); + + assertThat(sql).isEqualTo("SELECT atable.* FROM atable WHERE NOT (atable.id = 1 AND atable.id = 2)"); + + select = StatementBuilder.select(table.asterisk()).from(table).where(Conditions.not(Conditions.nest( + table.column("id").isEqualTo(Expressions.just("1")).and(table.column("id").isEqualTo(Expressions.just("2")))))) + .build(); + sql = SqlRenderer.toString(select); + + assertThat(sql).isEqualTo("SELECT atable.* FROM atable WHERE NOT (atable.id = 1 AND atable.id = 2)"); + } + + @Test // GH-1945 + void notOfTrue() { + + Select selectFalse = Select.builder().select(Expressions.just("*")).from("test_table") + .where(Conditions.just("true").not()).build(); + String renderSelectFalse = SqlRenderer.create().render(selectFalse); + + assertThat(renderSelectFalse).isEqualTo("SELECT * FROM test_table WHERE NOT true"); + } + + @Test // GH-1945 + void notOfNestedTrue() { + + Select selectFalseNested = 
Select.builder().select(Expressions.just("*")).from("test_table") + .where(Conditions.nest(Conditions.just("true")).not()).build(); + String renderSelectFalseNested = SqlRenderer.create().render(selectFalseNested); + + assertThat(renderSelectFalseNested).isEqualTo("SELECT * FROM test_table WHERE NOT (true)"); + } + + @Test // GH-1651 + void asteriskOfAliasedTableUsesAlias() { + + Table employee = SQL.table("employee").as("e"); + Select select = Select.builder().select(employee.asterisk()).select(employee.column("id")).from(employee).build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT e.*, e.id FROM employee e"); + } + + @Test + void rendersCaseExpression() { + + Table table = SQL.table("table"); + Column column = table.column("name"); + + CaseExpression caseExpression = CaseExpression.create(When.when(column.isNull(), SQL.literalOf(1))) // + .when(When.when(column.isNotNull(), column)) // + .elseExpression(SQL.literalOf(3)); + + Select select = StatementBuilder.select(caseExpression) // + .from(table) // + .build(); + + String rendered = SqlRenderer.toString(select); + assertThat(rendered).isEqualTo("SELECT CASE WHEN table.name IS NULL THEN 1 WHEN table.name IS NOT NULL THEN table.name ELSE 3 END FROM table"); + } + + /** + * Tests the rendering of analytic functions. 
+ */ + @Nested + class AnalyticFunctionsTests { + + Table employee = SQL.table("employee"); + Column department = employee.column("department"); + Column age = employee.column("age"); + Column salary = employee.column("salary"); + + @Test // GH-1019 + void renderEmptyOver() { + + Select select = StatementBuilder.select( // + AnalyticFunction.create("MAX", salary) // + ) // + .from(employee) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT MAX(employee.salary) OVER() FROM employee"); + } + + @Test // GH-1019 + void renderPartition() { + + Select select = StatementBuilder.select( // + AnalyticFunction.create("MAX", salary) // + .partitionBy(department) // + ) // + .from(employee) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered) + .isEqualTo("SELECT MAX(employee.salary) OVER(PARTITION BY employee.department) FROM employee"); + } + + @Test // GH-1019 + void renderOrderBy() { + + Select select = StatementBuilder.select( // + AnalyticFunction.create("MAX", salary) // + .orderBy(age) // + ) // + .from(employee) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT MAX(employee.salary) OVER(ORDER BY employee.age) FROM employee"); + } + + @Test // GH-1019 + void renderFullAnalyticFunction() { + + final Select select = StatementBuilder.select( // + AnalyticFunction.create("MAX", salary) // + .partitionBy(department) // + .orderBy(age) // + ) // + .from(employee) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo( + "SELECT MAX(employee.salary) OVER(PARTITION BY employee.department ORDER BY employee.age) FROM employee"); + } + + @Test // GH-1019 + void renderAnalyticFunctionWithAlias() { + + final Select select = StatementBuilder.select( // + AnalyticFunction.create("MAX", salary) // + .partitionBy(department) // + .orderBy(age) // + .as("MAX_SELECT")) // + 
.from(employee) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo( + "SELECT MAX(employee.salary) OVER(PARTITION BY employee.department ORDER BY employee.age) AS MAX_SELECT FROM employee"); + } + + @Test // GH-1153 + void renderAnalyticFunctionWithOutArgument() { + + final Select select = StatementBuilder.select( // + AnalyticFunction.create("ROW_NUMBER") // + .partitionBy(department)) // + .from(employee) // + .build(); + + String rendered = SqlRenderer.toString(select); + + assertThat(rendered).isEqualTo("SELECT ROW_NUMBER() OVER(PARTITION BY employee.department) FROM employee"); + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/TypedSubtreeVisitorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/TypedSubtreeVisitorUnitTests.java new file mode 100644 index 0000000000..e8f18caaa0 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/TypedSubtreeVisitorUnitTests.java @@ -0,0 +1,230 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.relational.core.sql.render.DelegatingVisitor.Delegation.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; + +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.sql.AbstractTestSegment; +import org.springframework.data.relational.core.sql.Segment; +import org.springframework.data.relational.core.sql.Visitable; + +/** + * Unit tests for {@link org.springframework.data.relational.core.sql.render.TypedSubtreeVisitor}. + * + * @author Jens Schauder + */ +class TypedSubtreeVisitorUnitTests { + + List events = new ArrayList<>(); + + @Test // GH-1003 + void enterAndLeavesSingleSegment() { + + TypedSubtreeVisitor visitor = new LoggingTypedSubtreeVisitor(); + TestSegment root = new TestSegment("root"); + + root.visit(visitor); + + assertThat(events).containsExactly("enter matched root", "leave matched root"); + } + + @Test // GH-1003 + void enterAndLeavesChainOfMatchingSegmentsAsNested() { + + TypedSubtreeVisitor visitor = new LoggingTypedSubtreeVisitor(); + TestSegment root = new TestSegment("root", new TestSegment("level 1", new TestSegment("level 2"))); + + root.visit(visitor); + + assertThat(events).containsExactly("enter matched root", "enter nested level 1", "enter nested level 2", + "leave nested level 2", "leave nested level 1", "leave matched root"); + } + + @Test // GH-1003 + void enterAndLeavesMatchingChildrenAsNested() { + + TypedSubtreeVisitor visitor = new LoggingTypedSubtreeVisitor(); + TestSegment root = new TestSegment("root", new TestSegment("child 1"), new TestSegment("child 2")); + + root.visit(visitor); + + assertThat(events).containsExactly("enter matched root", "enter nested child 1", "leave nested child 1", + "enter nested child 2", "leave nested child 2", "leave matched root"); + } + + @Test // GH-1003 + void 
enterAndLeavesChainOfOtherSegmentsAsNested() { + + TypedSubtreeVisitor visitor = new LoggingTypedSubtreeVisitor(); + TestSegment root = new TestSegment("root", new OtherSegment("level 1", new OtherSegment("level 2"))); + + root.visit(visitor); + + assertThat(events).containsExactly("enter matched root", "enter nested level 1", "enter nested level 2", + "leave nested level 2", "leave nested level 1", "leave matched root"); + } + + @Test // GH-1003 + void enterAndLeavesOtherChildrenAsNested() { + + TypedSubtreeVisitor visitor = new LoggingTypedSubtreeVisitor(); + TestSegment root = new TestSegment("root", new OtherSegment("child 1"), new OtherSegment("child 2")); + + root.visit(visitor); + + assertThat(events).containsExactly("enter matched root", "enter nested child 1", "leave nested child 1", + "enter nested child 2", "leave nested child 2", "leave matched root"); + } + + @Test // GH-1003 + void visitorIsReentrant() { + + LoggingTypedSubtreeVisitor visitor = new LoggingTypedSubtreeVisitor(); + TestSegment root1 = new TestSegment("root 1"); + TestSegment root2 = new TestSegment("root 2"); + + root1.visit(visitor); + root2.visit(visitor); + + assertThat(events).containsExactly("enter matched root 1", "leave matched root 1", "enter matched root 2", + "leave matched root 2"); + } + + @Test // GH-1003 + void delegateToOtherVisitorOnEnterMatchedRevisitsTheSegment() { + + LoggingTypedSubtreeVisitor first = new LoggingTypedSubtreeVisitor("first "); + LoggingTypedSubtreeVisitor second = new LoggingTypedSubtreeVisitor("second "); + first.enterMatched(s -> delegateTo(second)); + TestSegment root = new TestSegment("root", new TestSegment("child 1"), new TestSegment("child 2")); + + root.visit(first); + + assertThat(events).containsExactly("first enter matched root", "second enter matched root", + "second enter nested child 1", "second leave nested child 1", "second enter nested child 2", + "second leave nested child 2", "second leave matched root", "first leave matched root"); 
+ } + + @Test // GH-1003 + void delegateToOtherVisitorOnEnterNestedRevisitsTheNestedSegment() { + + LoggingTypedSubtreeVisitor first = new LoggingTypedSubtreeVisitor("first "); + LoggingTypedSubtreeVisitor second = new LoggingTypedSubtreeVisitor("second "); + first.enterNested( + s -> ((TestSegment) s).name.equals("child 2") ? delegateTo(second) : DelegatingVisitor.Delegation.retain()); + TestSegment root = new TestSegment("root", new TestSegment("child 1"), new TestSegment("child 2"), + new TestSegment("child 3")); + + root.visit(first); + + assertThat(events).containsExactly("first enter matched root", "first enter nested child 1", + "first leave nested child 1", "first enter nested child 2", "second enter matched child 2", + "second leave matched child 2", "first leave nested child 2", "first enter nested child 3", + "first leave nested child 3", "first leave matched root"); + } + + static class TestSegment extends AbstractTestSegment { + + private String name; + + TestSegment(String name, Segment... children) { + + super(children); + this.name = name; + } + + @Override + public String toString() { + return name; + } + } + + static class OtherSegment extends AbstractTestSegment { + + private String name; + + public OtherSegment(String name, Segment... children) { + + super(children); + this.name = name; + } + + @Override + public String toString() { + return name; + } + } + + class LoggingTypedSubtreeVisitor extends TypedSubtreeVisitor { + + String prefix; + Function enterMatchedDelegation; + Function enterNestedDelegation; + + LoggingTypedSubtreeVisitor(String prefix) { + this.prefix = prefix; + } + + LoggingTypedSubtreeVisitor() { + this(""); + } + + @Override + Delegation enterMatched(TestSegment segment) { + + events.add(prefix + "enter matched " + segment); + Delegation delegation = super.enterMatched(segment); + + return enterMatchedDelegation == null ? 
delegation : enterMatchedDelegation.apply(segment); + } + + void enterMatched(Function delegation) { + enterMatchedDelegation = delegation; + } + + @Override + Delegation leaveMatched(TestSegment segment) { + + events.add(prefix + "leave matched " + segment); + return super.leaveMatched(segment); + } + + @Override + Delegation enterNested(Visitable segment) { + + events.add(prefix + "enter nested " + segment); + return enterNestedDelegation == null ? super.enterNested(segment) : enterNestedDelegation.apply(segment); + } + + void enterNested(Function delegation) { + enterNestedDelegation = delegation; + } + + @Override + Delegation leaveNested(Visitable segment) { + + events.add(prefix + "leave nested " + segment); + return super.leaveNested(segment); + } + + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/UpdateRendererUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/UpdateRendererUnitTests.java new file mode 100644 index 0000000000..ea5cfe0c1e --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/render/UpdateRendererUnitTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sql.render; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.StatementBuilder; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.relational.core.sql.Update; + +/** + * Unit tests for {@link SqlRenderer}. + * + * @author Mark Paluch + */ +public class UpdateRendererUnitTests { + + @Test // DATAJDBC-335 + public void shouldRenderSimpleUpdate() { + + Table table = SQL.table("mytable"); + Column column = table.column("foo"); + + Update update = StatementBuilder.update(table).set(column.set(SQL.bindMarker())).build(); + + assertThat(SqlRenderer.toString(update)).isEqualTo("UPDATE mytable SET foo = ?"); + } + + @Test // DATAJDBC-335 + public void shouldRenderMultipleColumnUpdate() { + + Table table = SQL.table("mytable"); + Column foo = table.column("foo"); + Column bar = table.column("bar"); + + Update update = StatementBuilder.update(table) // + .set(foo.set(SQL.bindMarker()), bar.set(SQL.bindMarker())) // + .build(); + + assertThat(SqlRenderer.toString(update)).isEqualTo("UPDATE mytable SET foo = ?, bar = ?"); + } + + @Test // DATAJDBC-335 + public void shouldRenderUpdateWithLiteral() { + + Table table = SQL.table("mytable"); + Column column = table.column("foo"); + + Update update = StatementBuilder.update(table).set(column.set(SQL.literalOf(20))).build(); + + assertThat(SqlRenderer.toString(update)).isEqualTo("UPDATE mytable SET foo = 20"); + } + + @Test // DATAJDBC-335 + public void shouldCreateUpdateWIthCondition() { + + Table table = SQL.table("mytable"); + Column column = table.column("foo"); + + Update update = StatementBuilder.update(table).set(column.set(SQL.bindMarker())).where(column.isNull()).build(); + + 
assertThat(SqlRenderer.toString(update)).isEqualTo("UPDATE mytable SET foo = ? WHERE mytable.foo IS NULL"); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AliasFactoryUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AliasFactoryUnitTests.java new file mode 100644 index 0000000000..7ec6678f8b --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AliasFactoryUnitTests.java @@ -0,0 +1,164 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; + +/** + * Unit tests for the {@link AliasFactory}. 
+ * @author Jens Schauder + */ +class AliasFactoryUnitTests { + + RelationalMappingContext context = new RelationalMappingContext(); + AliasFactory aliasFactory = new AliasFactory(); + + @Nested + class SimpleAlias { + @Test // GH-1446 + void aliasForRoot() { + + String alias = aliasFactory + .getColumnAlias(context.getAggregatePath(context.getRequiredPersistentEntity(DummyEntity.class))); + + assertThat(alias).isEqualTo("c_dummy_entity_1"); + } + + @Test // GH-1446 + void aliasSimpleProperty() { + + String alias = aliasFactory + .getColumnAlias(context.getAggregatePath(context.getPersistentPropertyPath("name", DummyEntity.class))); + + assertThat(alias).isEqualTo("c_name_1"); + } + + @Test // GH-1446 + void nameGetsSanitized() { + + String alias = aliasFactory.getColumnAlias( + context.getAggregatePath( context.getPersistentPropertyPath("evil", DummyEntity.class))); + + assertThat(alias).isEqualTo("c_ameannamecontains3illegal_characters_1"); + } + + @Test // GH-1446 + void aliasIsStable() { + + String alias1 = aliasFactory.getColumnAlias( + context.getAggregatePath( context.getRequiredPersistentEntity(DummyEntity.class))); + String alias2 = aliasFactory.getColumnAlias( + context.getAggregatePath( context.getRequiredPersistentEntity(DummyEntity.class))); + + assertThat(alias1).isEqualTo(alias2); + } + } + + @Nested + class RnAlias { + + @Test // GH-1446 + void aliasIsStable() { + + String alias1 = aliasFactory.getRowNumberAlias( + context.getAggregatePath(context.getRequiredPersistentEntity(DummyEntity.class))); + String alias2 = aliasFactory.getRowNumberAlias( + context.getAggregatePath( context.getRequiredPersistentEntity(DummyEntity.class))); + + assertThat(alias1).isEqualTo(alias2); + } + + @Test // GH-1446 + void aliasProjectsOnTableReferencingPath() { + + String alias1 = aliasFactory.getRowNumberAlias( + context.getAggregatePath(context.getRequiredPersistentEntity(DummyEntity.class))); + + String alias2 = aliasFactory.getRowNumberAlias( + 
context.getAggregatePath(context.getPersistentPropertyPath("evil", DummyEntity.class))); + + assertThat(alias1).isEqualTo(alias2); + } + + @Test // GH-1446 + void rnAliasIsIndependentOfTableAlias() { + + String alias1 = aliasFactory.getRowNumberAlias( + context.getAggregatePath(context.getRequiredPersistentEntity(DummyEntity.class))); + String alias2 = aliasFactory.getColumnAlias( + context.getAggregatePath(context.getRequiredPersistentEntity(DummyEntity.class))); + + assertThat(alias1).isNotEqualTo(alias2); + } + + } + + @Nested + class BackReferenceAlias { + @Test // GH-1446 + void testBackReferenceAlias() { + + String alias = aliasFactory.getBackReferenceAlias( + context.getAggregatePath(context.getPersistentPropertyPath("dummy", Reference.class))); + + assertThat(alias).isEqualTo("br_dummy_entity_1"); + } + } + + @Nested + class KeyAlias { + @Test // GH-1446 + void testKeyAlias() { + + String alias = aliasFactory.getKeyAlias( + context.getAggregatePath(context.getPersistentPropertyPath("dummy", Reference.class))); + + assertThat(alias).isEqualTo("key_dummy_entity_1"); + } + } + + @Nested + class TableAlias { + @Test // GH-1448 + void tableAliasIsDifferentForDifferentPathsToSameEntity() { + + String alias = aliasFactory.getTableAlias( + context.getAggregatePath(context.getPersistentPropertyPath("dummy", Reference.class))); + + String alias2 = aliasFactory.getTableAlias( + context.getAggregatePath(context.getPersistentPropertyPath("dummy2", Reference.class))); + + assertThat(alias).isNotEqualTo(alias2); + } + } + + static class DummyEntity { + String name; + + @Column("a mean name <-- contains > 3 illegal_characters.") String evil; + } + + static class Reference { + DummyEntity dummy; + DummyEntity dummy2; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AliasedPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AliasedPattern.java new file mode 
100644 index 0000000000..25db21d474 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AliasedPattern.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.statement.select.SelectItem; + +/** + * Matches an expression with an alias. + * + * @param pattern for the expression to match + * @param alias to match + * @author Jens Schauder + */ +record AliasedPattern(SelectItemPattern pattern, String alias) implements SelectItemPattern { + + @Override + public boolean matches(SelectItem selectItem) { + return pattern.matches(selectItem) && selectItem.getAlias() != null + && selectItem.getAlias().getName().equals(alias); + } + + @Override + public String toString() { + return pattern + " as " + alias; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AnalyticFunctionPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AnalyticFunctionPattern.java new file mode 100644 index 0000000000..2503d82722 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/AnalyticFunctionPattern.java @@ -0,0 +1,69 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.expression.AnalyticExpression; +import net.sf.jsqlparser.expression.Expression; +import net.sf.jsqlparser.statement.select.SelectItem; + +import java.util.List; + +/** + * Pattern matching analytic functions + * + * @author Jens Schauder + */ +public class AnalyticFunctionPattern extends TypedExpressionPattern { + + private final ExpressionPattern partitionBy; + private String functionName; + + public AnalyticFunctionPattern(String rowNumber, ExpressionPattern partitionBy) { + + super(AnalyticExpression.class); + + this.functionName = rowNumber; + this.partitionBy = partitionBy; + } + + @Override + public boolean matches(SelectItem selectItem) { + + Expression expression = selectItem.getExpression(); + if (expression instanceof AnalyticExpression analyticExpression) { + return matches(analyticExpression); + } + + return false; + } + + @Override + boolean matches(AnalyticExpression analyticExpression) { + return analyticExpression.getName().toLowerCase().equals(functionName) && partitionByMatches(analyticExpression); + } + + private boolean partitionByMatches(AnalyticExpression analyticExpression) { + + List expressions = analyticExpression.getPartitionExpressionList(); + return expressions != null && expressions.size() == 1 && partitionBy.matches(expressions.get(0)); + } + + @Override + public String toString() 
{ + return "row_number() OVER (PARTITION BY " + partitionBy + ')'; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/ColumnPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/ColumnPattern.java new file mode 100644 index 0000000000..96a19d04d1 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/ColumnPattern.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.schema.Column; + +import java.util.Objects; + +/** + * Pattern matching just a simple column + * + * @author Jens Schauder + */ +class ColumnPattern extends TypedExpressionPattern { + private final String columnName; + + /** + * @param columnName name of the expected column. 
+ */ + ColumnPattern(String columnName) { + + super(Column.class); + + this.columnName = columnName; + } + + @Override + public boolean matches(Column actualColumn) { + return actualColumn.getColumnName().equals(columnName); + } + + @Override + public String toString() { + return columnName; + } + + public String columnName() { + return columnName; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) + return true; + if (obj == null || obj.getClass() != this.getClass()) + return false; + var that = (ColumnPattern) obj; + return Objects.equals(this.columnName, that.columnName); + } + + @Override + public int hashCode() { + return Objects.hash(columnName); + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/ExpressionPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/ExpressionPattern.java new file mode 100644 index 0000000000..4b6b35bbaf --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/ExpressionPattern.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.expression.Expression; + +/** + * A pattern that matches various SQL expressions. 
+ * + * @author Jens Schauder + */ +public interface ExpressionPattern { + boolean matches(Expression expression); +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/FunctionPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/FunctionPattern.java new file mode 100644 index 0000000000..3e0dfdce78 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/FunctionPattern.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.expression.Expression; +import net.sf.jsqlparser.expression.Function; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * A pattern matching a function call. + * + * @author Jens Schauder + */ +public final class FunctionPattern extends TypedExpressionPattern { + private final String name; + private final List params; + + /** + * @param name name of the function. + * @param params patterns to match the function arguments. 
+ */ + public FunctionPattern(String name, List params) { + + super(Function.class); + + this.name = name; + this.params = params; + } + + FunctionPattern(String name, ExpressionPattern... params) { + this(name, Arrays.asList(params)); + } + + + @Override + public boolean matches(Function function) { + + if (function.getName().equalsIgnoreCase(name)) { + List expressions = new ArrayList<>(function.getParameters().getExpressions()); + for (ExpressionPattern param : params) { + boolean found = false; + for (Expression exp : expressions) { + if (param.matches(exp)) { + expressions.remove(exp); + found = true; + break; + } + } + if (!found) { + return false; + } + } + + return expressions.isEmpty(); + } + return false; + } + + @Override + public String toString() { + return name + "(" + params.stream().map(Object::toString).collect(Collectors.joining(", ")) + ")"; + } + + public String name() { + return name; + } + + public List params() { + return params; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) return true; + if (obj == null || obj.getClass() != this.getClass()) return false; + var that = (FunctionPattern) obj; + return Objects.equals(this.name, that.name) && + Objects.equals(this.params, that.params); + } + + @Override + public int hashCode() { + return Objects.hash(name, params); + } + +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/JoinAssert.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/JoinAssert.java new file mode 100644 index 0000000000..49a6279a76 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/JoinAssert.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.expression.Expression; +import net.sf.jsqlparser.statement.select.Join; + +import java.util.Collection; + +import org.assertj.core.api.AbstractAssert; + +/** + * AspectJ {@link org.assertj.core.api.Assert} for writing assertions about joins in SQL statements. + * + * @author Jens Schauder + */ +public class JoinAssert extends AbstractAssert { + public JoinAssert(Join join) { + super(join, JoinAssert.class); + } + + JoinAssert on(String left, String right) { + + Collection onExpressions = actual.getOnExpressions(); + + if (!(onExpressions.iterator().next().toString().equals(left + " = " + right))) { + throw failureWithActualExpected(actual, left + " = " + right, + "actual join condition %s does not match expected %s = %s", actual, left, right); + } + return this; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/LiteralPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/LiteralPattern.java new file mode 100644 index 0000000000..801642e3f1 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/LiteralPattern.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.expression.Expression; +import net.sf.jsqlparser.statement.select.SelectItem; + +/** + * Pattern matching a literal expression in a SQL statement. + * + * @param value the value of the expression + * @author Jens Schauder + */ +record LiteralPattern(Object value) implements SelectItemPattern, ExpressionPattern { + + @Override + public boolean matches(SelectItem selectItem) { + return matches(selectItem.getExpression()); + } + + @Override + public boolean matches(Expression expression) { + return expression.toString().equals(String.valueOf(value)); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SelectItemPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SelectItemPattern.java new file mode 100644 index 0000000000..3660725d9d --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SelectItemPattern.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.statement.select.SelectItem; + +/** + * A pattern matching a simple column. + * @author Jens Schauder + */ +interface SelectItemPattern { + default AliasedPattern as(String alias) { + return new AliasedPattern(this, alias); + } + + boolean matches(SelectItem selectItem); +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SingleQuerySqlGeneratorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SingleQuerySqlGeneratorUnitTests.java new file mode 100644 index 0000000000..666c1fd82b --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SingleQuerySqlGeneratorUnitTests.java @@ -0,0 +1,258 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.sqlgeneration; + +import static org.springframework.data.relational.core.sqlgeneration.SqlAssert.*; + +import java.util.List; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.relational.core.dialect.Dialect; +import org.springframework.data.relational.core.dialect.PostgresDialect; +import org.springframework.data.relational.core.mapping.AggregatePath; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.Conditions; +import org.springframework.data.relational.core.sql.Table; + +/** + * Tests for {@link SingleQuerySqlGenerator}. 
+ * + * @author Jens Schauder + */ +class SingleQuerySqlGeneratorUnitTests { + + RelationalMappingContext context = new RelationalMappingContext(); + Dialect dialect = createDialect(); + + @Nested + class TrivialAggregateWithoutReferences extends AbstractTestFixture { + + TrivialAggregateWithoutReferences() { + super(TrivialAggregate.class); + } + + @Test // GH-1446 + void createSelectForFindAll() { + + String sql = sqlGenerator.findAll(persistentEntity); + + SqlAssert fullSelect = assertThatParsed(sql); + fullSelect.extractOrderBy().isEqualTo(alias("id") + ", rn"); + + SqlAssert baseSelect = fullSelect.hasInlineView(); + + baseSelect // + .hasExactlyColumns( // + col(rnAlias()).as("rn"), // + col(rnAlias()), // + col(alias("id")), // + col(alias("name")) // + ) // + .hasInlineViewSelectingFrom("\"trivial_aggregate\"") // + .hasExactlyColumns( // + lit(1).as(rnAlias()), // + lit(1).as(rcAlias()), // + col("\"id\"").as(alias("id")), // + col("\"name\"").as(alias("name")) // + ); + } + + @Test // GH-1446 + void createSelectForFindById() { + + Table table = Table.create(persistentEntity.getQualifiedTableName()); + String sql = sqlGenerator.findAll(persistentEntity, table.column("id").isEqualTo(Conditions.just(":id"))); + + SqlAssert baseSelect = assertThatParsed(sql).hasInlineView(); + + baseSelect // + .hasExactlyColumns( // + col(rnAlias()).as("rn"), // + col(rnAlias()), // + col(alias("id")), // + col(alias("name")) // + ) // + .hasInlineViewSelectingFrom("\"trivial_aggregate\"") // + .hasExactlyColumns( // + lit(1).as(rnAlias()), // + lit(1).as(rcAlias()), // + col("\"id\"").as(alias("id")), // + col("\"name\"").as(alias("name")) // + ) // + .extractWhereClause().isEqualTo("\"trivial_aggregate\".id = :id"); + } + + @Test // GH-1446 + void createSelectForFindAllById() { + + Table table = Table.create(persistentEntity.getQualifiedTableName()); + String sql = sqlGenerator.findAll(persistentEntity, table.column("id").in(Conditions.just(":ids"))); + + SqlAssert 
baseSelect = assertThatParsed(sql).hasInlineView(); + + baseSelect // + .hasExactlyColumns( // + col(rnAlias()).as("rn"), // + col(rnAlias()), // + col(alias("id")), // + col(alias("name")) // + ) // + .hasInlineViewSelectingFrom("\"trivial_aggregate\"") // + .hasExactlyColumns( // + lit(1).as(rnAlias()), // + lit(1).as(rcAlias()), // + col("\"id\"").as(alias("id")), // + col("\"name\"").as(alias("name")) // + ) // + .extractWhereClause().isEqualTo("\"trivial_aggregate\".id IN (:ids)"); + } + + } + + @Nested + class AggregateWithSingleReference extends AbstractTestFixture { + + private AggregateWithSingleReference() { + super(SingleReferenceAggregate.class); + } + + @Test // GH-1446 + void createSelectForFindById() { + + Table table = Table.create(persistentEntity.getQualifiedTableName()); + String sql = sqlGenerator.findAll(persistentEntity, table.column("id").isEqualTo(Conditions.just(":id"))); + + String rootRowNumber = rnAlias(); + String rootCount = rcAlias(); + String trivialsRowNumber = rnAlias("trivials"); + String backref = backRefAlias("trivials"); + String keyAlias = keyAlias("trivials"); + + SqlAssert baseSelect = assertThatParsed(sql).hasInlineView(); + + baseSelect // + .hasExactlyColumns( // + + col(rootRowNumber), // + col(alias("id")), // + col(alias("name")), // + col(trivialsRowNumber), // + col(alias("trivials.id")), // + col(alias("trivials.name")), // + func("greatest", func("coalesce", col(rootRowNumber), lit(1)), + func("coalesce", col(trivialsRowNumber), lit(1))), // + col(backref), // + col(keyAlias) // + ).extractWhereClause() // + .isEqualTo(""); + baseSelect.hasInlineViewSelectingFrom("\"single_reference_aggregate\"") // + .hasExactlyColumns( // + lit(1).as(rnAlias()), lit(1).as(rootCount), // + col("\"id\"").as(alias("id")), // + col("\"name\"").as(alias("name")) // + ) // + .extractWhereClause().isEqualTo("\"single_reference_aggregate\".id = :id"); + baseSelect.hasInlineViewSelectingFrom("\"trivial_aggregate\"") // + 
.hasExactlyColumns( // + rn(col("\"single_reference_aggregate\"")).as(trivialsRowNumber), // + count(col("\"single_reference_aggregate\"")).as(rcAlias("trivials")), // + col("\"id\"").as(alias("trivials.id")), // + col("\"name\"").as(alias("trivials.name")), // + col("\"single_reference_aggregate\"").as(backref), // + col("\"single_reference_aggregate_key\"").as(keyAlias) // + ).extractWhereClause().isEmpty(); + baseSelect.hasJoin().on(alias("id"), backref); + } + + } + + private AggregatePath path(Class type) { + return context.getAggregatePath(context.getRequiredPersistentEntity(type)); + } + + private AggregatePath path(Class type, String pathAsString) { + PersistentPropertyPath persistentPropertyPath = context + .getPersistentPropertyPath(pathAsString, type); + return context.getAggregatePath(persistentPropertyPath); + } + + private static Dialect createDialect() { + + return PostgresDialect.INSTANCE; + } + + record TrivialAggregate(@Id Long id, String name) { + } + + record SingleReferenceAggregate(@Id Long id, String name, List trivials) { + } + + private class AbstractTestFixture { + final Class aggregateRootType; + final SingleQuerySqlGenerator sqlGenerator; + final RelationalPersistentEntity persistentEntity; + final AliasFactory aliases; + + private AbstractTestFixture(Class aggregateRootType) { + + this.aggregateRootType = aggregateRootType; + this.persistentEntity = context.getRequiredPersistentEntity(aggregateRootType); + this.sqlGenerator = new SingleQuerySqlGenerator(context, new AliasFactory(), dialect); + this.aliases = sqlGenerator.getAliasFactory(); + } + + AggregatePath path() { + return SingleQuerySqlGeneratorUnitTests.this.path(aggregateRootType); + } + + AggregatePath path(String pathAsString) { + return SingleQuerySqlGeneratorUnitTests.this.path(aggregateRootType, pathAsString); + } + + protected String rnAlias() { + return aliases.getRowNumberAlias(path()); + } + + protected String rnAlias(String path) { + return 
aliases.getRowNumberAlias(path(path)); + } + + protected String rcAlias() { + return aliases.getRowCountAlias(path()); + } + + protected String rcAlias(String path) { + return aliases.getRowCountAlias(path(path)); + } + + protected String alias(String path) { + return aliases.getColumnAlias(path(path)); + } + + protected String backRefAlias(String path) { + return aliases.getBackReferenceAlias(path(path)); + } + + protected String keyAlias(String path) { + return aliases.getKeyAlias(path(path)); + } + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SqlAssert.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SqlAssert.java new file mode 100644 index 0000000000..fe2a34a5a6 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SqlAssert.java @@ -0,0 +1,239 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.JSQLParserException; +import net.sf.jsqlparser.expression.Expression; +import net.sf.jsqlparser.parser.CCJSqlParserUtil; +import net.sf.jsqlparser.schema.Table; +import net.sf.jsqlparser.statement.Statement; +import net.sf.jsqlparser.statement.select.FromItem; +import net.sf.jsqlparser.statement.select.Join; +import net.sf.jsqlparser.statement.select.OrderByElement; +import net.sf.jsqlparser.statement.select.PlainSelect; +import net.sf.jsqlparser.statement.select.Select; +import net.sf.jsqlparser.statement.select.SelectItem; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.assertj.core.api.AbstractAssert; +import org.assertj.core.api.StringAssert; +import org.assertj.core.util.Strings; +import org.junit.jupiter.api.Assertions; + +/** + * AssertJ {@link org.assertj.core.api.Assert} for writing assertions about SQL statements. 
+ * + * @author Jens Schauder + */ +class SqlAssert extends AbstractAssert { + private final PlainSelect actual; + + public SqlAssert(PlainSelect actual) { + super(actual, SqlAssert.class); + + this.actual = actual; + } + + static SqlAssert assertThatParsed(String actualSql) { + + try { + Statement parsed = CCJSqlParserUtil.parse(actualSql); + return new SqlAssert((PlainSelect) ((Select) parsed).getSelectBody()); + } catch (JSQLParserException e) { + Assertions.fail("Couldn't parse '%s'".formatted(actualSql), e); + } + + throw new IllegalStateException("This should be unreachable"); + } + + static LiteralPattern lit(Object value) { + return new LiteralPattern(value); + } + + static ColumnPattern col(String columnName) { + return new ColumnPattern(columnName); + } + + static AnalyticFunctionPattern rn(ExpressionPattern partitionBy) { + return new AnalyticFunctionPattern("row_number", partitionBy); + } + + static AnalyticFunctionPattern count(ExpressionPattern partitionBy) { + return new AnalyticFunctionPattern("count", partitionBy); + } + + static FunctionPattern func(String name, ExpressionPattern... params) { + return new FunctionPattern(name, params); + } + + static FunctionPattern func(String name, String... params) { + return new FunctionPattern(name, Arrays.stream(params).map(p -> col(p)).collect(Collectors.toList())); + } + + SqlAssert hasExactlyColumns(String... columns) { + + SelectItemPattern[] patterns = new SelectItemPattern[columns.length]; + + for (int i = 0; i < columns.length; i++) { + patterns[i] = col(columns[i]); + } + + return hasExactlyColumns(patterns); + } + + SqlAssert hasExactlyColumns(SelectItemPattern... 
columns) { + + List> actualSelectItems = actual.getSelectItems(); + List unmatchedPatterns = new ArrayList<>(Arrays.asList(columns)); + List unmatchedSelectItems = new ArrayList<>(); + + for (SelectItem selectItem : actualSelectItems) { + + SelectItemPattern matchedPattern = null; + for (SelectItemPattern column : unmatchedPatterns) { + if (column.matches(selectItem)) { + matchedPattern = column; + break; + } + } + + if (matchedPattern != null) { + unmatchedPatterns.remove(matchedPattern); + } else { + unmatchedSelectItems.add(selectItem); + } + } + + if (unmatchedPatterns.isEmpty() && unmatchedSelectItems.isEmpty()) { + return this; + } + + String preparedExpectedColumns = prepare(columns); + + if (unmatchedPatterns.isEmpty()) { + throw failureWithActualExpected(actual, preparedExpectedColumns, """ + Expected + %s + to select the columns + %s + but + %s + were not expected + """, actual, preparedExpectedColumns, unmatchedSelectItems); + } + if (unmatchedSelectItems.isEmpty()) { + throw failureWithActualExpected(actual, preparedExpectedColumns, """ + Expected + %s + to select the columns + %s + but + %s + were not present + """, actual, preparedExpectedColumns, unmatchedPatterns); + } + throw failureWithActualExpected(actual, preparedExpectedColumns, """ + Expected + %s + to select the columns + %s + but + %s + were not present and + %s + were not expected""", actual, preparedExpectedColumns, unmatchedPatterns, unmatchedSelectItems); + } + + public StringAssert extractWhereClause() { + Expression where = actual.getWhere(); + return new StringAssert(where == null ? 
"" : where.toString()); + } + + public JoinAssert hasJoin() { + List joins = actual.getJoins(); + + if (joins == null || joins.size() < 1) { + throw failureWithActualExpected(actual, "select with a join", "Expected %s to contain a join but it doesn't.", + actual); + } + + return new JoinAssert(joins.get(0)); + } + + private String prepare(SelectItemPattern[] columns) { + return Arrays.toString(columns); + } + + SqlAssert hasInlineViewSelectingFrom(String tableName) { + + Optional matchingSelect = getSubSelects(actual) + .filter(ps -> (ps.getFromItem() instanceof Table t) && t.getName().equals(tableName)).findFirst(); + + if (matchingSelect.isEmpty()) { + throw failureWithActualExpected(actual, "Subselect from " + tableName, + "%s is expected to contain a subselect selecting from %s but doesn't", actual, tableName); + } + + return new SqlAssert(matchingSelect.get()); + } + + public SqlAssert hasInlineView() { + Optional matchingSelect = getSubSelects(actual).findFirst(); + + if (matchingSelect.isEmpty()) { + throw failureWithActualExpected(actual, "Subselect", "%s is expected to contain a subselect", actual); + } + + return new SqlAssert(matchingSelect.get()); + } + + private static Stream getSubSelects(PlainSelect select) { + + FromItem fromItem = select.getFromItem(); + + Stream fromStream = subSelects(fromItem); + + return Stream.of(select).flatMap(s -> { + List joins = s.getJoins(); + if (joins == null) { + return fromStream; + } + + Stream joinStream = joins.stream() // + .map(j -> j.getRightItem()) // + .flatMap(ss -> subSelects(ss)); + return Stream.concat(fromStream, joinStream); + }); + } + + private static Stream subSelects(FromItem fromItem) { + + return fromItem instanceof Select ss ? Stream.of(ss.getPlainSelect()) : Stream.empty(); + } + + public StringAssert extractOrderBy() { + + List orderByElements = actual.getOrderByElements(); + return new StringAssert(orderByElements == null ? 
"" : Strings.join(orderByElements).with(", ")); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SqlAssertUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SqlAssertUnitTests.java new file mode 100644 index 0000000000..e0ca3065ee --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/SqlAssertUnitTests.java @@ -0,0 +1,297 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.relational.core.sqlgeneration.SqlAssert.*; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +/** + * Tests for SqlAssert. 
+ * @author Jens Schauder + */ +class SqlAssertUnitTests { + + @Test // GH-1446 + void givesProperNullPointerExceptionWhenSqlIsNull() { + assertThatThrownBy(() -> SqlAssert.assertThatParsed(null)).isInstanceOf(NullPointerException.class); + } + + @Nested + class AssertWhereClause { + @Test // GH-1446 + void assertWhereClause() { + SqlAssert.assertThatParsed("select x from t where z > y").extractWhereClause().isEqualTo("z > y"); + } + + @Test // GH-1446 + void assertNoWhereClause() { + SqlAssert.assertThatParsed("select x from t").extractWhereClause().isEmpty(); + } + + } + + @Nested + class AssertOrderByClause { + @Test // GH-1446 + void assertOrderByClause() { + SqlAssert.assertThatParsed("select x from t order by x, y").extractOrderBy().isEqualTo("x, y"); + } + + @Test // GH-1446 + void assertNoOrderByClause() { + SqlAssert.assertThatParsed("select x from t").extractOrderBy().isEmpty(); + } + + } + + @Nested + class AssertColumns { + @Test // GH-1446 + void matchingSimpleColumns() { + SqlAssert.assertThatParsed("select x, y, z from t").hasExactlyColumns("x", "y", "z"); + } + + @Test // GH-1446 + void extraSimpleColumn() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select x, y, z, a from t"); + + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns("x", "y", "z")) // + .hasMessageContaining("x, y, z") // + .hasMessageContaining("x, y, z, a") // + .hasMessageContaining("a"); + } + + @Test // GH-1446 + void missingSimpleColumn() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select x, y, z from t"); + + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns("x", "y", "z", "a")) // + .hasMessageContaining("x, y, z") // + .hasMessageContaining("x, y, z, a") // + .hasMessageContaining("a"); + } + + @Test // GH-1446 + void wrongSimpleColumn() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select x, y, z from t"); + + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns("x", "a", "z")) // + .hasMessageContaining("x, y, z") // + 
.hasMessageContaining("x, a, z") // + .hasMessageContaining("a") // + .hasMessageContaining("y"); + } + + @Test // GH-1446 + void matchesFullyQualifiedColumn() { + + SqlAssert.assertThatParsed("select t.x from t") // + .hasExactlyColumns("x"); + } + + @Test // GH-1446 + void matchesFunction() { // + + SqlAssert.assertThatParsed("select someFunc(x) from t") + .hasExactlyColumns(func("someFunc", col("x"))); + } + + @Test // GH-1446 + void matchesFunctionCaseInsensitive() { + + SqlAssert.assertThatParsed("select COUNT(x) from t") // + .hasExactlyColumns(func("count", col("x"))); + } + + @Test // GH-1446 + void matchFunctionFailsOnDifferentName() { + SqlAssert sqlAssert = assertThatParsed("select countx(x) from t"); + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns(func("count", col("x")))) // + .hasMessageContaining("countx(x)") // + .hasMessageContaining("count(x)"); + } + + @Test // GH-1446 + void matchFunctionFailsOnDifferentParameter() { + + SqlAssert sqlAssert = assertThatParsed("select count(y) from t"); + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns(func("count", col("x")))) // + .hasMessageContaining("count(y)") // + .hasMessageContaining("count(x)"); + } + + @Test // GH-1446 + void matchFunctionFailsOnWrongParameterCount() { + + SqlAssert sqlAssert = assertThatParsed("select count(x, y) from t"); + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns(func("count", col("x")))) // + .hasMessageContaining("count(x, y)") // + .hasMessageContaining("count(x)"); + } + } + + @Nested + class AssertRowNumber { + @Test // GH-1446 + void testMatchingRowNumber() { + + SqlAssert sqlAssert = assertThatParsed("select row_number() over (partition by x) from t"); + + sqlAssert.hasExactlyColumns(rn(col("x"))); + } + + @Test // GH-1446 + void testMatchingRowNumberUpperCase() { + + SqlAssert sqlAssert = assertThatParsed("select ROW_NUMBER() over (partition by x) from t"); + + sqlAssert.hasExactlyColumns(rn(col("x"))); + } + + @Test // GH-1446 + void 
testFailureNoRowNumber() { + + SqlAssert sqlAssert = assertThatParsed("select row_number as x from t"); + + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns(rn(col("x")))) // + .hasMessageContaining("row_number AS x") // + .hasMessageContaining("row_number() OVER (PARTITION BY x)"); + ; + } + + @Test // GH-1446 + void testFailureWrongPartitionBy() { + + SqlAssert sqlAssert = assertThatParsed("select row_number() over (partition by y) from t"); + + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns(rn(col("x")))) // + .hasMessageContaining("row_number() OVER (PARTITION BY y )") // + .hasMessageContaining("row_number() OVER (PARTITION BY x)"); + } + } + + @Nested + class AssertAliases { + @Test // GH-1446 + void simpleColumnMatchesWithAlias() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select x as a from t"); + + sqlAssert.hasExactlyColumns("x"); + } + + @Test // GH-1446 + void matchWithAlias() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select x as a from t"); + + sqlAssert.hasExactlyColumns(col("x").as("a")); + } + + @Test // GH-1446 + void matchWithWrongAlias() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select x as b from t"); + + assertThatThrownBy(() -> sqlAssert.hasExactlyColumns(col("x").as("a"))) // + .hasMessageContaining("x as a") // + .hasMessageContaining("x AS b"); + } + + @Test // GH-1446 + void matchesIdenticalColumnsWithDifferentAliases() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select 1 as x, 1 as y from t"); + + sqlAssert.hasExactlyColumns(lit(1).as("x"), lit(1).as("y")); + } + } + + @Nested + class AssertSubSelects { + @Test // GH-1446 + void subselectGetsFound() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select a from (select x as a from t) s"); + + sqlAssert // + .hasInlineViewSelectingFrom("t") // + .hasExactlyColumns(col("x").as("a")); + } + + @Test // GH-1446 + void subselectWithWrongTableDoesNotGetFound() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select a 
from (select x as a from u) s"); + + assertThatThrownBy(() -> sqlAssert // + .hasInlineViewSelectingFrom("t")) + .hasMessageContaining("is expected to contain a subselect selecting from t but doesn't"); + } + } + + @Nested + class AssertJoins { + @Test // GH-1446 + void hasJoin() { + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select c from t join s on x = y"); + + sqlAssert.hasJoin(); + } + + @Test // GH-1446 + void hasJoinFailure() { + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select c from t where x = y"); + + assertThatThrownBy(() -> sqlAssert // + .hasJoin()).hasMessageContaining("to contain a join but it doesn't"); + } + + @Test // GH-1446 + void on() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select c from t join s on x = y"); + + sqlAssert.hasJoin().on("x", "y"); + } + + @Test // GH-1446 + void onFailureFirst() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select c from t join s on z = y"); + + assertThatThrownBy(() -> sqlAssert.hasJoin().on("x", "y")) + .hasMessageContaining("z = y does not match expected x = y"); + } + + @Test // GH-1446 + void onFailureSecond() { + + SqlAssert sqlAssert = SqlAssert.assertThatParsed("select c from t join s on x = z"); + + assertThatThrownBy(() -> sqlAssert.hasJoin().on("x", "y")) + .hasMessageContaining("x = z does not match expected x = y"); + } + + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/TypedExpressionPattern.java b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/TypedExpressionPattern.java new file mode 100644 index 0000000000..606bb14a50 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/core/sqlgeneration/TypedExpressionPattern.java @@ -0,0 +1,53 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.core.sqlgeneration; + +import net.sf.jsqlparser.expression.Expression; +import net.sf.jsqlparser.statement.select.SelectItem; + +/** + * A {@link SelectItemPattern} that matches a specific type of expression + * + * @param the type of the expression that is matched by this pattern. + */ +abstract class TypedExpressionPattern implements SelectItemPattern, ExpressionPattern { + + private final Class type; + + TypedExpressionPattern(Class type) { + + this.type = type; + } + + @Override + public boolean matches(SelectItem selectItem) { + + Expression expression = selectItem.getExpression(); + return matches(expression); + } + + @Override + public boolean matches(Expression expression) { + + if (type.isInstance(expression)) { + return matches((T) expression); + } + return false; + } + + abstract boolean matches(T expression); +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/domain/SqlSortUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/domain/SqlSortUnitTests.java new file mode 100644 index 0000000000..2d65564fa1 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/domain/SqlSortUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.domain; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; + +/** + * Unit tests for {@link SqlSort} and + * {@link SqlSort.SqlOrder}. + * + * @author Jens Schauder + */ +class SqlSortUnitTests { + + @Test + void sortOfDirectionAndProperties() { + + SqlSort sort = SqlSort.of(Sort.Direction.DESC, "firstName", "lastName"); + + assertThat(sort).containsExactly( // + SqlSort.SqlOrder.desc("firstName"), // + SqlSort.SqlOrder.desc("lastName") // + ); + } + + @Test + void unsafeSortOfProperties() { + + SqlSort sort = SqlSort.unsafe("firstName", "lastName"); + + assertThat(sort).containsExactly( // + SqlSort.SqlOrder.by("firstName"), // + SqlSort.SqlOrder.by("lastName") // + ); + } + + @Test + void mixingDirections() { + + SqlSort sort = SqlSort.of("firstName").and(Sort.Direction.DESC, "lastName", "address"); + + assertThat(sort).containsExactly( // + SqlSort.SqlOrder.asc("firstName"), // + SqlSort.SqlOrder.desc("lastName"), // + SqlSort.SqlOrder.desc("address") // + ); + } + + @Test + void mixingDirectionsAndSafety() { + + SqlSort sort = SqlSort.of("firstName").andUnsafe(Sort.Direction.DESC, "lastName", "address"); + + assertThat(sort).containsExactly( // + SqlSort.SqlOrder.by("firstName"), // + SqlSort.SqlOrder.desc("lastName").withUnsafe(), // + 
SqlSort.SqlOrder.desc("address").withUnsafe() // + ); + } + + @Test + void orderDoesNotDependOnOrderOfMethodCalls() { + + assertThat( + SqlSort.SqlOrder.desc("property").ignoreCase().withUnsafe().with(Sort.NullHandling.NULLS_LAST)) + .isEqualTo(SqlSort.SqlOrder.by("property").with(Sort.NullHandling.NULLS_LAST).withUnsafe() + .ignoreCase().with(Sort.Direction.DESC)); + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/CriteriaFactoryUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/CriteriaFactoryUnitTests.java new file mode 100644 index 0000000000..6461423e57 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/CriteriaFactoryUnitTests.java @@ -0,0 +1,100 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.repository.query; + +import static org.assertj.core.api.Assertions.*; + + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.relational.core.query.Criteria; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.parser.Part; + +/** + * Unit tests for {@link CriteriaFactory}. + * + * @author Mark Paluch + * @author Daeho Kwon + */ +public class CriteriaFactoryUnitTests { + + @Test // DATAJDBC-539 + void shouldConsiderIterableValuesInInOperator() { + + QueryMethod queryMethod = getQueryMethod("findAllByNameIn", List.class); + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, Arrays.asList("foo", "bar")); + ParameterMetadataProvider parameterMetadata = new ParameterMetadataProvider(accessor); + CriteriaFactory criteriaFactory = new CriteriaFactory(parameterMetadata); + + Part part = new Part("NameIn", User.class); + + Criteria criteria = criteriaFactory.createCriteria(part); + + assertThat(criteria.getValue()).isEqualTo(Arrays.asList("foo", "bar")); + } + + @Test // DATAJDBC-539 + void shouldConsiderArrayValuesInInOperator() { + + QueryMethod queryMethod = getQueryMethod("findAllByNameIn", String[].class); + + RelationalParametersParameterAccessor accessor = getAccessor(queryMethod, + new Object[] { new String[] { "foo", "bar" } }); + ParameterMetadataProvider parameterMetadata = new ParameterMetadataProvider(accessor); + CriteriaFactory criteriaFactory = new CriteriaFactory(parameterMetadata); + + Part part = new Part("NameIn", User.class); + + Criteria criteria = criteriaFactory.createCriteria(part); + + 
assertThat(criteria.getValue()).isEqualTo(Arrays.asList("foo", "bar")); + } + + private QueryMethod getQueryMethod(String methodName, Class... parameterTypes) { + + Method method = null; + try { + method = UserRepository.class.getMethod(methodName, parameterTypes); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + return new QueryMethod(method, new DefaultRepositoryMetadata(UserRepository.class), + new SpelAwareProxyProjectionFactory(), RelationalParameters::new); + } + + private RelationalParametersParameterAccessor getAccessor(QueryMethod queryMethod, Object... values) { + return new RelationalParametersParameterAccessor(queryMethod, values); + } + + interface UserRepository extends Repository { + + User findAllByNameIn(List names); + + User findAllByNameIn(String[] names); + } + + static class User { + + String name; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/ParameterMetadataProviderUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/ParameterMetadataProviderUnitTests.java new file mode 100644 index 0000000000..7d79a06def --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/ParameterMetadataProviderUnitTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.repository.query; + +import static org.assertj.core.api.Assertions.*; + +import java.lang.reflect.Method; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.relational.core.dialect.Escaper; +import org.springframework.data.relational.core.query.ValueFunction; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.parser.PartTree; + +/** + * Unit tests for {@link ParameterMetadataProvider}. + * + * @author Mark Paluch + * @author Daeho Kwon + */ +public class ParameterMetadataProviderUnitTests { + + @Test // DATAJDBC-514 + public void shouldCreateValueFunctionForContains() throws Exception { + + ParameterMetadata metadata = getParameterMetadata("findByNameContains", "hell%o"); + + assertThat(metadata.getValue()).isInstanceOf(ValueFunction.class); + ValueFunction function = (ValueFunction) metadata.getValue(); + assertThat(function.apply(Escaper.DEFAULT)).isEqualTo("%hell\\%o%"); + } + + @Test // DATAJDBC-514 + public void shouldCreateValueFunctionForStartingWith() throws Exception { + + ParameterMetadata metadata = getParameterMetadata("findByNameStartingWith", "hell%o"); + + assertThat(metadata.getValue()).isInstanceOf(ValueFunction.class); + ValueFunction function = (ValueFunction) metadata.getValue(); + assertThat(function.apply(Escaper.DEFAULT)).isEqualTo("hell\\%o%"); + } + + @Test // DATAJDBC-514 + public void shouldCreateValue() throws Exception { + + ParameterMetadata metadata = getParameterMetadata("findByName", "hell%o"); + + assertThat(metadata.getValue()).isEqualTo("hell%o"); + } + + 
private ParameterMetadata getParameterMetadata(String methodName, Object value) throws Exception { + + Method method = UserRepository.class.getMethod(methodName, String.class); + ParameterMetadataProvider provider = new ParameterMetadataProvider(new RelationalParametersParameterAccessor( + new RelationalQueryMethod(method, new DefaultRepositoryMetadata(UserRepository.class), + new SpelAwareProxyProjectionFactory()), + new Object[] { value })); + + PartTree tree = new PartTree(methodName, User.class); + + return provider.next(tree.getParts().iterator().next()); + } + + static class RelationalQueryMethod extends QueryMethod { + + public RelationalQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory factory) { + super(method, metadata, factory, RelationalParameters::new); + } + } + + interface UserRepository extends Repository { + + String findByNameStartingWith(String prefix); + + String findByNameContains(String substring); + + String findByName(String substring); + } + + static class User { + String name; + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/RelationalExampleMapperTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/RelationalExampleMapperTests.java new file mode 100644 index 0000000000..a29ef24845 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/query/RelationalExampleMapperTests.java @@ -0,0 +1,425 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.domain.ExampleMatcher.*; +import static org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers.*; +import static org.springframework.data.domain.ExampleMatcher.StringMatcher.*; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.ExampleMatcher; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.query.Query; +import org.springframework.lang.Nullable; + +/** + * Verify that the {@link RelationalExampleMapper} properly turns {@link Example}s into {@link Query}'s. 
+ * + * @author Greg Turnquist + * @author Jens Schauder + */ +class RelationalExampleMapperTests { + + RelationalExampleMapper exampleMapper; + + @BeforeEach + void before() { + exampleMapper = new RelationalExampleMapper(new RelationalMappingContext()); + } + + @Test // GH-929 + void queryByExampleWithId() { + + Person person = new Person("id1", null, null, null, null, null); + + Example example = Example.of(person); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Objects::toString) // + .hasValue("(id = 'id1')"); + } + + @Test // GH-929 + void queryByExampleWithFirstname() { + + Person person = new Person(null, "Frodo", null, null, null, null); + + Example example = Example.of(person); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname = 'Frodo')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameAndLastname() { + + Person person = new Person(null, "Frodo", "Baggins", null, null, null); + + Example example = Example.of(person); + + Query query = exampleMapper.getMappedExample(example); + assertThat(query.getCriteria().map(Object::toString).get()) // + .contains("(firstname = 'Frodo')", // + " AND ", // + "(lastname = 'Baggins')"); + } + + @Test // GH-929 + void queryByExampleWithNullMatchingLastName() { + + Person person = new Person(null, null, "Baggins", null, null, null); + + ExampleMatcher matcher = matching().withIncludeNullValues(); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(lastname IS NULL OR lastname = 'Baggins')"); + } + + @Test // GH-929 + void queryByExampleWithNullMatchingFirstnameAndLastname() { + + Person person = new Person(null, "Bilbo", "Baggins", null, null, null); + + ExampleMatcher matcher = matching().withIncludeNullValues(); + 
Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + assertThat(query.getCriteria().map(Object::toString).get()) // + .contains("(firstname IS NULL OR firstname = 'Bilbo')", // + " AND ", // + "(lastname IS NULL OR lastname = 'Baggins')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameAndLastnameIgnoringFirstname() { + + Person person = new Person(null, "Bilbo", "Baggins", null, null, null); + + ExampleMatcher matcher = matching().withIgnorePaths("firstname"); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(lastname = 'Baggins')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameAndLastnameWithNullMatchingIgnoringFirstName() { + + Person person = new Person(null, "Bilbo", "Baggins", null, null, null); + + ExampleMatcher matcher = matching().withIncludeNullValues().withIgnorePaths("firstname"); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(lastname IS NULL OR lastname = 'Baggins')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingAtTheBeginning() { + + Person person = new Person(null, "Fro", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(STARTING); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE 'Fro%')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingOnTheEnding() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(ENDING); + Example example = Example.of(person, matcher); + + Query query = 
exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE '%do')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingContaining() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(CONTAINING); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE '%do%')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingRegEx() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(ExampleMatcher.StringMatcher.REGEX); + Example example = Example.of(person, matcher); + + assertThatIllegalStateException().isThrownBy(() -> exampleMapper.getMappedExample(example)) + .withMessageContaining("REGEX is not supported"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithFieldSpecificStringMatcherEndsWith() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withMatcher("firstname", endsWith()); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE '%do')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithFieldSpecificStringMatcherStartsWith() { + + Person person = new Person(null, "Fro", null, null, null, null); + + ExampleMatcher matcher = matching().withMatcher("firstname", startsWith()); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE 'Fro%')"); + } + + @Test // GH-929 + void 
queryByExampleWithFirstnameWithFieldSpecificStringMatcherContains() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withMatcher("firstname", contains()); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE '%do%')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingAtTheBeginningIncludingNull() { + + Person person = new Person(null, "Fro", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(STARTING).withIncludeNullValues(); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname IS NULL OR firstname LIKE 'Fro%')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingOnTheEndingIncludingNull() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(ENDING).withIncludeNullValues(); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname IS NULL OR firstname LIKE '%do')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameIgnoreCaseFieldLevel() { + + Person person = new Person(null, "fro", null, null, null, null); + + ExampleMatcher matcher = matching().withMatcher("firstname", startsWith().ignoreCase()); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname LIKE 'fro%')"); + + 
assertThat(example.getMatcher().getPropertySpecifiers().getForPath("firstname").getIgnoreCase()).isTrue(); + } + + @Test // GH-929 + void queryByExampleWithFirstnameWithStringMatchingContainingIncludingNull() { + + Person person = new Person(null, "do", null, null, null, null); + + ExampleMatcher matcher = matching().withStringMatcher(CONTAINING).withIncludeNullValues(); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname IS NULL OR firstname LIKE '%do%')"); + } + + @Test // GH-929 + void queryByExampleWithFirstnameIgnoreCase() { + + Person person = new Person(null, "Frodo", null, null, null, null); + + ExampleMatcher matcher = matching().withIgnoreCase(true); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria()) // + .map(Object::toString) // + .hasValue("(firstname = 'Frodo')"); + + assertThat(example.getMatcher().isIgnoreCaseEnabled()).isTrue(); + } + + @Test // GH-929 + void queryByExampleWithFirstnameOrLastname() { + + Person person = new Person(null, "Frodo", "Baggins", null, null, null); + + ExampleMatcher matcher = matchingAny(); + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + assertThat(query.getCriteria().map(Object::toString).get()) // + .contains("(firstname = 'Frodo')", // + " OR ", // + "(lastname = 'Baggins')"); + } + + @Test // GH-929 + void queryByExampleEvenHandlesInvisibleFields() { + + Person person = new Person(null, "Frodo", null, "I have the ring!", null, null); + + Example example = Example.of(person); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria().map(Object::toString).get()) // + .contains("(firstname = 'Frodo')", // + " AND ", // + "(secret = 'I have the ring!')"); + } + + @Test // GH-929 + void 
queryByExampleSupportsPropertyTransforms() { + + Person person = new Person(null, "Frodo", "Baggins", "I have the ring!", null, null); + + ExampleMatcher matcher = matching() // + .withTransformer("firstname", o -> { + if (o.isPresent()) { + return o.map(o1 -> ((String) o1).toUpperCase()); + } + return o; + }) // + .withTransformer("lastname", o -> { + if (o.isPresent()) { + return o.map(o1 -> ((String) o1).toLowerCase()); + } + return o; + }); + + Example example = Example.of(person, matcher); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria().map(Object::toString).get()) // + .contains("(firstname = 'FRODO')", // + " AND ", // + "(lastname = 'baggins')", // + "(secret = 'I have the ring!')"); + } + + @Test // GH-1969 + void collectionLikeAttributesGetIgnored() { + + Example example = Example.of(new Person(null, "Frodo", null, null, List.of(new Possession("Ring")), null)); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria().orElseThrow().toString()).doesNotContainIgnoringCase("possession"); + } + + @Test // GH-1969 + void mapAttributesGetIgnored() { + + Example example = Example + .of(new Person(null, "Frodo", null, null, null, Map.of("Home", new Address("Bag End")))); + + Query query = exampleMapper.getMappedExample(example); + + assertThat(query.getCriteria().orElseThrow().toString()).doesNotContainIgnoringCase("address"); + } + + record Person(@Id @Nullable String id, @Nullable String firstname, @Nullable String lastname, @Nullable String secret, + @Nullable List possessions, @Nullable Map addresses) { + } + + record Possession(String name) { + } + + record Address(String description) { + } +} diff --git a/spring-data-relational/src/test/java/org/springframework/data/relational/repository/support/TableNameQueryPreprocessorUnitTests.java b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/support/TableNameQueryPreprocessorUnitTests.java new file 
mode 100644 index 0000000000..d1e6fdbc96 --- /dev/null +++ b/spring-data-relational/src/test/java/org/springframework/data/relational/repository/support/TableNameQueryPreprocessorUnitTests.java @@ -0,0 +1,46 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.relational.repository.support; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Test; +import org.springframework.data.relational.core.dialect.AnsiDialect; +import org.springframework.data.relational.core.sql.SqlIdentifier; + +/** + * Tests for {@link TableNameQueryPreprocessor}. 
+ * + * @author Jens Schauder + */ +class TableNameQueryPreprocessorUnitTests { + + @Test // GH-1856 + void transform() { + + TableNameQueryPreprocessor preprocessor = new TableNameQueryPreprocessor(SqlIdentifier.quoted("some_table_name"), SqlIdentifier.quoted("qualified_table_name"), AnsiDialect.INSTANCE); + SoftAssertions.assertSoftly(softly -> { + + softly.assertThat(preprocessor.transform("someString")).isEqualTo("someString"); + softly.assertThat(preprocessor.transform("someString#{#tableName}restOfString")) + .isEqualTo("someString\"some_table_name\"restOfString"); + softly.assertThat(preprocessor.transform("select from #{#tableName} where x = :#{#some other spel}")) + .isEqualTo("select from \"some_table_name\" where x = :#{#some other spel}"); + softly.assertThat(preprocessor.transform("select from #{#qualifiedTableName}")) + .isEqualTo("select from \"qualified_table_name\""); + }); + } +} diff --git a/spring-data-relational/src/test/kotlin/org/springframework/data/relational/core/query/CriteriaStepExtensionsTests.kt b/spring-data-relational/src/test/kotlin/org/springframework/data/relational/core/query/CriteriaStepExtensionsTests.kt new file mode 100644 index 0000000000..748125423c --- /dev/null +++ b/spring-data-relational/src/test/kotlin/org/springframework/data/relational/core/query/CriteriaStepExtensionsTests.kt @@ -0,0 +1,75 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.relational.core.query + +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test + +/** + * Unit tests for [Criteria.CriteriaStep] extensions. + * + * @author Juan Medina + */ +class CriteriaStepExtensionsTests { + + @Test // DATAJDBC-522 + fun eqIsCriteriaStep(){ + + val spec = mockk() + val criteria = mockk() + + every { spec.`is`("test") } returns criteria + + assertThat(spec isEqual "test").isEqualTo(criteria) + + verify { + spec.`is`("test") + } + } + + @Test // DATAJDBC-522 + fun inVarargCriteriaStep() { + + val spec = mockk() + val criteria = mockk() + + every { spec.`in`(any() as Array) } returns criteria + + assertThat(spec.isIn("test")).isEqualTo(criteria) + + verify { + spec.`in`(arrayOf("test")) + } + } + + @Test // DATAJDBC-522 + fun inListCriteriaStep() { + + val spec = mockk() + val criteria = mockk() + + every { spec.`in`(listOf("test")) } returns criteria + + assertThat(spec.isIn(listOf("test"))).isEqualTo(criteria) + + verify { + spec.`in`(listOf("test")) + } + } +} diff --git a/spring-data-relational/src/test/resources/logback.xml b/spring-data-relational/src/test/resources/logback.xml new file mode 100644 index 0000000000..12e9683d43 --- /dev/null +++ b/spring-data-relational/src/test/resources/logback.xml @@ -0,0 +1,16 @@ + + + + + + %d %5p %40.40c:%4L - %m%n + + + + + + + + + + diff --git a/src/main/antora/antora-playbook.yml b/src/main/antora/antora-playbook.yml new file mode 100644 index 0000000000..b9ff43cc73 --- /dev/null +++ b/src/main/antora/antora-playbook.yml @@ -0,0 +1,40 @@ +# PACKAGES antora@3.2.0-alpha.2 @antora/atlas-extension:1.0.0-alpha.1 @antora/collector-extension@1.0.0-alpha.3 @springio/antora-extensions@1.1.0-alpha.2 @asciidoctor/tabs@1.0.0-alpha.12 @opendevise/antora-release-line-extension@1.0.0-alpha.2 +# +# The purpose of this Antora playbook is to build the docs in the current branch. 
+antora: + extensions: + - require: '@springio/antora-extensions' + root_component_name: 'data-relational' +site: + title: Spring Data Relational + url: https://docs.spring.io/spring-data/relational/reference/ +content: + sources: + - url: ./../../.. + branches: HEAD + start_path: src/main/antora + worktrees: true + - url: https://github.com/spring-projects/spring-data-commons + # Refname matching: + # https://docs.antora.org/antora/latest/playbook/content-refname-matching/ + branches: [ main, 3.2.x ] + start_path: src/main/antora +asciidoc: + attributes: + hide-uri-scheme: '@' + tabs-sync-option: '@' + extensions: + - '@asciidoctor/tabs' + - '@springio/asciidoctor-extensions' + - '@springio/asciidoctor-extensions/javadoc-extension' + sourcemap: true +urls: + latest_version_segment: '' +runtime: + log: + failure_level: warn + format: pretty +ui: + bundle: + url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.16/ui-bundle.zip + snapshot: true diff --git a/src/main/antora/antora.yml b/src/main/antora/antora.yml new file mode 100644 index 0000000000..cb3083f785 --- /dev/null +++ b/src/main/antora/antora.yml @@ -0,0 +1,17 @@ +name: data-relational +version: true +title: Spring Data Relational +nav: + - modules/ROOT/nav.adoc +ext: + collector: + - run: + command: ./mvnw validate process-resources -pl :spring-data-jdbc-distribution -am -Pantora-process-resources + local: true + scan: + dir: spring-data-jdbc-distribution/target/classes/ + - run: + command: ./mvnw package -Pdistribute + local: true + scan: + dir: target/antora diff --git a/src/main/antora/modules/ROOT/examples/r2dbc b/src/main/antora/modules/ROOT/examples/r2dbc new file mode 120000 index 0000000000..498306e099 --- /dev/null +++ b/src/main/antora/modules/ROOT/examples/r2dbc @@ -0,0 +1 @@ +../../../../../../spring-data-r2dbc/src/test/java/org/springframework/data/r2dbc/documentation \ No newline at end of file diff --git a/src/main/antora/modules/ROOT/nav.adoc 
b/src/main/antora/modules/ROOT/nav.adoc new file mode 100644 index 0000000000..b139edc8fc --- /dev/null +++ b/src/main/antora/modules/ROOT/nav.adoc @@ -0,0 +1,56 @@ +* xref:index.adoc[Overview] +** xref:commons/upgrade.adoc[] + +* xref:repositories/introduction.adoc[] +** xref:repositories/core-concepts.adoc[] +** xref:repositories/definition.adoc[] +** xref:repositories/create-instances.adoc[] +** xref:repositories/query-methods-details.adoc[] +** xref:repositories/projections.adoc[] +** xref:object-mapping.adoc[] +** xref:commons/custom-conversions.adoc[] +** xref:repositories/custom-implementations.adoc[] +** xref:repositories/core-extensions.adoc[] +** xref:value-expressions.adoc[] +** xref:query-by-example.adoc[] +** xref:repositories/core-domain-events.adoc[] +** xref:commons/entity-callbacks.adoc[] +** xref:repositories/null-handling.adoc[] +** xref:repositories/query-keywords-reference.adoc[] +** xref:repositories/query-return-types-reference.adoc[] + +* xref:jdbc.adoc[] +** xref:jdbc/why.adoc[] +** xref:jdbc/domain-driven-design.adoc[] +** xref:jdbc/getting-started.adoc[] +** xref:jdbc/entity-persistence.adoc[] +** xref:jdbc/sequences.adoc[] +** xref:jdbc/mapping.adoc[] +** xref:jdbc/query-methods.adoc[] +** xref:jdbc/mybatis.adoc[] +** xref:jdbc/events.adoc[] +** xref:jdbc/auditing.adoc[] +** xref:jdbc/transactions.adoc[] +** xref:jdbc/schema-support.adoc[] + +* xref:r2dbc.adoc[] +** xref:r2dbc/getting-started.adoc[] +** xref:r2dbc/entity-persistence.adoc[] +** xref:r2dbc/sequences.adoc[] +** xref:r2dbc/mapping.adoc[] +** xref:r2dbc/repositories.adoc[] +** xref:r2dbc/query-methods.adoc[] +** xref:r2dbc/entity-callbacks.adoc[] +** xref:r2dbc/auditing.adoc[] +** xref:r2dbc/kotlin.adoc[] +** xref:r2dbc/migration-guide.adoc[] + +* xref:kotlin.adoc[] +** xref:kotlin/requirements.adoc[] +** xref:kotlin/null-safety.adoc[] +** xref:kotlin/object-mapping.adoc[] +** xref:kotlin/extensions.adoc[] +** xref:kotlin/coroutines.adoc[] + +* 
xref:attachment$api/java/index.html[Javadoc,role=link-external,window=_blank] +* https://github.com/spring-projects/spring-data-commons/wiki[Wiki,role=link-external,window=_blank] diff --git a/src/main/antora/modules/ROOT/pages/commons/criteria-methods.adoc b/src/main/antora/modules/ROOT/pages/commons/criteria-methods.adoc new file mode 100644 index 0000000000..8e965b2f86 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/commons/criteria-methods.adoc @@ -0,0 +1,19 @@ +=== Methods for the Criteria Class + +The `Criteria` class provides the following methods, all of which correspond to SQL operators: + +* `Criteria` *and* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one. +* `Criteria` *or* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one. +* `Criteria` *greaterThan* `(Object o)`: Creates a criterion by using the `>` operator. +* `Criteria` *greaterThanOrEquals* `(Object o)`: Creates a criterion by using the `>=` operator. +* `Criteria` *in* `(Object... o)`: Creates a criterion by using the `IN` operator for a varargs argument. +* `Criteria` *in* `(Collection collection)`: Creates a criterion by using the `IN` operator using a collection. +* `Criteria` *is* `(Object o)`: Creates a criterion by using column matching (`property = value`). +* `Criteria` *isNull* `()`: Creates a criterion by using the `IS NULL` operator. +* `Criteria` *isNotNull* `()`: Creates a criterion by using the `IS NOT NULL` operator. +* `Criteria` *lessThan* `(Object o)`: Creates a criterion by using the `<` operator. +* `Criteria` *lessThanOrEquals* `(Object o)`: Creates a criterion by using the `<=` operator. +* `Criteria` *like* `(Object o)`: Creates a criterion by using the `LIKE` operator without escape character processing. +* `Criteria` *not* `(Object o)`: Creates a criterion by using the `!=` operator. 
+* `Criteria` *notIn* `(Object... o)`: Creates a criterion by using the `NOT IN` operator for a varargs argument. +* `Criteria` *notIn* `(Collection collection)`: Creates a criterion by using the `NOT IN` operator using a collection. diff --git a/src/main/antora/modules/ROOT/pages/commons/custom-conversions.adoc b/src/main/antora/modules/ROOT/pages/commons/custom-conversions.adoc new file mode 100644 index 0000000000..063a04903d --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/commons/custom-conversions.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$custom-conversions.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/commons/entity-callbacks.adoc b/src/main/antora/modules/ROOT/pages/commons/entity-callbacks.adoc new file mode 100644 index 0000000000..90656a3062 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/commons/entity-callbacks.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$entity-callbacks.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc new file mode 100644 index 0000000000..51a9189aa0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$upgrade.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/index.adoc b/src/main/antora/modules/ROOT/pages/index.adoc new file mode 100644 index 0000000000..800a5aed01 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/index.adoc @@ -0,0 +1,21 @@ +[[spring-data-jpa-reference-documentation]] += Spring Data JDBC and R2DBC +:revnumber: {version} +:revdate: {localdate} +:feature-scroll: true + +_Spring Data JDBC and R2DBC provide repository support for the Java Database Connectivity (JDBC) respective Reactive Relational Database Connectivity (R2DBC) APIs. 
+It eases development of applications that need to access SQL data sources, with a consistent programming model._ + +[horizontal] +xref:repositories/introduction.adoc[Introduction] :: Introduction to Repositories +xref:jdbc.adoc[JDBC] :: JDBC Object Mapping and Repositories +xref:r2dbc.adoc[R2DBC] :: R2DBC Object Mapping and Repositories +xref:kotlin.adoc[Kotlin] :: Kotlin-specific Support +https://github.com/spring-projects/spring-data-commons/wiki[Wiki] :: What's New, Upgrade Notes, Supported Versions, additional cross-version information. + +Jens Schauder, Jay Bryant, Mark Paluch, Bastian Wilhelm + +(C) 2008-{copyright-year} VMware, Inc. + +Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. diff --git a/src/main/antora/modules/ROOT/pages/jdbc.adoc b/src/main/antora/modules/ROOT/pages/jdbc.adoc new file mode 100644 index 0000000000..358b7c42bc --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc.adoc @@ -0,0 +1,16 @@ +[[jdbc.repositories]] += JDBC +:page-section-summary-toc: 1 + +The Spring Data JDBC module applies core Spring concepts to the development of solutions that use JDBC database drivers aligned with xref:jdbc/domain-driven-design.adoc[Domain-driven design principles]. +We provide a "`template`" as a high-level abstraction for storing and querying aggregates. + +This document is the reference guide for Spring Data JDBC support. +It explains the concepts, semantics, and syntax. + +This chapter points out the specialties for repository support for JDBC. +This builds on the core repository support explained in xref:repositories/introduction.adoc[Working with Spring Data Repositories]. +You should have a sound understanding of the basic concepts explained there.
+ + + diff --git a/src/main/antora/modules/ROOT/pages/jdbc/auditing.adoc b/src/main/antora/modules/ROOT/pages/jdbc/auditing.adoc new file mode 100644 index 0000000000..317347e23e --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/auditing.adoc @@ -0,0 +1,23 @@ +[[jdbc.auditing]] += Auditing +:page-section-summary-toc: 1 + +In order to activate auditing, add `@EnableJdbcAuditing` to your configuration, as the following example shows: + +.Activating auditing with Java configuration +[source,java] +---- +@Configuration +@EnableJdbcAuditing +class Config { + + @Bean + AuditorAware auditorProvider() { + return new AuditorAwareImpl(); + } +} +---- + +If you expose a bean of type `AuditorAware` to the `ApplicationContext`, the auditing infrastructure automatically picks it up and uses it to determine the current user to be set on domain types. +If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableJdbcAuditing`. + diff --git a/src/main/antora/modules/ROOT/pages/jdbc/domain-driven-design.adoc b/src/main/antora/modules/ROOT/pages/jdbc/domain-driven-design.adoc new file mode 100644 index 0000000000..1682e51b23 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/domain-driven-design.adoc @@ -0,0 +1,26 @@ +[[jdbc.domain-driven-design]] += Domain Driven Design and Relational Databases + +All Spring Data modules are inspired by the concepts of "`repository`", "`aggregate`", and "`aggregate root`" from Domain Driven Design. +These are possibly even more important for Spring Data JDBC, because they are, to some extent, contrary to normal practice when working with relational databases. + +An aggregate is a group of entities that is guaranteed to be consistent between atomic changes to it. +A classic example is an `Order` with `OrderItems`. 
+A property on `Order` (for example, `numberOfItems` is consistent with the actual number of `OrderItems`) remains consistent as changes are made. + +References across aggregates are not guaranteed to be consistent at all times. +They are guaranteed to become consistent eventually. + +Each aggregate has exactly one aggregate root, which is one of the entities of the aggregate. +The aggregate gets manipulated only through methods on that aggregate root. +These are the atomic changes mentioned earlier. + +A repository is an abstraction over a persistent store that looks like a collection of all the aggregates of a certain type. +For Spring Data in general, this means you want to have one `Repository` per aggregate root. +In addition, for Spring Data JDBC this means that all entities reachable from an aggregate root are considered to be part of that aggregate root. +Spring Data JDBC assumes that only the aggregate has a foreign key to a table storing non-root entities of the aggregate and no other entity points toward non-root entities. + +WARNING: In the current implementation, entities referenced from an aggregate root are deleted and recreated by Spring Data JDBC. + +You can overwrite the repository methods with implementations that match your style of working and designing your database. + diff --git a/src/main/antora/modules/ROOT/pages/jdbc/entity-persistence.adoc b/src/main/antora/modules/ROOT/pages/jdbc/entity-persistence.adoc new file mode 100644 index 0000000000..4ead95ff58 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/entity-persistence.adoc @@ -0,0 +1,105 @@ +[[jdbc.entity-persistence]] += Persisting Entities + +Saving an aggregate can be performed with the `CrudRepository.save(…)` method. +If the aggregate is new, this results in an insert for the aggregate root, followed by insert statements for all directly or indirectly referenced entities. 
+ +If the aggregate root is not new, all referenced entities get deleted, the aggregate root gets updated, and all referenced entities get inserted again. +Note that whether an instance is new is part of the instance's state. + +NOTE: This approach has some obvious downsides. +If only a few of the referenced entities have actually changed, the deletion and insertion is wasteful. +While this process could and probably will be improved, there are certain limitations to what Spring Data JDBC can offer. +It does not know the previous state of an aggregate. +So any update process always has to take whatever it finds in the database and make sure it converts it to whatever is the state of the entity passed to the save method. + +See also xref:repositories/core-concepts.adoc#is-new-state-detection[Entity State Detection] for further details. + +[[jdbc.loading-aggregates]] +== Loading Aggregates + +Spring Data JDBC offers two ways to load aggregates: + +. The traditional way, and before version 3.2 the only way, is really simple: +Each query loads the aggregate roots, independently of whether the query is based on a `CrudRepository` method, a derived query, or an annotated query. +If the aggregate root references other entities, those are loaded with separate statements. + +. Spring Data JDBC 3.2 allows the use of _Single Query Loading_. +With this an arbitrary number of aggregates can be fully loaded with a single SQL query. +This should be significantly more efficient, especially for complex aggregates, consisting of many entities. ++ +Currently, Single Query Loading is restricted in different ways: + +1. The aggregate must not have nested collections; this includes `Map`. +The plan is to remove this constraint in the future. + +2. The aggregate must not use `AggregateReference` or embedded entities. +The plan is to remove this constraint in the future. + +3. The database dialect must support it. Of the dialects provided by Spring Data JDBC, all but H2 and HSQL support this.
+H2 and HSQL don't support analytic functions (aka windowing functions). + +4. It only works for the find methods in `CrudRepository`, not for derived queries and not for annotated queries. +The plan is to remove this constraint in the future. + +5. Single Query Loading needs to be enabled in the `JdbcMappingContext`, by calling `setSingleQueryLoadingEnabled(true)`. + +If any condition is not fulfilled, Spring Data JDBC falls back to the default approach of loading aggregates. + +NOTE: Single Query Loading is to be considered experimental. +We appreciate feedback on how it works for you. + +NOTE: While Single Query Loading can be abbreviated as SQL, we highly discourage doing so since confusion with Structured Query Language is almost guaranteed. + +include::partial$id-generation.adoc[] + +[[jdbc.template]] +== Template API + +As an alternative to repositories, Spring Data JDBC offers the javadoc:org.springframework.data.jdbc.core.JdbcAggregateTemplate[] as a more direct means to load and persist entities in a relational database. +To a large extent, repositories use `JdbcAggregateTemplate` to implement their features. + +This section highlights only the most interesting parts of the `JdbcAggregateTemplate`. +For a more complete overview, see the JavaDoc of `JdbcAggregateTemplate`. + +=== Accessing the JdbcAggregateTemplate + +`JdbcAggregateTemplate` is intended to be used as a Spring bean. +If you have set up your application to include Spring Data JDBC, you can configure a dependency on `JdbcAggregateTemplate` in any Spring bean, and the Spring Framework injects a properly configured instance. + +This includes fragments you use to implement custom methods for your Spring Data Repositories, letting you use `JdbcAggregateTemplate` to customize and extend your repositories. + +=== Persisting + +`JdbcAggregateTemplate` offers three types of methods for persisting entities: `save`, `insert`, and `update`.
+Each comes in two flavors: +Operating on single aggregates, named exactly as mentioned above, and with an `All` suffix operation on an `Iterable`. + +`save` does the same as the method of same name in a repository. + +`insert` and `update` skip the test if the entity is new and assume a new or existing aggregate as indicated by their name. + +=== Querying + +`JdbcAggregateTemplate` offers a considerable array of methods for querying aggregates and about collections of aggregates. +There is one type of method that requires special attention. +That's the methods taking a `Query` as an argument. +They allow the execution of programmatically constructed queries, as follows: + +[source,java] +---- +template.findOne(query(where("name").is("Gandalf")), Person.class); +---- + +The javadoc:org.springframework.data.relational.core.query.Query[] returned by the `query` method defines the list of columns to select, a where clause (through a CriteriaDefinition), and specification of limit and offset clauses. +For details of the `Query` class, see its JavaDoc. + +The javadoc:org.springframework.data.relational.core.query.Criteria[] class, of which `where` is a static member, provides implementations of org.springframework.data.relational.core.query.CriteriaDefinition[], which represent the where-clause of the query. + +[[jdbc.criteria]] +include::../commons/criteria-methods.adoc[] + +[[jdbc.entity-persistence.optimistic-locking]] +== Optimistic Locking + +include::partial$optimistic-locking.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/jdbc/events.adoc b/src/main/antora/modules/ROOT/pages/jdbc/events.adoc new file mode 100644 index 0000000000..8123025191 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/events.adoc @@ -0,0 +1,110 @@ +[[jdbc.events]] += Lifecycle Events + +Spring Data JDBC publishes lifecycle events to `ApplicationListener` objects, typically beans in the application context. +Events are notifications about a certain lifecycle phase. 
+In contrast to entity callbacks, events are intended for notification. +Transactional listeners will receive events when the transaction completes. +Events and callbacks get only triggered for aggregate roots. +If you want to process non-root entities, you need to do that through a listener for the containing aggregate root. + +Entity lifecycle events can be costly, and you may notice a change in the performance profile when loading large result sets. +You can disable lifecycle events on javadoc:org.springframework.data.jdbc.core.JdbcAggregateTemplate#setEntityLifecycleEventsEnabled(boolean)[Template API]. + +For example, the following listener gets invoked before an aggregate gets saved: + +[source,java] +---- +@Bean +ApplicationListener> loggingSaves() { + + return event -> { + + Object entity = event.getEntity(); + LOG.info("{} is getting saved.", entity); + }; +} +---- + +If you want to handle events only for a specific domain type you may derive your listener from `AbstractRelationalEventListener` and overwrite one or more of the `onXXX` methods, where `XXX` stands for an event type. +Callback methods will only get invoked for events related to the domain type and their subtypes, therefore you don't require further casting. + +[source,java] +---- +class PersonLoadListener extends AbstractRelationalEventListener { + + @Override + protected void onAfterLoad(AfterLoadEvent personLoad) { + LOG.info(personLoad.getEntity()); + } +} +---- + +The following table describes the available events.For more details about the exact relation between process steps see the link:#jdbc.entity-callbacks[description of available callbacks] which map 1:1 to events. + +.Available events +|=== +| Event | When It Is Published + +| javadoc:org.springframework.data.relational.core.mapping.event.BeforeDeleteEvent[] +| Before an aggregate root gets deleted. + +| javadoc:org.springframework.data.relational.core.mapping.event.AfterDeleteEvent[] +| After an aggregate root gets deleted. 
+ +| javadoc:org.springframework.data.relational.core.mapping.event.BeforeConvertEvent[] +| Before an aggregate root gets converted into a plan for executing SQL statements, but after the decision was made if the aggregate is new or not, i.e. if an update or an insert is in order. + +| javadoc:org.springframework.data.relational.core.mapping.event.BeforeSaveEvent[] +| Before an aggregate root gets saved (that is, inserted or updated, but after the decision about whether it gets inserted or updated was made). + +| javadoc:org.springframework.data.relational.core.mapping.event.AfterSaveEvent[] +| After an aggregate root gets saved (that is, inserted or updated). + +| javadoc:org.springframework.data.relational.core.mapping.event.AfterConvertEvent[] +| After an aggregate root gets created from a database `ResultSet` and all its properties get set. +|=== + +WARNING: Lifecycle events depend on an `ApplicationEventMulticaster`, which in case of the `SimpleApplicationEventMulticaster` can be configured with a `TaskExecutor`, and therefore gives no guarantees when an Event is processed. + + +[[jdbc.entity-callbacks]] +== Store-specific EntityCallbacks + +Spring Data JDBC uses the xref:commons/entity-callbacks.adoc[`EntityCallback` API] for its auditing support and reacts on the callbacks listed in the following table. + +.Process Steps and Callbacks of the Different Processes performed by Spring Data JDBC. +|=== +| Process | `EntityCallback` / Process Step | Comment + +.3+| Delete | javadoc:org.springframework.data.relational.core.mapping.event.BeforeDeleteCallback[] +| Before the actual deletion. + +2+| The aggregate root and all the entities of that aggregate get removed from the database. + +| javadoc:org.springframework.data.relational.core.mapping.event.AfterDeleteCallback[] +| After an aggregate gets deleted. + + +.6+| Save 2+| Determine if an insert or an update of the aggregate is to be performed dependent on if it is new or not.
+| javadoc:org.springframework.data.relational.core.mapping.event.BeforeConvertCallback[] +| This is the correct callback if you want to set an id programmatically. In the previous step new aggregates got detected as such, and an Id generated in this step would be used in the following step. + +2+| Convert the aggregate to an aggregate change, which is a sequence of SQL statements to be executed against the database. In this step the decision is made if an Id is provided by the aggregate or if the Id is still empty and is expected to be generated by the database. + +| javadoc:org.springframework.data.relational.core.mapping.event.BeforeSaveCallback[] +| Changes made to the aggregate root may get considered, but the decision whether an id value will be sent to the database is already made in the previous step. +Do not use this for creating Ids for new aggregates. Use `BeforeConvertCallback` instead. + +2+| The SQL statements determined above get executed against the database. + +| javadoc:org.springframework.data.relational.core.mapping.event.AfterSaveCallback[] +| After an aggregate root gets saved (that is, inserted or updated). + + +.2+| Load 2+| Load the aggregate using one or more SQL queries. Construct the aggregate from the result set. +| javadoc:org.springframework.data.relational.core.mapping.event.AfterConvertCallback[] +| +|=== + +We encourage the use of callbacks over events since they support the use of immutable classes and therefore are more powerful and versatile than events.
diff --git a/src/main/antora/modules/ROOT/pages/jdbc/getting-started.adoc b/src/main/antora/modules/ROOT/pages/jdbc/getting-started.adoc new file mode 100644 index 0000000000..ed59a627c6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/getting-started.adoc @@ -0,0 +1,170 @@ +[[jdbc.getting-started]] += Getting Started + +An easy way to bootstrap setting up a working environment is to create a Spring-based project in https://spring.io/tools[Spring Tools] or from https://start.spring.io[Spring Initializr]. + +First, you need to set up a running database server. +Refer to your vendor documentation on how to configure your database for JDBC access. + +[[requirements]] +== Requirements + +Spring Data JDBC requires {springdocsurl}[Spring Framework] {springVersion} and above. + +In terms of databases, Spring Data JDBC requires a <> to abstract common SQL functionality over vendor-specific flavours. +Spring Data JDBC includes direct support for the following databases: + +* DB2 +* H2 +* HSQLDB +* MariaDB +* Microsoft SQL Server +* MySQL +* Oracle +* Postgres + +If you use a different database then your application won’t start up. +The <> section contains further detail on how to proceed in such case. + +[[jdbc.hello-world]] +== Hello World + +To create a Spring project in STS: + +. Go to File -> New -> Spring Template Project -> Simple Spring Utility Project, and press Yes when prompted. +Then enter a project and a package name, such as `org.spring.jdbc.example`. +. Add the following to the `pom.xml` files `dependencies` element: ++ +[source,xml,subs="+attributes"] +---- + + + + + + org.springframework.data + spring-data-jdbc + {version} + + + +---- + +. Change the version of Spring in the pom.xml to be ++ +[source,xml,subs="+attributes"] +---- +{springVersion} +---- + +. 
Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level as your `` element: ++ +[source,xml] +---- + + + spring-milestone + Spring Maven MILESTONE Repository + https://repo.spring.io/milestone + + +---- + +The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here]. + +[[jdbc.logging]] +=== Logging + +Spring Data JDBC does little to no logging on its own. +Instead, the mechanics of `JdbcTemplate` to issue SQL statements provide logging. +Thus, if you want to inspect what SQL statements are run, activate logging for Spring's {spring-framework-docs}/data-access.html#jdbc-JdbcTemplate[`NamedParameterJdbcTemplate`] or https://www.mybatis.org/mybatis-3/logging.html[MyBatis]. + +You may also want to set the logging level to `DEBUG` to see some additional information. +To do so, edit the `application.properties` file to have the following content: + +[source] +---- +logging.level.org.springframework.jdbc=DEBUG +---- + +// TODO: Add example similar to + +[[jdbc.examples-repo]] +== Examples Repository + +There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works. 
+ +[[jdbc.java-config]] +== Configuration + +The Spring Data JDBC repositories support can be activated by an annotation through Java configuration, as the following example shows: + +.Spring Data JDBC repositories using Java configuration +[source,java] +---- +@Configuration +@EnableJdbcRepositories // <1> +class ApplicationConfig extends AbstractJdbcConfiguration { // <2> + + @Bean + DataSource dataSource() { // <3> + + EmbeddedDatabaseBuilder builder = new EmbeddedDatabaseBuilder(); + return builder.setType(EmbeddedDatabaseType.HSQL).build(); + } + + @Bean + NamedParameterJdbcOperations namedParameterJdbcOperations(DataSource dataSource) { // <4> + return new NamedParameterJdbcTemplate(dataSource); + } + + @Bean + TransactionManager transactionManager(DataSource dataSource) { // <5> + return new DataSourceTransactionManager(dataSource); + } +} +---- + +<1> `@EnableJdbcRepositories` creates implementations for interfaces derived from `Repository` +<2> javadoc:org.springframework.data.jdbc.repository.config.AbstractJdbcConfiguration[] provides various default beans required by Spring Data JDBC +<3> Creates a `DataSource` connecting to a database. +This is required by the following two bean methods. +<4> Creates the `NamedParameterJdbcOperations` used by Spring Data JDBC to access the database. +<5> Spring Data JDBC utilizes the transaction management provided by Spring JDBC. + +The configuration class in the preceding example sets up an embedded HSQL database by using the `EmbeddedDatabaseBuilder` API of `spring-jdbc`. +The `DataSource` is then used to set up `NamedParameterJdbcOperations` and a `TransactionManager`. +We finally activate Spring Data JDBC repositories by using the `@EnableJdbcRepositories`. +If no base package is configured, it uses the package in which the configuration class resides. +Extending javadoc:org.springframework.data.jdbc.repository.config.AbstractJdbcConfiguration[] ensures various beans get registered. 
+Overwriting its methods can be used to customize the setup (see below). + +This configuration can be further simplified by using Spring Boot. +With Spring Boot a `DataSource` is sufficient once the starter `spring-boot-starter-data-jdbc` is included in the dependencies. +Everything else is done by Spring Boot. + +There are a couple of things one might want to customize in this setup. + +[[jdbc.dialects]] +== Dialects + +Spring Data JDBC uses implementations of the interface `Dialect` to encapsulate behavior that is specific to a database or its JDBC driver. +By default, the javadoc:org.springframework.data.jdbc.repository.config.AbstractJdbcConfiguration[] attempts to determine the dialect from the database configuration by obtaining a connection and registering the correct `Dialect`. +You override `AbstractJdbcConfiguration.jdbcDialect(NamedParameterJdbcOperations)` to customize dialect selection. + +If you use a database for which no dialect is available, then your application won’t start up. +In that case, you’ll have to ask your vendor to provide a `Dialect` implementation. +Alternatively, you can implement your own `Dialect`. + +[TIP] +==== +Dialects are resolved by javadoc:org.springframework.data.jdbc.core.dialect.DialectResolver[] from a `JdbcOperations` instance, typically by inspecting `Connection.getMetaData()`. ++ You can let Spring auto-discover your javadoc:org.springframework.data.jdbc.core.dialect.JdbcDialect[] by registering a class that implements `org.springframework.data.jdbc.core.dialect.DialectResolver$JdbcDialectProvider` through `META-INF/spring.factories`. +`DialectResolver` discovers dialect provider implementations from the class path using Spring's `SpringFactoriesLoader`. +To do so: + +. Implement your own `Dialect`. +. Implement a `JdbcDialectProvider` returning the `Dialect`. +. 
Register the provider by creating a `spring.factories` resource under `META-INF` and perform the registration by adding a line + +`org.springframework.data.jdbc.core.dialect.DialectResolver$JdbcDialectProvider`=` +==== diff --git a/src/main/antora/modules/ROOT/pages/jdbc/mapping.adoc b/src/main/antora/modules/ROOT/pages/jdbc/mapping.adoc new file mode 100644 index 0000000000..c3bba01ca0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/mapping.adoc @@ -0,0 +1,222 @@ +[[mapping]] += Mapping + +Rich mapping support is provided by the `MappingJdbcConverter`. `MappingJdbcConverter` has a rich metadata model that allows mapping domain objects to a data row. +The mapping metadata model is populated by using annotations on your domain objects. +However, the infrastructure is not limited to using annotations as the only source of metadata information. +The `MappingJdbcConverter` also lets you map objects to rows without providing any additional metadata, by following a set of conventions. + +This section describes the features of the `MappingJdbcConverter`, including how to use conventions for mapping objects to rows and how to override those conventions with annotation-based mapping metadata. + +Read on the basics about xref:object-mapping.adoc[] before continuing with this chapter. + +[[mapping.conventions]] +== Convention-based Mapping + +`MappingJdbcConverter` has a few conventions for mapping objects to rows when no additional mapping metadata is provided. +The conventions are: + +* The short Java class name is mapped to the table name in the following manner. +The `com.bigbank.SavingsAccount` class maps to the `SAVINGS_ACCOUNT` table name. +The same name mapping is applied for mapping fields to column names. +For example, the `firstName` field maps to the `FIRST_NAME` column. +You can control this mapping by providing a custom `NamingStrategy`. +See <> for more detail. 
+Table and column names that are derived from property or class names are used in SQL statements without quotes by default. +You can control this behavior by setting `RelationalMappingContext.setForceQuote(true)`. + +* The converter uses any Spring Converters registered with `CustomConversions` to override the default mapping of object properties to row columns and values. + +* The fields of an object are used to convert to and from columns in the row. +Public `JavaBean` properties are not used. + +* If you have a single non-zero-argument constructor whose constructor argument names match top-level column names of the row, that constructor is used. +Otherwise, the zero-argument constructor is used. +If there is more than one non-zero-argument constructor, an exception is thrown. +Refer to xref:object-mapping.adoc#mapping.object-creation[Object Creation] for further details. + +[[jdbc.entity-persistence.types]] +== Supported Types in Your Entity + +The properties of the following types are currently supported: + +* All primitive types and their boxed types (`int`, `float`, `Integer`, `Float`, and so on) + +* Enums get mapped to their name. + +* `String` + +* `java.util.Date`, `java.time.LocalDate`, `java.time.LocalDateTime`, and `java.time.LocalTime` + +* Arrays and Collections of the types mentioned above can be mapped to columns of array type if your database supports that. + +* Anything your database driver accepts. + +* References to other entities. +They are considered a one-to-one relationship, or an embedded type. +It is optional for one-to-one relationship entities to have an `id` attribute. +The table of the referenced entity is expected to have an additional column with a name based on the referencing entity see <>. +Embedded entities do not need an `id`. +If one is present it gets mapped as a normal attribute without any special meaning. + +* `Set` is considered a one-to-many relationship. 
+The table of the referenced entity is expected to have an additional column with a name based on the referencing entity (see <<jdbc.entity-persistence.types.backrefs>>). + +* `Map` is considered a qualified one-to-many relationship. +The table of the referenced entity is expected to have two additional columns: One named based on the referencing entity for the foreign key (see <<jdbc.entity-persistence.types.backrefs>>) and one with the same name and an additional `_key` suffix for the map key. + +* `List` is mapped as a `Map`. The same additional columns are expected and the names used can be customized in the same way. ++ +For `List`, `Set`, and `Map` naming of the back reference can be controlled by implementing `NamingStrategy.getReverseColumnName(RelationalPersistentEntity owner)` and `NamingStrategy.getKeyColumn(RelationalPersistentProperty property)`, respectively. +Alternatively you may annotate the attribute with `@MappedCollection(idColumn="your_column_name", keyColumn="your_key_column_name")`. +Specifying a key column for a `Set` has no effect. + +* Types for which you registered suitable xref:#mapping.explicit.converters[custom converters]. + + +[[mapping.usage.annotations]] +=== Mapping Annotation Overview + +include::partial$mapping-annotations.adoc[] + +See xref:jdbc/entity-persistence.adoc#jdbc.entity-persistence.optimistic-locking[Optimistic Locking] for further reference. + +The mapping metadata infrastructure is defined in the separate `spring-data-commons` project that is technology-agnostic. +Specific subclasses are used in the JDBC support to support annotation based metadata. +Other strategies can also be put in place (if there is demand). + +[[jdbc.entity-persistence.types.referenced-entities]] +=== Referenced Entities + +The handling of referenced entities is limited. +This is based on the idea of aggregate roots as described above. +If you reference another entity, that entity is, by definition, part of your aggregate. +So, if you remove the reference, the previously referenced entity gets deleted.
+This also means references are 1-1 or 1-n, but not n-1 or n-m. + +If you have n-1 or n-m references, you are, by definition, dealing with two separate aggregates. +References between those may be encoded as simple `id` values, which map properly with Spring Data JDBC. +A better way to encode these, is to make them instances of `AggregateReference`. +An `AggregateReference` is a wrapper around an id value which marks that value as a reference to a different aggregate. +Also, the type of that aggregate is encoded in a type parameter. + +[[jdbc.entity-persistence.types.backrefs]] +=== Back References + +All references in an aggregate result in a foreign key relationship in the opposite direction in the database. +By default, the name of the foreign key column is the table name of the referencing entity. + +Alternatively you may choose to have them named by the entity name of the referencing entity ignoring `@Table` annotations. +You activate this behaviour by calling `setForeignKeyNaming(ForeignKeyNaming.IGNORE_RENAMING)` on the `RelationalMappingContext`. + +For `List` and `Map` references an additional column is required for holding the list index or map key. +It is based on the foreign key column with an additional `_KEY` suffix. + +If you want a completely different way of naming these back references you may implement `NamingStrategy.getReverseColumnName(RelationalPersistentEntity owner)` in a way that fits your needs. + +.Declaring and setting an `AggregateReference` +[source,java] +---- +class Person { + @Id long id; + AggregateReference bestFriend; +} + +// ... + +Person p1, p2 = // some initialization + +p1.bestFriend = AggregateReference.to(p2.id); + +---- + +You should not include attributes in your entities to hold the actual value of a back reference, nor of the key column of maps or lists. +If you want these values to be available in your domain model, we recommend doing this in an `AfterConvertCallback` and storing the values in transient values.
+ +:mapped-collection: true +:embedded-entities: true +include::partial$mapping.adoc[] + +[[mapping.explicit.converters]] +== Overriding Mapping with Explicit Converters + +Spring Data allows registration of custom converters to influence how values are mapped in the database. +Currently, converters are only applied on property-level, i.e. you can only convert single values in your domain to single values in the database and back. +Conversion between complex objects and multiple columns isn't supported. + +[[custom-converters.writer]] +=== Writing a Property by Using a Registered Spring Converter + +The following example shows an implementation of a `Converter` that converts from a `Boolean` object to a `String` value: + +[source,java] +---- +import org.springframework.core.convert.converter.Converter; + +@WritingConverter +public class BooleanToStringConverter implements Converter { + + @Override + public String convert(Boolean source) { + return source != null && source ? "T" : "F"; + } +} +---- + +There are a couple of things to notice here: `Boolean` and `String` are both simple types hence Spring Data requires a hint in which direction this converter should apply (reading or writing). +By annotating this converter with `@WritingConverter` you instruct Spring Data to write every `Boolean` property as `String` in the database. + +[[custom-converters.reader]] +=== Reading by Using a Spring Converter + +The following example shows an implementation of a `Converter` that converts from a `String` to a `Boolean` value: + +[source,java] +---- +@ReadingConverter +public class StringToBooleanConverter implements Converter { + + @Override + public Boolean convert(String source) { + return source != null && source.equalsIgnoreCase("T") ? Boolean.TRUE : Boolean.FALSE; + } +} +---- + +There are a couple of things to notice here: `String` and `Boolean` are both simple types hence Spring Data requires a hint in which direction this converter should apply (reading or writing). 
+By annotating this converter with `@ReadingConverter` you instruct Spring Data to convert every `String` value from the database that should be assigned to a `Boolean` property. + +[[jdbc.custom-converters.configuration]] +=== Registering Spring Converters with the `JdbcConverter` + +[source,java] +---- +class MyJdbcConfiguration extends AbstractJdbcConfiguration { + + // … + + @Override + protected List userConverters() { + return Arrays.asList(new BooleanToStringConverter(), new StringToBooleanConverter()); + } + +} +---- + +NOTE: In previous versions of Spring Data JDBC it was recommended to directly overwrite `AbstractJdbcConfiguration.jdbcCustomConversions()`. +This is no longer necessary or even recommended, since that method assembles conversions intended for all databases, conversions registered by the `Dialect` used and conversions registered by the user. +If you are migrating from an older version of Spring Data JDBC and have `AbstractJdbcConfiguration.jdbcCustomConversions()` overwritten, conversions from your `Dialect` will not get registered. + +[TIP] +==== +If you want to rely on https://spring.io/projects/spring-boot[Spring Boot] to bootstrap Spring Data JDBC, but still want to override certain aspects of the configuration, you may want to expose beans of that type. +For custom conversions you may e.g. choose to register a bean of type `JdbcCustomConversions` that will be picked up by the Boot infrastructure. +To learn more about this please make sure to read the Spring Boot https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#data.sql.jdbc[Reference Documentation]. +==== + +[[jdbc.custom-converters.jdbc-value]] +=== JdbcValue + +Value conversion uses `JdbcValue` to enrich values propagated to JDBC operations with a `java.sql.Types` type. +Register a custom write converter if you need to specify a JDBC-specific type instead of using type derivation.
+This converter should convert the value to `JdbcValue` which has a field for the value and for the actual `JDBCType`. diff --git a/src/main/antora/modules/ROOT/pages/jdbc/mybatis.adoc b/src/main/antora/modules/ROOT/pages/jdbc/mybatis.adoc new file mode 100644 index 0000000000..f36584cff7 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/mybatis.adoc @@ -0,0 +1,120 @@ +[[jdbc.mybatis]] += MyBatis Integration + +The CRUD operations and query methods can be delegated to MyBatis. +This section describes how to configure Spring Data JDBC to integrate with MyBatis and which conventions to follow to hand over the running of the queries as well as the mapping to the library. + +[[jdbc.mybatis.configuration]] +== Configuration + +The easiest way to properly plug MyBatis into Spring Data JDBC is by importing `MyBatisJdbcConfiguration` into your application configuration: + +[source,java] +---- +@Configuration +@EnableJdbcRepositories +@Import(MyBatisJdbcConfiguration.class) +class Application { + + @Bean + SqlSessionFactoryBean sqlSessionFactoryBean() { + // Configure MyBatis here + } +} +---- + +As you can see, all you need to declare is a `SqlSessionFactoryBean` as `MyBatisJdbcConfiguration` relies on a `SqlSession` bean to be available in the `ApplicationContext` eventually. + +[[jdbc.mybatis.conventions]] +== Usage conventions + +For each operation in `CrudRepository`, Spring Data JDBC runs multiple statements. +If there is a https://github.com/mybatis/mybatis-3/blob/master/src/main/java/org/apache/ibatis/session/SqlSessionFactory.java[`SqlSessionFactory`] in the application context, Spring Data checks, for each step, whether the `SessionFactory` offers a statement. +If one is found, that statement (including its configured mapping to an entity) is used. + +The name of the statement is constructed by concatenating the fully qualified name of the entity type with `Mapper.` and a `String` determining the kind of statement.
+For example, if an instance of `org.example.User` is to be inserted, Spring Data JDBC looks for a statement named `org.example.UserMapper.insert`. + +When the statement is run, an instance of [`MyBatisContext`] gets passed as an argument, which makes various arguments available to the statement. + +The following table describes the available MyBatis statements: + +[cols="default,default,default,asciidoc"] +|=== +| Name | Purpose | CrudRepository methods that might trigger this statement | Attributes available in the `MyBatisContext` + +| `insert` | Inserts a single entity. This also applies for entities referenced by the aggregate root. | `save`, `saveAll`. | +`getInstance`: the instance to be saved + +`getDomainType`: The type of the entity to be saved. + +`get()`: ID of the referencing entity, where `` is the name of the back reference column provided by the `NamingStrategy`. + + +| `update` | Updates a single entity. This also applies for entities referenced by the aggregate root. | `save`, `saveAll`.| +`getInstance`: The instance to be saved + +`getDomainType`: The type of the entity to be saved. + +| `delete` | Deletes a single entity. | `delete`, `deleteById`.| +`getId`: The ID of the instance to be deleted + +`getDomainType`: The type of the entity to be deleted. + +| `deleteAll-` | Deletes all entities referenced by any aggregate root of the type used as prefix with the given property path. +Note that the type used for prefixing the statement name is the name of the aggregate root, not the one of the entity to be deleted. | `deleteAll`.| + +`getDomainType`: The types of the entities to be deleted. + +| `deleteAll` | Deletes all aggregate roots of the type used as the prefix | `deleteAll`.| + +`getDomainType`: The type of the entities to be deleted. + +| `delete-` | Deletes all entities referenced by an aggregate root with the given propertyPath | `deleteById`.| + +`getId`: The ID of the aggregate root for which referenced entities are to be deleted. 
+ +`getDomainType`: The type of the entities to be deleted. + +| `findById` | Selects an aggregate root by ID | `findById`.| + +`getId`: The ID of the entity to load. + +`getDomainType`: The type of the entity to load. + +| `findAll` | Select all aggregate roots | `findAll`.| + +`getDomainType`: The type of the entity to load. + +| `findAllById` | Select a set of aggregate roots by ID values | `findAllById`.| + +`getId`: A list of ID values of the entities to load. + +`getDomainType`: The type of the entity to load. + +| `findAllByProperty-` | Select a set of entities that is referenced by another entity. The type of the referencing entity is used for the prefix. The referenced entities type is used as the suffix. _This method is deprecated. Use `findAllByPath` instead_ | All `find*` methods. If no query is defined for `findAllByPath`| + +`getId`: The ID of the entity referencing the entities to be loaded. + +`getDomainType`: The type of the entity to load. + + +| `findAllByPath-` | Select a set of entities that is referenced by another entity via a property path. | All `find*` methods.| + +`getIdentifier`: The `Identifier` holding the id of the aggregate root plus the keys and list indexes of all path elements. + +`getDomainType`: The type of the entity to load. + +| `findAllSorted` | Select all aggregate roots, sorted | `findAll(Sort)`.| + +`getSort`: The sorting specification. + +| `findAllPaged` | Select a page of aggregate roots, optionally sorted | `findAll(Page)`.| + +`getPageable`: The paging specification. + +| `count` | Count the number of aggregate root of the type used as prefix | `count` | + +`getDomainType`: The type of aggregate roots to count. 
+|=== + diff --git a/src/main/antora/modules/ROOT/pages/jdbc/query-methods.adoc b/src/main/antora/modules/ROOT/pages/jdbc/query-methods.adoc new file mode 100644 index 0000000000..cde9847cc6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/query-methods.adoc @@ -0,0 +1,296 @@ +[[jdbc.query-methods]] += Query Methods + +This section offers some specific information about the implementation and use of Spring Data JDBC. + +Most of the data access operations you usually trigger on a repository result in a query being run against the databases. +Defining such a query is a matter of declaring a method on the repository interface, as the following example shows: + +.PersonRepository with query methods +[source,java] +---- +interface PersonRepository extends PagingAndSortingRepository { + + List findByFirstname(String firstname); <1> + + List findByFirstnameOrderByLastname(String firstname, Pageable pageable); <2> + + Slice findByLastname(String lastname, Pageable pageable); <3> + + Page findByLastname(String lastname, Pageable pageable); <4> + + Person findByFirstnameAndLastname(String firstname, String lastname); <5> + + Person findFirstByLastname(String lastname); <6> + + @Query("SELECT * FROM person WHERE lastname = :lastname") + List findByLastname(String lastname); <7> + @Query("SELECT * FROM person WHERE lastname = :lastname") + Stream streamByLastname(String lastname); <8> + + @Query("SELECT * FROM person WHERE username = :#{ principal?.username }") + Person findActiveUser(); <9> +} +---- +<1> The method shows a query for all people with the given `firstname`. +The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. +Thus, the method name results in a query expression of `SELECT … FROM person WHERE firstname = :firstname`. +<2> Use `Pageable` to pass offset and sorting parameters to the database. +<3> Return a `Slice`.Selects `LIMIT+1` rows to determine whether there's more data to consume. 
`ResultSetExtractor` customization is not supported. +<4> Run a paginated query returning `Page`.Selects only data within the given page bounds and potentially a count query to determine the total count. `ResultSetExtractor` customization is not supported. +<5> Find a single entity for the given criteria. +It completes with `IncorrectResultSizeDataAccessException` on non-unique results. +<6> In contrast to <3>, the first entity is always emitted even if the query yields more result documents. +<7> The `findByLastname` method shows a query for all people with the given `lastname`. +<8> The `streamByLastname` method returns a `Stream`, which makes values possible as soon as they are returned from the database. +<9> You can use the Spring Expression Language to dynamically resolve parameters. +In the sample, Spring Security is used to resolve the username of the current user. + +The following table shows the keywords that are supported for query methods: + +[cols="1,2,3",options="header",subs="quotes"] +.Supported keywords for query methods +|=== +| Keyword +| Sample +| Logical result + +| `After` +| `findByBirthdateAfter(Date date)` +| `birthdate > date` + +| `GreaterThan` +| `findByAgeGreaterThan(int age)` +| `age > age` + +| `GreaterThanEqual` +| `findByAgeGreaterThanEqual(int age)` +| `age >= age` + +| `Before` +| `findByBirthdateBefore(Date date)` +| `birthdate < date` + +| `LessThan` +| `findByAgeLessThan(int age)` +| `age < age` + +| `LessThanEqual` +| `findByAgeLessThanEqual(int age)` +| `age \<= age` + +| `Between` +| `findByAgeBetween(int from, int to)` +| `age BETWEEN from AND to` + +| `NotBetween` +| `findByAgeNotBetween(int from, int to)` +| `age NOT BETWEEN from AND to` + +| `In` +| `findByAgeIn(Collection ages)` +| `age IN (age1, age2, ageN)` + +| `NotIn` +| `findByAgeNotIn(Collection ages)` +| `age NOT IN (age1, age2, ageN)` + +| `IsNotNull`, `NotNull` +| `findByFirstnameNotNull()` +| `firstname IS NOT NULL` + +| `IsNull`, `Null` +| 
`findByFirstnameNull()` +| `firstname IS NULL` + +| `Like`, `StartingWith`, `EndingWith` +| `findByFirstnameLike(String name)` +| `firstname LIKE name` + +| `NotLike`, `IsNotLike` +| `findByFirstnameNotLike(String name)` +| `firstname NOT LIKE name` + +| `Containing` on String +| `findByFirstnameContaining(String name)` +| `firstname LIKE '%' + name + '%'` + +| `NotContaining` on String +| `findByFirstnameNotContaining(String name)` +| `firstname NOT LIKE '%' + name + '%'` + +| `(No keyword)` +| `findByFirstname(String name)` +| `firstname = name` + +| `Not` +| `findByFirstnameNot(String name)` +| `firstname != name` + +| `IsTrue`, `True` +| `findByActiveIsTrue()` +| `active IS TRUE` + +| `IsFalse`, `False` +| `findByActiveIsFalse()` +| `active IS FALSE` +|=== + +NOTE: Query derivation is limited to properties that can be used in a `WHERE` clause without using joins. + +[[jdbc.query-methods.strategies]] +== Query Lookup Strategies + +The JDBC module supports defining a query manually as a String in a `@Query` annotation or as named query in a property file. + +Deriving a query from the name of the method is currently limited to simple properties, that means properties present in the aggregate root directly. +Also, only select queries are supported by this approach. + +[[jdbc.query-methods.at-query]] +== Using `@Query` + +The following example shows how to use `@Query` to declare a query method: + +.Declare a query method by using @Query +[source,java] +---- +interface UserRepository extends CrudRepository { + + @Query("select firstName, lastName from User u where u.emailAddress = :email") + User findByEmailAddress(@Param("email") String email); +} +---- + +For converting the query result into entities the same `RowMapper` is used by default as for the queries Spring Data JDBC generates itself. +The query you provide must match the format the `RowMapper` expects. +Columns for all properties that are used in the constructor of an entity must be provided.
+Columns for properties that get set via setter, wither or field access are optional. +Properties that don't have a matching column in the result will not be set. +The query is used for populating the aggregate root, embedded entities and one-to-one relationships including arrays of primitive types which get stored and loaded as SQL-array-types. +Separate queries are generated for maps, lists, sets and arrays of entities. + +Properties of one-to-one relationships must have their name prefixed by the name of the relationship plus `_`. +For example if the `User` from the example above has an `address` with the property `city` the column for that `city` must be labeled `address_city`. + + +WARNING: Note that String-based queries do not support pagination nor accept `Sort`, `PageRequest`, and `Limit` as a query parameter as for these queries the query would be required to be rewritten. +If you want to apply limiting, please express this intent using SQL and bind the appropriate parameters to the query yourself. + +Queries may contain SpEL expressions. +There are two variants that are evaluated differently. + +In the first variant a SpEL expression is prefixed with `:` and used like a bind variable. +Such a SpEL expression will get replaced with a bind variable and the variable gets bound to the result of the SpEL expression. + +.Use a SpEL in a query +[source,java] +---- +@Query("SELECT * FROM person WHERE id = :#{#person.id}") +Person findWithSpEL(PersonRef person); +---- + +This can be used to access members of a parameter, as demonstrated in the example above. +For more involved use cases an `EvaluationContextExtension` can be made available in the application context, which in turn can make any object available to the SpEL. + +The other variant can be used anywhere in the query and the result of evaluating the expression will replace the expression in the query string.
+ +.Use a SpEL in a query +[source,java] +---- +@Query("SELECT * FROM #{tableName} WHERE id = :id") +Person findWithSpEL(PersonRef person); +---- + +It is evaluated once before the first execution and uses a `StandardEvaluationContext` with the two variables `tableName` and `qualifiedTableName` added. +This use is most useful when table names are dynamic themselves, because they use SpEL expressions as well. + +NOTE: Spring fully supports Java 8’s parameter name discovery based on the `-parameters` compiler flag. +By using this flag in your build as an alternative to debug information, you can omit the `@Param` annotation for named parameters. + +NOTE: Spring Data JDBC supports only named parameters. + +[[jdbc.query-methods.named-query]] +== Named Queries + +If no query is given in an annotation as described in the previous section Spring Data JDBC will try to locate a named query. +There are two ways how the name of the query can be determined. +The default is to take the _domain class_ of the query, i.e. the aggregate root of the repository, take its simple name and append the name of the method separated by a `.`. +Alternatively the `@Query` annotation has a `name` attribute which can be used to specify the name of a query to be looked up. + +Named queries are expected to be provided in the property file `META-INF/jdbc-named-queries.properties` on the classpath. + +The location of that file may be changed by setting a value to `@EnableJdbcRepositories.namedQueriesLocation`. + +Named queries are handled in the same way as queries provided by annotation. + +[[jdbc.query-methods.customizing-query-methods]] +=== Customizing Query Methods + +[[jdbc.query-methods.at-query.streaming-results]] +=== Streaming Results + +When you specify Stream as the return type of a query method, Spring Data JDBC returns elements as soon as they become available. +When dealing with large amounts of data this is suitable for reducing latency and memory requirements. 
+ +The stream contains an open connection to the database. +To avoid memory leaks, that connection needs to be closed eventually, by closing the stream. +The recommended way to do that is a `try-with-resources` clause. +It also means that, once the connection to the database is closed, the stream cannot obtain further elements and likely throws an exception. + +[[jdbc.query-methods.at-query.custom-rowmapper]] +=== Custom `RowMapper` or `ResultSetExtractor` + +The `@Query` annotation allows you to specify a custom `RowMapper` or `ResultSetExtractor` to use. +The attributes `rowMapperClass` and `resultSetExtractorClass` allow you to specify classes to use, which will get instantiated using a default constructor. +Alternatively you may set `rowMapperClassRef` or `resultSetExtractorClassRef` to a bean name from your Spring application context. + +If you want to use a certain `RowMapper` not just for a single method but for all methods with custom queries returning a certain type, +you may register a `RowMapperMap` bean and register a `RowMapper` per method return type. +The following example shows how to register `DefaultQueryMappingConfiguration`: + +[source,java] +---- +@Bean +QueryMappingConfiguration rowMappers() { + return new DefaultQueryMappingConfiguration() + .register(Person.class, new PersonRowMapper()) + .register(Address.class, new AddressRowMapper()); +} +---- + +When determining which `RowMapper` to use for a method, the following steps are followed, based on the return type of the method: + +. If the type is a simple type, no `RowMapper` is used. ++ +Instead, the query is expected to return a single row with a single column, and a conversion to the return type is applied to that value. +. The entity classes in the `QueryMappingConfiguration` are iterated until one is found that is a superclass or interface of the return type in question. +The `RowMapper` registered for that class is used.
++ +Iterating happens in the order of registration, so make sure to register more general types after specific ones. + +If applicable, wrapper types such as collections or `Optional` are unwrapped. +Thus, a return type of `Optional` uses the `Person` type in the preceding process. + +NOTE: Using a custom `RowMapper` through `QueryMappingConfiguration`, `@Query(rowMapperClass=…)`, or a custom `ResultSetExtractor` disables Entity Callbacks and Lifecycle Events as the result mapping can issue its own events/callbacks if needed. + +[[jdbc.query-methods.at-query.modifying]] +=== Modifying Query + +You can mark a query as being a modifying query by using `@Modifying` on a query method, as the following example shows: + +[source,java] +---- +@Modifying +@Query("UPDATE DUMMYENTITY SET name = :name WHERE id = :id") +boolean updateName(@Param("id") Long id, @Param("name") String name); +---- + +You can specify the following return types: + +* `void` +* `int` (updated record count) +* `boolean` (whether a record was updated) + +Modifying queries are executed directly against the database. +No events or callbacks get called. +Therefore also fields with auditing annotations do not get updated if they don't get updated in the annotated query. diff --git a/src/main/antora/modules/ROOT/pages/jdbc/schema-support.adoc b/src/main/antora/modules/ROOT/pages/jdbc/schema-support.adoc new file mode 100644 index 0000000000..9abf0f80fa --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/schema-support.adoc @@ -0,0 +1,90 @@ +[[jdbc.schema]] += Schema Creation + +When working with SQL databases, the schema is an essential part. +Spring Data JDBC supports a wide range of schema options yet when starting with a domain model it can be challenging to come up with an initial domain model. +To assist you with a code-first approach, Spring Data JDBC ships with an integration to create database change sets using https://www.liquibase.org/[Liquibase].
+ +Consider the following domain entity: + +[source,java] +---- +@Table +class Person { + @Id long id; + String firstName; + String lastName; + LocalDate birthday; + boolean active; +} +---- + +Rendering the initial ChangeSet through the following code: + +[source,java] +---- + +RelationalMappingContext context = … // The context contains the Person entity, ideally initialized through initialEntitySet +LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context); + +writer.writeChangeSet(new FileSystemResource(new File(…))); +---- + +yields the following change log: + +[source,yaml] +---- +databaseChangeLog: +- changeSet: + id: '1685969572426' + author: Spring Data Relational + objectQuotingStrategy: LEGACY + changes: + - createTable: + columns: + - column: + autoIncrement: true + constraints: + nullable: false + primaryKey: true + name: id + type: BIGINT + - column: + constraints: + nullable: true + name: first_name + type: VARCHAR(255 BYTE) + - column: + constraints: + nullable: true + name: last_name + type: VARCHAR(255 BYTE) + - column: + constraints: + nullable: true + name: birthday + type: DATE + - column: + constraints: + nullable: false + name: active + type: TINYINT + tableName: person +---- + +Column types are computed from an object implementing the `SqlTypeMapping` strategy interface. +Nullability is inferred from the type and set to `false` if a property type use primitive Java types. + +Schema support can assist you throughout the application development lifecycle. +In differential mode, you provide an existing Liquibase `Database` to the schema writer instance and the schema writer compares existing tables to mapped entities and derives from the difference which tables and columns to create/to drop. +By default, no tables and no columns are dropped unless you configure `dropTableFilter` and `dropColumnFilter`. 
+Both filter predicates provide the table name and the column name, respectively, so your code can compute which tables and columns can be dropped. + +[source,java] +---- +writer.setDropTableFilter(tableName -> …); +writer.setDropColumnFilter((tableName, columnName) -> …); +---- + +NOTE: Schema support can only identify additions and removals in the sense of removing tables/columns that are not mapped or adding columns that do not exist in the database. +Columns cannot be renamed, nor can data be migrated, because entity mapping does not provide details of how the schema has evolved. diff --git a/src/main/antora/modules/ROOT/pages/jdbc/sequences.adoc b/src/main/antora/modules/ROOT/pages/jdbc/sequences.adoc new file mode 100644 index 0000000000..bd9c0033f9 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/sequences.adoc @@ -0,0 +1,4 @@ +[[jdbc.sequences]] += Sequence Support + +include::partial$sequences.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/jdbc/transactions.adoc b/src/main/antora/modules/ROOT/pages/jdbc/transactions.adoc new file mode 100644 index 0000000000..b16b44b331 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/transactions.adoc @@ -0,0 +1,127 @@ +[[jdbc.transactions]] += Transactionality + +The methods of `CrudRepository` instances are transactional by default. +For reading operations, the transaction configuration `readOnly` flag is set to `true`. +All others are configured with a plain `@Transactional` annotation so that default transaction configuration applies. +For details, see the Javadoc of javadoc:org.springframework.data.jdbc.repository.support.SimpleJdbcRepository[].
+If you need to tweak transaction configuration for one of the methods declared in a repository, redeclare the method in your repository interface, as follows: + +.Custom transaction configuration for CRUD +[source,java] +---- +interface UserRepository extends CrudRepository { + + @Override + @Transactional(timeout = 10) + List findAll(); + + // Further query method declarations +} +---- + +The preceding causes the `findAll()` method to be run with a timeout of 10 seconds and without the `readOnly` flag. + +Another way to alter transactional behavior is by using a facade or service implementation that typically covers more than one repository. +Its purpose is to define transactional boundaries for non-CRUD operations. +The following example shows how to create such a facade: + +.Using a facade to define transactions for multiple repository calls +[source,java] +---- +@Service +public class UserManagementImpl implements UserManagement { + + private final UserRepository userRepository; + private final RoleRepository roleRepository; + + UserManagementImpl(UserRepository userRepository, + RoleRepository roleRepository) { + this.userRepository = userRepository; + this.roleRepository = roleRepository; + } + + @Transactional + public void addRoleToAllUsers(String roleName) { + + Role role = roleRepository.findByName(roleName); + + for (User user : userRepository.findAll()) { + user.addRole(role); + userRepository.save(user); + } + } +} +---- + +The preceding example causes calls to `addRoleToAllUsers(…)` to run inside a transaction (participating in an existing one or creating a new one if none are already running). +The transaction configuration for the repositories is neglected, as the outer transaction configuration determines the actual repository to be used. +Note that you have to explicitly activate `` or use `@EnableTransactionManagement` to get annotation-based configuration for facades working. +Note that the preceding example assumes you use component scanning. 
+ +[[jdbc.transaction.query-methods]] +== Transactional Query Methods + +To let your query methods be transactional, use `@Transactional` at the repository interface you define, as the following example shows: + +.Using @Transactional at query methods +[source,java] +---- +@Transactional(readOnly = true) +interface UserRepository extends CrudRepository { + + List findByLastname(String lastname); + + @Modifying + @Transactional + @Query("delete from User u where u.active = false") + void deleteInactiveUsers(); +} +---- + +Typically, you want the `readOnly` flag to be set to `true`, because most of the query methods only read data. +In contrast to that, `deleteInactiveUsers()` uses the `@Modifying` annotation and overrides the transaction configuration. +Thus, the method runs with the `readOnly` flag set to `false`. + +NOTE: It is highly recommended to make query methods transactional. +These methods might execute more than one query in order to populate an entity. +Without a common transaction Spring Data JDBC executes the queries in different connections. +This may put excessive strain on the connection pool and might even lead to deadlocks when multiple methods request a fresh connection while holding on to one. + +NOTE: It is definitely reasonable to mark read-only queries as such by setting the `readOnly` flag. +This does not, however, act as a check that you do not trigger a manipulating query (although some databases reject `INSERT` and `UPDATE` statements inside a read-only transaction). +Instead, the `readOnly` flag is propagated as a hint to the underlying JDBC driver for performance optimizations. + +[[jdbc.locking]] +== JDBC Locking + +Spring Data JDBC supports locking on derived query methods. +To enable locking on a given derived query method inside a repository, you annotate it with `@Lock`.
+The required value of type `LockMode` offers two values: `PESSIMISTIC_READ` which guarantees that the data you are reading doesn't get modified, and `PESSIMISTIC_WRITE` which obtains a lock to modify the data. +Some databases do not make this distinction. +In those cases both modes are equivalent to `PESSIMISTIC_WRITE`. + +.Using @Lock on derived query method +[source,java] +---- +interface UserRepository extends CrudRepository { + + @Lock(LockMode.PESSIMISTIC_READ) + List findByLastname(String lastname); +} +---- + +As you can see above, the method `findByLastname(String lastname)` will be executed with a pessimistic read lock. +If you are using a database with the MySQL Dialect this will result for example in the following query: + +.Resulting SQL query for MySQL dialect +[source,sql] +---- +Select * from user u where u.lastname = lastname LOCK IN SHARE MODE +---- + +NOTE: `@Lock` is currently not supported on string-based queries. +Query methods created with `@Query` will ignore the locking information provided by `@Lock`. +Using `@Lock` on string-based queries will result in a warning in the logs. +Future versions will throw an exception. + diff --git a/src/main/antora/modules/ROOT/pages/jdbc/why.adoc b/src/main/antora/modules/ROOT/pages/jdbc/why.adoc new file mode 100644 index 0000000000..e7d08f8f42 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/jdbc/why.adoc @@ -0,0 +1,31 @@ +[[jdbc.why]] += Why Spring Data JDBC? + +The main persistence API for relational databases in the Java world is certainly JPA, which has its own Spring Data module. +Why is there another one? + +JPA does a lot of things in order to help the developer. +Among other things, it tracks changes to entities. +It does lazy loading for you. +It lets you map a wide array of object constructs to an equally wide array of database designs. + +This is great and makes a lot of things really easy. +Just take a look at a basic JPA tutorial.
+But it often gets really confusing as to why JPA does a certain thing. +Also, things that are really simple conceptually get rather difficult with JPA. + +Spring Data JDBC aims to be much simpler conceptually, by embracing the following design decisions: + +* If you load an entity, SQL statements get run. +Once this is done, you have a completely loaded entity. +No lazy loading or caching is done. + +* If you save an entity, it gets saved. +If you do not, it does not. +There is no dirty tracking and no session. + +* There is a simple model of how to map entities to tables. +It probably only works for rather simple cases. +If you do not like that, you should code your own strategy. +Spring Data JDBC offers only very limited support for customizing the strategy with annotations. + diff --git a/src/main/antora/modules/ROOT/pages/kotlin.adoc b/src/main/antora/modules/ROOT/pages/kotlin.adoc new file mode 100644 index 0000000000..4f01678d84 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc b/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc new file mode 100644 index 0000000000..8f578961cf --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/coroutines.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc b/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc new file mode 100644 index 0000000000..d24a619a68 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/extensions.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc b/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc new file mode 100644 index 0000000000..6967ddb3f6 --- /dev/null +++ 
b/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/null-safety.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/object-mapping.adoc b/src/main/antora/modules/ROOT/pages/kotlin/object-mapping.adoc new file mode 100644 index 0000000000..ba2301bd72 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/object-mapping.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/object-mapping.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc b/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc new file mode 100644 index 0000000000..bb209ab6a4 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/requirements.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/object-mapping.adoc b/src/main/antora/modules/ROOT/pages/object-mapping.adoc new file mode 100644 index 0000000000..0b6fd54ddf --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/object-mapping.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$object-mapping.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/query-by-example.adoc b/src/main/antora/modules/ROOT/pages/query-by-example.adoc new file mode 100644 index 0000000000..491c57ca0b --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/query-by-example.adoc @@ -0,0 +1,34 @@ +:support-qbe-collection: false +include::{commons}@data-commons::query-by-example.adoc[] + +Here's an example: + +[source,java,indent=0] +---- +include::example$r2dbc/QueryByExampleTests.java[tag=example] +---- + +<1> Create a domain object with the criteria (`null` fields will be ignored). +<2> Using the domain object, create an `Example`. +<3> Through the repository, execute query (use `findOne` for a single item). + +This illustrates how to craft a simple probe using a domain object. 
+In this case, it will query based on the `Employee` object's `name` field being equal to `Frodo`. +`null` fields are ignored. + +[source,java,indent=0] +---- +include::example$r2dbc/QueryByExampleTests.java[tag=example-2] +---- + +<1> Create a custom `ExampleMatcher` that matches on ALL fields (use `matchingAny()` to match on *ANY* fields) +<2> For the `name` field, use a wildcard that matches against the end of the field +<3> Match columns against `null` (don't forget that `NULL` doesn't equal `NULL` in relational databases). +<4> Ignore the `role` field when forming the query. +<5> Plug the custom `ExampleMatcher` into the probe. + +It's also possible to apply a `withTransform()` against any property, allowing you to transform a property before forming the query. +For example, you can apply a `toUpperCase()` to a `String`-based property before the query is created. + +Query By Example really shines when you don't know all the fields needed in a query in advance. +If you were building a filter on a web page where the user can pick the fields, Query By Example is a great way to flexibly capture that into an efficient query. diff --git a/src/main/antora/modules/ROOT/pages/r2dbc.adoc b/src/main/antora/modules/ROOT/pages/r2dbc.adoc new file mode 100644 index 0000000000..10cad9aeb6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc.adoc @@ -0,0 +1,27 @@ +[[r2dbc.repositories]] += R2DBC +:page-section-summary-toc: 1 + +The Spring Data R2DBC module applies core Spring concepts to the development of solutions that use R2DBC database drivers aligned with xref:jdbc/domain-driven-design.adoc[Domain-driven design principles]. +We provide a "`template`" as a high-level abstraction for storing and querying aggregates. + +This document is the reference guide for Spring Data R2DBC support. +It explains the concepts, semantics, and syntax. + +This chapter points out the specialties for repository support for R2DBC.
+This builds on the core repository support explained in xref:repositories/introduction.adoc[Working with Spring Data Repositories]. +You should have a sound understanding of the basic concepts explained there. + +R2DBC contains a wide range of features: + +* Spring configuration support with xref:r2dbc/getting-started.adoc#r2dbc.connectionfactory[Java-based `@Configuration`] classes for an R2DBC driver instance. +* xref:r2dbc/entity-persistence.adoc[`R2dbcEntityTemplate`] as central class for entity-bound operations that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs. +* Feature-rich xref:r2dbc/mapping.adoc[object mapping] integrated with Spring's Conversion Service. +* xref:r2dbc/mapping.adoc#mapping.usage.annotations[Annotation-based mapping metadata] that is extensible to support other metadata formats. +* xref:r2dbc/repositories.adoc[Automatic implementation of Repository interfaces], including support for xref:repositories/custom-implementations.adoc[custom query methods]. + +For most tasks, you should use `R2dbcEntityTemplate` or the repository support, which both use the rich mapping functionality. +`R2dbcEntityTemplate` is the place to look for accessing functionality such as ad-hoc CRUD operations. 
+ + + diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/auditing.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/auditing.adoc new file mode 100644 index 0000000000..7dcf6e9cce --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/auditing.adoc @@ -0,0 +1,21 @@ +[[r2dbc.auditing]] += Auditing + +Since Spring Data R2DBC 1.2, auditing can be enabled by annotating a configuration class with the `@EnableR2dbcAuditing` annotation, as the following example shows: + +.Activating auditing using JavaConfig +[source,java] +---- +@Configuration +@EnableR2dbcAuditing +class Config { + + @Bean + public ReactiveAuditorAware myAuditorProvider() { + return new AuditorAwareImpl(); + } +} +---- + +If you expose a bean of type `ReactiveAuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types. +If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableR2dbcAuditing`. diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/entity-callbacks.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/entity-callbacks.adoc new file mode 100644 index 0000000000..1e59130b3f --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/entity-callbacks.adoc @@ -0,0 +1,43 @@ +[[r2dbc.entity-callbacks]] += EntityCallbacks + +Spring Data R2DBC uses the xref:commons/entity-callbacks.adoc[`EntityCallback` API] for its auditing support and reacts on the following callbacks. + +.Supported Entity Callbacks +[%header,cols="4"] +|=== +| Callback +| Method +| Description +| Order + +| BeforeConvertCallback +| `onBeforeConvert(T entity, SqlIdentifier table)` +| Invoked before a domain object is converted to `OutboundRow`. +| `Ordered.LOWEST_PRECEDENCE` + +| AfterConvertCallback +| `onAfterConvert(T entity, SqlIdentifier table)` +| Invoked after a domain object is loaded. 
+ +Can modify the domain object after reading it from a row. +| `Ordered.LOWEST_PRECEDENCE` + +| AuditingEntityCallback +| `onBeforeConvert(T entity, SqlIdentifier table)` +| Marks an auditable entity _created_ or _modified_ +| 100 + +| BeforeSaveCallback +| `onBeforeSave(T entity, OutboundRow row, SqlIdentifier table)` +| Invoked before a domain object is saved. + +Can modify the target, to be persisted, `OutboundRow` containing all mapped entity information. +| `Ordered.LOWEST_PRECEDENCE` + +| AfterSaveCallback +| `onAfterSave(T entity, OutboundRow row, SqlIdentifier table)` +| Invoked after a domain object is saved. + +Can modify the domain object, to be returned after save, `OutboundRow` containing all mapped entity information. +| `Ordered.LOWEST_PRECEDENCE` + +|=== + diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/entity-persistence.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/entity-persistence.adoc new file mode 100644 index 0000000000..6d38a40825 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/entity-persistence.adoc @@ -0,0 +1,215 @@ +[[r2dbc.entity-persistence]] += Persisting Entities + +`R2dbcEntityTemplate` is the central entrypoint for Spring Data R2DBC. +It provides direct entity-oriented methods and a more narrow, fluent interface for typical ad-hoc use-cases, such as querying, inserting, updating, and deleting data. + +The entry points (`insert()`, `select()`, `update()`, and others) follow a natural naming schema based on the operation to be run. +Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that creates and runs a SQL statement. +Spring Data R2DBC uses a `R2dbcDialect` abstraction to determine bind markers, pagination support and the data types natively supported by the underlying driver. + +NOTE: All terminal methods return always a `Publisher` type that represents the desired operation. 
+The actual statements are sent to the database upon subscription. + +[[r2dbc.entityoperations.save-insert]] +== Methods for Inserting and Updating Entities + +There are several convenient methods on `R2dbcEntityTemplate` for saving and inserting your objects. +To have more fine-grained control over the conversion process, you can register Spring converters with `R2dbcCustomConversions` -- for example `Converter` and `Converter`. + +The simple case of using the save operation is to save a POJO. +In this case, the table name is determined by the (not fully qualified) name of the class. +You may also call the save operation with a specific collection name. +You can use mapping metadata to override the collection in which to store the object. + +When inserting or saving, if the `Id` property is not set, the assumption is that its value will be auto-generated by the database. +Consequently, for auto-generation the type of the `Id` property or field in your class must be a `Long` or an `Integer`. + +The following example shows how to insert a row and retrieve its contents: + +.Inserting and retrieving entities using the `R2dbcEntityTemplate` +[source,java,indent=0] +---- +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=insertAndSelect] +---- + +The following insert and update operations are available: + +A similar set of insert operations is also available: + +* `Mono` *insert* `(T objectToSave)`: Insert the object into the default table. +* `Mono` *update* `(T objectToSave)`: Update the object in the default table. + +Table names can be customized by using the fluent API. + +[[r2dbc.entityoperations.selecting]] +== Selecting Data + +The `select(…)` and `selectOne(…)` methods on `R2dbcEntityTemplate` are used to select data from a table. +Both methods take a <> object that defines the field projection, the `WHERE` clause, the `ORDER BY` clause and limit/offset pagination.
+Limit/offset functionality is transparent to the application regardless of the underlying database. +This functionality is supported by the xref:r2dbc/getting-started.adoc#r2dbc.dialects[`R2dbcDialect` abstraction] to cater for differences between the individual SQL flavors. + +.Selecting entities using the `R2dbcEntityTemplate` +[source,java,indent=0] +---- +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=select] +---- + +[[r2dbc.entityoperations.fluent-api]] +== Fluent API + +This section explains the fluent API usage. +Consider the following simple query: + +[source,java,indent=0] +---- +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=simpleSelect] +---- + +<1> Using `Person` with the `select(…)` method maps tabular results on `Person` result objects. +<2> Fetching `all()` rows returns a `Flux` without limiting results. + +The following example declares a more complex query that specifies the table name by name, a `WHERE` condition, and an `ORDER BY` clause: + +[source,java,indent=0] +---- +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=fullSelect] +---- + +<1> Selecting from a table by name returns row results using the given domain type. +<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter results. +<3> Results can be ordered by individual column names, resulting in an `ORDER BY` clause. +<4> Selecting the one result fetches only a single row. +This way of consuming rows expects the query to return exactly a single result. +`Mono` emits a `IncorrectResultSizeDataAccessException` if the query yields more than a single result. + +TIP: You can directly apply xref:repositories/projections.adoc[Projections] to results by providing the target type via `select(Class)`. + +You can switch between retrieving a single entity and retrieving multiple entities through the following terminating methods: + +* `first()`: Consume only the first row, returning a `Mono`. 
+The returned `Mono` completes without emitting an object if the query returns no results. +* `one()`: Consume exactly one row, returning a `Mono`. +The returned `Mono` completes without emitting an object if the query returns no results. +If the query returns more than one row, `Mono` completes exceptionally emitting `IncorrectResultSizeDataAccessException`. +* `all()`: Consume all returned rows returning a `Flux`. +* `count()`: Apply a count projection returning `Mono`. +* `exists()`: Return whether the query yields any rows by returning `Mono`. + +You can use the `select()` entry point to express your `SELECT` queries. +The resulting `SELECT` queries support the commonly used clauses (`WHERE` and `ORDER BY`) and support pagination. +The fluent API style lets you chain together multiple methods while having easy-to-understand code. +To improve readability, you can use static imports that let you avoid using the 'new' keyword for creating `Criteria` instances. + +[[r2dbc.datbaseclient.fluent-api.criteria]] +include::../commons/criteria-methods.adoc[] +You can use `Criteria` with `SELECT`, `UPDATE`, and `DELETE` queries. + +[[r2dbc.entityoperations.fluent-api.insert]] +== Inserting Data + +You can use the `insert()` entry point to insert data. + +Consider the following simple typed insert operation: + +[source,java,indent=0] +---- +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=insert] +---- + +<1> Using `Person` with the `into(…)` method sets the `INTO` table, based on mapping metadata. +It also prepares the insert statement to accept `Person` objects for inserting. +<2> Provide a scalar `Person` object. +Alternatively, you can supply a `Publisher` to run a stream of `INSERT` statements. +This method extracts all non-`null` values and inserts them. + +[[r2dbc.entityoperations.fluent-api.update]] +== Updating Data + +You can use the `update()` entry point to update rows.
+Updating data starts by specifying the table to update by accepting `Update` specifying assignments. +It also accepts `Query` to create a `WHERE` clause. + +Consider the following simple typed update operation: + +[source,java] +---- +Person modified = … + +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=update] +---- + +<1> Update `Person` objects and apply mapping based on mapping metadata. +<2> Set a different table name by calling the `inTable(…)` method. +<3> Specify a query that translates into a `WHERE` clause. +<4> Apply the `Update` object. +Set in this case `age` to `42` and return the number of affected rows. + +[[r2dbc.entityoperations.fluent-api.delete]] +== Deleting Data + +You can use the `delete()` entry point to delete rows. +Removing data starts with a specification of the table to delete from and, optionally, accepts a `Criteria` to create a `WHERE` clause. + +Consider the following simple insert operation: + +[source,java] +---- +include::example$r2dbc/R2dbcEntityTemplateSnippets.java[tag=delete] +---- + +<1> Delete `Person` objects and apply mapping based on mapping metadata. +<2> Set a different table name by calling the `from(…)` method. +<3> Specify a query that translates into a `WHERE` clause. +<4> Apply the delete operation and return the number of affected rows. + +[[r2dbc.entity-persistence.saving]] +Using Repositories, saving an entity can be performed with the `ReactiveCrudRepository.save(…)` method. +If the entity is new, this results in an insert for the entity. + +If the entity is not new, it gets updated. +Note that whether an instance is new is part of the instance's state. + +NOTE: This approach has some obvious downsides. +If only few of the referenced entities have been actually changed, the deletion and insertion is wasteful. +While this process could and probably will be improved, there are certain limitations to what Spring Data R2DBC can offer. +It does not know the previous state of an aggregate. 
+So any update process always has to take whatever it finds in the database and make sure it converts it to whatever is the state of the entity passed to the save method. + +include::partial$id-generation.adoc[] + +[[r2dbc.entity-persistence.optimistic-locking]] +== Optimistic Locking + +include::partial$optimistic-locking.adoc[] + +[source,java] +---- +@Table +class Person { + + @Id Long id; + String firstname; + String lastname; + @Version Long version; +} + +R2dbcEntityTemplate template = …; + +Mono daenerys = template.insert(new Person("Daenerys")); <1> + +Person other = template.select(Person.class) + .matching(query(where("id").is(daenerys.getId()))) + .first().block(); <2> + +daenerys.setLastname("Targaryen"); +template.update(daenerys); <3> + +template.update(other).subscribe(); // emits OptimisticLockingFailureException <4> +---- + +<1> Initially insert the row. `version` is set to `0`. +<2> Load the just inserted row. `version` is still `0`. +<3> Update the row with `version = 0`. Set the `lastname` and bump `version` to `1`. +<4> Try to update the previously loaded row that still has `version = 0`. The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`. diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/getting-started.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/getting-started.adoc new file mode 100644 index 0000000000..8cba9aa0dc --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/getting-started.adoc @@ -0,0 +1,192 @@ +[[r2dbc.getting-started]] += Getting Started + +An easy way to bootstrap setting up a working environment is to create a Spring-based project in https://spring.io/tools[Spring Tools] or from https://start.spring.io[Spring Initializr]. + +First, you need to set up a running database server. +Refer to your vendor documentation on how to configure your database for R2DBC access.
+ +[[requirements]] +== Requirements + +Spring Data R2DBC requires {springdocsurl}[Spring Framework] {springVersion} and above. + +In terms of databases, Spring Data R2DBC requires a <> to abstract common SQL functionality over vendor-specific flavours. +Spring Data R2DBC includes direct support for the following databases: + +* https://github.com/r2dbc/r2dbc-h2[H2] (`io.r2dbc:r2dbc-h2`) +* https://github.com/mariadb-corporation/mariadb-connector-r2dbc[MariaDB] (`org.mariadb:r2dbc-mariadb`) +* https://github.com/r2dbc/r2dbc-mssql[Microsoft SQL Server] (`io.r2dbc:r2dbc-mssql`) +* https://github.com/asyncer-io/r2dbc-mysql[MySQL] (`io.asyncer:r2dbc-mysql`) +* https://github.com/jasync-sql/jasync-sql[jasync-sql MySQL] (`com.github.jasync-sql:jasync-r2dbc-mysql`) +* https://github.com/r2dbc/r2dbc-postgresql[Postgres] (`io.r2dbc:r2dbc-postgresql`) +* https://github.com/oracle/oracle-r2dbc[Oracle] (`com.oracle.database.r2dbc:oracle-r2dbc`) + +If you use a different database then your application won’t start up. +The <> section contains further detail on how to proceed in such case. + +[[r2dbc.hello-world]] +== Hello World + +To create a Spring project in STS: + +. Go to File -> New -> Spring Template Project -> Simple Spring Utility Project, and press Yes when prompted. +Then enter a project and a package name, such as `org.spring.r2dbc.example`. +. Add the following to the `pom.xml` files `dependencies` element: ++ + +. Add the following to the pom.xml files `dependencies` element: ++ +[source,xml,subs="+attributes"] +---- + + + + + + org.springframework.data + spring-data-r2dbc + {version} + + + + + io.r2dbc + r2dbc-h2 + x.y.z + + + +---- + +. Change the version of Spring in the pom.xml to be ++ +[source,xml,subs="+attributes"] +---- +{springVersion} +---- + +. 
Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level as your `` element: ++ +[source,xml] +---- + + + spring-milestone + Spring Maven MILESTONE Repository + https://repo.spring.io/milestone + + +---- + +The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here]. + +You may also want to set the logging level to `DEBUG` to see some additional information. +To do so, edit the `application.properties` file to have the following content: + +[source] +---- +logging.level.org.springframework.r2dbc=DEBUG +---- + +Then you can, for example, create a `Person` class to persist, as follows: + +[source,java,indent=0] +---- +include::example$r2dbc/Person.java[tags=class] +---- + +Next, you need to create a table structure in your database, as follows: + +[source,sql] +---- +CREATE TABLE person( + id VARCHAR(255) PRIMARY KEY, + name VARCHAR(255), + age INT +); +---- + +You also need a main application to run, as follows: + +[source,java,indent=0] +---- +include::example$r2dbc/R2dbcApp.java[tag=class] +---- + +When you run the main program, the preceding examples produce output similar to the following: + +[source] +---- +2018-11-28 10:47:03,893 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person( + id VARCHAR(255) PRIMARY KEY, + name VARCHAR(255), + age INT + )] +2018-11-28 10:47:04,074 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)] +2018-11-28 10:47:04,092 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person] +2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34] +---- + +Even in this simple example, there are few things to notice: + +* You can create an instance of the central helper class in Spring Data R2DBC 
(`R2dbcEntityTemplate`) by using a standard `io.r2dbc.spi.ConnectionFactory` object. +* The mapper works against standard POJO objects without the need for any additional metadata (though you can, optionally, provide that information -- see xref:r2dbc/mapping.adoc[here].). +* Mapping conventions can use field access.Notice that the `Person` class has only getters. +* If the constructor argument names match the column names of the stored row, they are used to instantiate the object. + +[[r2dbc.examples-repo]] +== Examples Repository + +There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works. + +[[r2dbc.connecting]] +== Connecting to a Relational Database with Spring + +One of the first tasks when using relational databases and Spring is to create a `io.r2dbc.spi.ConnectionFactory` object by using the IoC container. +Make sure to use a <>. + +[[r2dbc.connectionfactory]] +== Registering a `ConnectionFactory` Instance using Java Configuration + +The following example shows an example of using Java-based bean metadata to register an instance of `io.r2dbc.spi.ConnectionFactory`: + +.Registering a `io.r2dbc.spi.ConnectionFactory` object using Java Configuration +[source,java] +---- +@Configuration +public class ApplicationConfiguration extends AbstractR2dbcConfiguration { + + @Override + @Bean + public ConnectionFactory connectionFactory() { + return … + } +} +---- + +This approach lets you use the standard `io.r2dbc.spi.ConnectionFactory` instance, with the container using Spring's `AbstractR2dbcConfiguration`.As compared to registering a `ConnectionFactory` instance directly, the configuration support has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates R2DBC exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with 
the `@Repository` annotation. This hierarchy and the use of `@Repository` is described in {spring-framework-docs}/data-access.html[Spring's DAO support features].
Register the provider by creating a `spring.factories` resource under `META-INF` and perform the registration by adding a line + +`org.springframework.data.r2dbc.dialect.DialectResolver$R2dbcDialectProvider=` +==== diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/kotlin.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/kotlin.adoc new file mode 100644 index 0000000000..d0eb3cd969 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/kotlin.adoc @@ -0,0 +1,28 @@ +[[kotlin]] += Kotlin + +This part of the reference documentation explains the specific Kotlin functionality offered by Spring Data R2DBC. +See xref:kotlin.adoc[] for the general functionality provided by Spring Data. + +To retrieve a list of `SWCharacter` objects in Java, you would normally write the following: + +[source,java] +---- +Flux characters = client.select().from(SWCharacter.class).fetch().all(); +---- + +With Kotlin and the Spring Data extensions, you can instead write the following: + +[source,kotlin] +---- +val characters = client.select().from().fetch().all() +// or (both are equivalent) +val characters : Flux = client.select().from().fetch().all() +---- + +As in Java, `characters` in Kotlin is strongly typed, but Kotlin's clever type inference allows for shorter syntax. + +Spring Data R2DBC provides the following extensions: + +* Reified generics support for `DatabaseClient` and `Criteria`. +* xref:kotlin/coroutines.adoc[] extensions for `DatabaseClient`. diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/mapping.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/mapping.adoc new file mode 100644 index 0000000000..e25416b9ab --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/mapping.adoc @@ -0,0 +1,262 @@ +[[mapping]] += Mapping + +Rich mapping support is provided by the `MappingR2dbcConverter`. `MappingR2dbcConverter` has a rich metadata model that allows mapping domain objects to a data row. 
+The mapping metadata model is populated by using annotations on your domain objects. +However, the infrastructure is not limited to using annotations as the only source of metadata information. +The `MappingR2dbcConverter` also lets you map objects to rows without providing any additional metadata, by following a set of conventions. + +This section describes the features of the `MappingR2dbcConverter`, including how to use conventions for mapping objects to rows and how to override those conventions with annotation-based mapping metadata. + +Read on the basics about xref:object-mapping.adoc[] before continuing with this chapter. + +[[mapping.conventions]] +== Convention-based Mapping + +`MappingR2dbcConverter` has a few conventions for mapping objects to rows when no additional mapping metadata is provided. +The conventions are: + +* The short Java class name is mapped to the table name in the following manner. +The `com.bigbank.SavingsAccount` class maps to the `SAVINGS_ACCOUNT` table name. +The same name mapping is applied for mapping fields to column names. +For example, the `firstName` field maps to the `FIRST_NAME` column. +You can control this mapping by providing a custom `NamingStrategy`. +See <> for more detail. +Table and column names that are derived from property or class names are used in SQL statements without quotes by default. +You can control this behavior by setting `RelationalMappingContext.setForceQuote(true)`. + +* Nested objects are not supported. + +* The converter uses any Spring Converters registered with `CustomConversions` to override the default mapping of object properties to row columns and values. + +* The fields of an object are used to convert to and from columns in the row. +Public `JavaBean` properties are not used. + +* If you have a single non-zero-argument constructor whose constructor argument names match top-level column names of the row, that constructor is used. +Otherwise, the zero-argument constructor is used. 
+If there is more than one non-zero-argument constructor, an exception is thrown. +Refer to xref:object-mapping.adoc#mapping.object-creation[Object Creation] for further details. + +[[mapping.configuration]] +== Mapping Configuration + +By default, (unless explicitly configured) an instance of `MappingR2dbcConverter` is created when you create a `DatabaseClient`. +You can create your own instance of the `MappingR2dbcConverter`. +By creating your own instance, you can register Spring converters to map specific classes to and from the database. + +You can configure the `MappingR2dbcConverter` as well as `DatabaseClient` and `ConnectionFactory` by using Java-based metadata. +The following example uses Spring's Java-based configuration: + +If you set `setForceQuote` of the `R2dbcMappingContext to` true, table and column names derived from classes and properties are used with database specific quotes. +This means that it is OK to use reserved SQL words (such as order) in these names. +You can do so by overriding `r2dbcMappingContext(Optional)` of `AbstractR2dbcConfiguration`. +Spring Data converts the letter casing of such a name to that form which is also used by the configured database when no quoting is used. +Therefore, you can use unquoted names when creating tables, as long as you do not use keywords or special characters in your names. +For databases that adhere to the SQL standard, this means that names are converted to upper case. +The quoting character and the way names get capitalized is controlled by the used `Dialect`. +See xref:r2dbc/getting-started.adoc#r2dbc.dialects[R2DBC Drivers] for how to configure custom dialects. 
+ +.@Configuration class to configure R2DBC mapping support +[source,java] +---- +@Configuration +public class MyAppConfig extends AbstractR2dbcConfiguration { + + public ConnectionFactory connectionFactory() { + return ConnectionFactories.get("r2dbc:…"); + } + + // the following are optional + + @Override + protected List getCustomConverters() { + return List.of(new PersonReadConverter(), new PersonWriteConverter()); + } +} +---- + +`AbstractR2dbcConfiguration` requires you to implement a method that defines a `ConnectionFactory`. + +You can add additional converters to the converter by overriding the `r2dbcCustomConversions` method. + +You can configure a custom `NamingStrategy` by registering it as a bean. +The `NamingStrategy` controls how the names of classes and properties get converted to the names of tables and columns. + +NOTE: `AbstractR2dbcConfiguration` creates a `DatabaseClient` instance and registers it with the container under the name of `databaseClient`. + +[[mapping.usage]] +== Metadata-based Mapping + +To take full advantage of the object mapping functionality inside the Spring Data R2DBC support, you should annotate your mapped objects with the `@Table` annotation. +Although it is not necessary for the mapping framework to have this annotation (your POJOs are mapped correctly, even without any annotations), it lets the classpath scanner find and pre-process your domain objects to extract the necessary metadata. +If you do not use this annotation, your application takes a slight performance hit the first time you store a domain object, because the mapping framework needs to build up its internal metadata model so that it knows about the properties of your domain object and how to persist them. 
+The following example shows a domain object: + +.Example domain object +[source,java] +---- +package com.mycompany.domain; + +@Table +public class Person { + + @Id + private Long id; + + private Integer ssn; + + private String firstName; + + private String lastName; +} +---- + +IMPORTANT: The `@Id` annotation tells the mapper which property you want to use as the primary key. + +[[mapping.types]] +=== Default Type Mapping + +The following table explains how property types of an entity affect mapping: + +|=== +|Source Type | Target Type | Remarks + +|Primitive types and wrapper types +|Passthru +|Can be customized using <>. + +|JSR-310 Date/Time types +|Passthru +|Can be customized using <>. + + +|`String`, `BigInteger`, `BigDecimal`, and `UUID` +|Passthru +|Can be customized using <>. + +|`Enum` +|String +|Can be customized by registering <>. + +|`Blob` and `Clob` +|Passthru +|Can be customized using <>. + +|`byte[]`, `ByteBuffer` +|Passthru +|Considered a binary payload. + +|`Collection` +|Array of `T` +|Conversion to Array type if supported by the configured xref:r2dbc/getting-started.adoc#requirements[driver], not supported otherwise. + +|Arrays of primitive types, wrapper types and `String` +|Array of wrapper type (e.g. `int[]` -> `Integer[]`) +|Conversion to Array type if supported by the configured xref:r2dbc/getting-started.adoc#requirements[driver], not supported otherwise. + +|Driver-specific types +|Passthru +|Contributed as a simple type by the used `R2dbcDialect`. + +|Complex objects +|Target type depends on registered `Converter`. +|Requires a <>, not supported otherwise. + +|=== + +NOTE: The native data type for a column depends on the R2DBC driver type mapping. +Drivers can contribute additional simple types such as Geometry types. 
+ +[[mapping.usage.annotations]] +=== Mapping Annotation Overview + +include::partial$mapping-annotations.adoc[] +See xref:r2dbc/entity-persistence.adoc#r2dbc.entity-persistence.optimistic-locking[Optimistic Locking] for further reference. + +The mapping metadata infrastructure is defined in the separate `spring-data-commons` project that is technology-agnostic. +Specific subclasses are used in the R2DBC support to support annotation based metadata. +Other strategies can also be put in place (if there is demand). + +include::partial$mapping.adoc[] + +[[mapping.explicit.converters]] +== Overriding Mapping with Explicit Converters + +When storing and querying your objects, it is often convenient to have a `R2dbcConverter` instance to handle the mapping of all Java types to `OutboundRow` instances. +However, you may sometimes want the `R2dbcConverter` instances to do most of the work but let you selectively handle the conversion for a particular type -- perhaps to optimize performance. + +To selectively handle the conversion yourself, register one or more one or more `org.springframework.core.convert.converter.Converter` instances with the `R2dbcConverter`. + +You can use the `r2dbcCustomConversions` method in `AbstractR2dbcConfiguration` to configure converters. +The examples xref:r2dbc/mapping.adoc#mapping.configuration[at the beginning of this chapter] show how to perform the configuration with Java. + +NOTE: Custom top-level entity conversion requires asymmetric types for conversion. +Inbound data is extracted from R2DBC's `Row`. +Outbound data (to be used with `INSERT`/`UPDATE` statements) is represented as `OutboundRow` and later assembled to a statement. 
+ +The following example of a Spring Converter implementation converts from a `Row` to a `Person` POJO: + +[source,java] +---- +@ReadingConverter +public class PersonReadConverter implements Converter { + + public Person convert(Row source) { + Person p = new Person(source.get("id", String.class),source.get("name", String.class)); + p.setAge(source.get("age", Integer.class)); + return p; + } +} +---- + +Please note that converters get applied on singular properties. +Collection properties (e.g. `Collection`) are iterated and converted element-wise. +Collection converters (e.g. `Converter>, OutboundRow`) are not supported. + +NOTE: R2DBC uses boxed primitives (`Integer.class` instead of `int.class`) to return primitive values. + +The following example converts from a `Person` to a `OutboundRow`: + +[source,java] +---- +@WritingConverter +public class PersonWriteConverter implements Converter { + + public OutboundRow convert(Person source) { + OutboundRow row = new OutboundRow(); + row.put("id", Parameter.from(source.getId())); + row.put("name", Parameter.from(source.getFirstName())); + row.put("age", Parameter.from(source.getAge())); + return row; + } +} +---- + +[[mapping.explicit.enum.converters]] +=== Overriding Enum Mapping with Explicit Converters + +Some databases, such as https://github.com/pgjdbc/r2dbc-postgresql#postgres-enum-types[Postgres], can natively write enum values using their database-specific enumerated column type. +Spring Data converts `Enum` values by default to `String` values for maximum portability. +To retain the actual enum value, register a `@Writing` converter whose source and target types use the actual enum type to avoid using `Enum.name()` conversion. +Additionally, you need to configure the enum type on the driver level so that the driver is aware how to represent the enum type. 
+ +The following example shows the involved components to read and write `Color` enum values natively: + +[source,java] +---- +enum Color { + Grey, Blue +} + +class ColorConverter extends EnumWriteSupport { + +} + + +class Product { + @Id long id; + Color color; + + // … +} +---- diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/migration-guide.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/migration-guide.adoc new file mode 100644 index 0000000000..03f9624187 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/migration-guide.adoc @@ -0,0 +1,62 @@ +[[migration-guide]] += Migration Guide + +The following sections explain how to migrate to a newer version of Spring Data R2DBC. + +[[upgrading.1.1-1.2]] +== Upgrading from 1.1.x to 1.2.x + +Spring Data R2DBC was developed with the intent to evaluate how well R2DBC can integrate with Spring applications. +One of the main aspects was to move core support into Spring Framework once R2DBC support has proven useful. +Spring Framework 5.3 ships with a new module: Spring R2DBC (`spring-r2dbc`). + +`spring-r2dbc` ships core R2DBC functionality (a slim variant of `DatabaseClient`, Transaction Manager, Connection Factory initialization, Exception translation) that was initially provided by Spring Data R2DBC. +The 1.2.0 release aligns with what's provided in Spring R2DBC by making several changes outlined in the following sections. + +Spring R2DBC's `DatabaseClient` is a more lightweight implementation that encapsulates a pure SQL-oriented interface. +You will notice that the method to run SQL statements changed from `DatabaseClient.execute(…)` to `DatabaseClient.sql(…)`. +The fluent API for CRUD operations has moved into `R2dbcEntityTemplate`. + +If you use logging of SQL statements through the logger prefix `org.springframework.data.r2dbc`, make sure to update it to `org.springframework.r2dbc` (that is removing `.data`) to point to Spring R2DBC components. 
+ +[[upgrading.1.1-1.2.deprecation]] +=== Deprecations + +* Deprecation of `o.s.d.r2dbc.core.DatabaseClient` and its support classes `ConnectionAccessor`, `FetchSpec`, `SqlProvider` and a few more. +Named parameter support classes such as `NamedParameterExpander` are encapsulated by Spring R2DBC's `DatabaseClient` implementation hence we're not providing replacements as this was internal API in the first place. +Use `o.s.r2dbc.core.DatabaseClient` and their Spring R2DBC replacements available from `org.springframework.r2dbc.core`. +Entity-based methods (`select`/`insert`/`update`/`delete`) methods are available through `R2dbcEntityTemplate` which was introduced with version 1.1. +* Deprecation of `o.s.d.r2dbc.connectionfactory`, `o.s.d.r2dbc.connectionfactory.init`, and `o.s.d.r2dbc.connectionfactory.lookup` packages. +Use Spring R2DBC's variant which you can find at `o.s.r2dbc.connection`. +* Deprecation of `o.s.d.r2dbc.convert.ColumnMapRowMapper`. +Use `o.s.r2dbc.core.ColumnMapRowMapper` instead. +* Deprecation of binding support classes `o.s.d.r2dbc.dialect.Bindings`, `BindMarker`, `BindMarkers`, `BindMarkersFactory` and related types. +Use replacements from `org.springframework.r2dbc.core.binding`. +* Deprecation of `BadSqlGrammarException`, `UncategorizedR2dbcException` and exception translation at `o.s.d.r2dbc.support`. +Spring R2DBC provides a slim exception translation variant without an SPI for now available through `o.s.r2dbc.connection.ConnectionFactoryUtils#convertR2dbcException`. + +[[upgrading.1.1-1.2.replacements]] +=== Usage of replacements provided by Spring R2DBC + +To ease migration, several deprecated types are now subtypes of their replacements provided by Spring R2DBC. +Spring Data R2DBC has changes several methods or introduced new methods accepting Spring R2DBC types. 
+Specifically the following classes are changed: + +* `R2dbcEntityTemplate` +* `R2dbcDialect` +* Types in `org.springframework.data.r2dbc.query` + +We recommend that you review and update your imports if you work with these types directly. + +[[breaking-changes]] +=== Breaking Changes + +* `OutboundRow` and statement mappers switched from using `SettableValue` to `Parameter` +* Repository factory support requires `o.s.r2dbc.core.DatabaseClient` instead of `o.s.data.r2dbc.core.DatabaseClient`. + +[[upgrading.1.1-1.2.dependencies]] +=== Dependency Changes + +To make use of Spring R2DBC, make sure to include the following dependency: + +* `org.springframework:spring-r2dbc` diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/query-methods.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/query-methods.adoc new file mode 100644 index 0000000000..eda7efd489 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/query-methods.adoc @@ -0,0 +1,243 @@ +[[r2dbc.repositories.queries]] += Query Methods + +Most of the data access operations you usually trigger on a repository result in a query being run against the databases. 
+Defining such a query is a matter of declaring a method on the repository interface, as the following example shows: + +.PersonRepository with query methods +==== +[source,java] +---- +interface ReactivePersonRepository extends ReactiveSortingRepository { + + Flux findByFirstname(String firstname); <1> + + Flux findByFirstname(Publisher firstname); <2> + + Flux findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3> + + Mono findByFirstnameAndLastname(String firstname, String lastname); <4> + + Mono findFirstByLastname(String lastname); <5> + + @Query("SELECT * FROM person WHERE lastname = :lastname") + Flux findByLastname(String lastname); <6> + + @Query("SELECT firstname, lastname FROM person WHERE lastname = $1") + Mono findFirstByLastname(String lastname); <7> +} +---- + +<1> The method shows a query for all people with the given `firstname`. +The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. +Thus, the method name results in a query expression of `SELECT … FROM person WHERE firstname = :firstname`. +<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`. +<3> Use `Pageable` to pass offset and sorting parameters to the database. +<4> Find a single entity for the given criteria. +It completes with `IncorrectResultSizeDataAccessException` on non-unique results. +<5> Unless <4>, the first entity is always emitted even if the query yields more result rows. +<6> The `findByLastname` method shows a query for all people with the given last name. +<7> A query for a single `Person` entity projecting only `firstname` and `lastname` columns. +The annotated query uses native bind markers, which are Postgres bind markers in this example. +==== + +Note that the columns of a select statement used in a `@Query` annotation must match the names generated by the `NamingStrategy` for the respective property. 
+If a select statement does not include a matching column, that property is not set. +If that property is required by the persistence constructor, either null or (for primitive types) the default value is provided. + +The following table shows the keywords that are supported for query methods: + +[cols="1,2,3",options="header",subs="quotes"] +.Supported keywords for query methods +|=== +| Keyword +| Sample +| Logical result + +| `After` +| `findByBirthdateAfter(Date date)` +| `birthdate > date` + +| `GreaterThan` +| `findByAgeGreaterThan(int age)` +| `age > age` + +| `GreaterThanEqual` +| `findByAgeGreaterThanEqual(int age)` +| `age >= age` + +| `Before` +| `findByBirthdateBefore(Date date)` +| `birthdate < date` + +| `LessThan` +| `findByAgeLessThan(int age)` +| `age < age` + +| `LessThanEqual` +| `findByAgeLessThanEqual(int age)` +| `age \<= age` + +| `Between` +| `findByAgeBetween(int from, int to)` +| `age BETWEEN from AND to` + +| `NotBetween` +| `findByAgeNotBetween(int from, int to)` +| `age NOT BETWEEN from AND to` + +| `In` +| `findByAgeIn(Collection ages)` +| `age IN (age1, age2, ageN)` + +| `NotIn` +| `findByAgeNotIn(Collection ages)` +| `age NOT IN (age1, age2, ageN)` + +| `IsNotNull`, `NotNull` +| `findByFirstnameNotNull()` +| `firstname IS NOT NULL` + +| `IsNull`, `Null` +| `findByFirstnameNull()` +| `firstname IS NULL` + +| `Like`, `StartingWith`, `EndingWith` +| `findByFirstnameLike(String name)` +| `firstname LIKE name` + +| `NotLike`, `IsNotLike` +| `findByFirstnameNotLike(String name)` +| `firstname NOT LIKE name` + +| `Containing` on String +| `findByFirstnameContaining(String name)` +| `firstname LIKE '%' + name +'%'` + +| `NotContaining` on String +| `findByFirstnameNotContaining(String name)` +| `firstname NOT LIKE '%' + name +'%'` + +| `(No keyword)` +| `findByFirstname(String name)` +| `firstname = name` + +| `Not` +| `findByFirstnameNot(String name)` +| `firstname != name` + +| `IsTrue`, `True` +| `findByActiveIsTrue()` +| `active IS TRUE` 
+ +| `IsFalse`, `False` +| `findByActiveIsFalse()` +| `active IS FALSE` +|=== + +[[r2dbc.repositories.modifying]] +== Modifying Queries + +The previous sections describe how to declare queries to access a given entity or collection of entities. +Using keywords from the preceding table can be used in conjunction with `delete…By` or `remove…By` to create derived queries that delete matching rows. + +.`Delete…By` Query +==== +[source,java] +---- +interface ReactivePersonRepository extends ReactiveSortingRepository { + + Mono deleteByLastname(String lastname); <1> + + Mono deletePersonByLastname(String lastname); <2> + + Mono deletePersonByLastname(String lastname); <3> +} +---- + +<1> Using a return type of `Mono` returns the number of affected rows. +<2> Using `Void` just reports whether the rows were successfully deleted without emitting a result value. +<3> Using `Boolean` reports whether at least one row was removed. +==== + +As this approach is feasible for comprehensive custom functionality, you can modify queries that only need parameter binding by annotating the query method with `@Modifying`, as shown in the following example: + +[source,java,indent=0] +---- +include::example$r2dbc/PersonRepository.java[tags=atModifying] +---- + +The result of a modifying query can be: + +* `Void` (or Kotlin `Unit`) to discard update count and await completion. +* `Integer` or another numeric type emitting the affected rows count. +* `Boolean` to emit whether at least one row was updated. + +The `@Modifying` annotation is only relevant in combination with the `@Query` annotation. +Derived custom methods do not require this annotation. + +Modifying queries are executed directly against the database. +No events or callbacks get called. +Therefore also fields with auditing annotations do not get updated if they don't get updated in the annotated query. 
+ +Alternatively, you can add custom modifying behavior by using the facilities described in xref:repositories/custom-implementations.adoc[Custom Implementations for Spring Data Repositories]. + +[[r2dbc.query-methods.at-query]] +== Using `@Query` + +The following example shows how to use `@Query` to declare a query method: + +.Declare a query method by using @Query +[source,java] +---- +interface UserRepository extends ReactiveCrudRepository { + + @Query("select firstName, lastName from User u where u.emailAddress = :email") + Flux findByEmailAddress(@Param("email") String email); +} +---- + +WARNING: Note that String-based queries do not support pagination nor accept `Sort`, `PageRequest`, and `Limit` as a query parameter as for these queries the query would be required to be rewritten. +If you want to apply limiting, please express this intent using SQL and bind the appropriate parameters to the query yourself. + +NOTE: Spring fully supports Java 8’s parameter name discovery based on the `-parameters` compiler flag. +By using this flag in your build as an alternative to debug information, you can omit the `@Param` annotation for named parameters. + +[[r2dbc.repositories.queries.spel]] +=== Queries with SpEL Expressions + +Query string definitions can be used together with SpEL expressions to create dynamic queries at runtime. +SpEL expressions can be used in two ways. + +SpEL expressions can provide predicate values which are evaluated right before running the query. + +Expressions expose method arguments through an array that contains all the arguments. +The following query uses `[0]` +to declare the predicate value for `lastname` (which is equivalent to the `:lastname` parameter binding): + +[source,java,indent=0] +---- +include::example$r2dbc/PersonRepository.java[tags=spel] +---- + +This Expression support is extensible through the Query SPI: `org.springframework.data.spel.spi.EvaluationContextExtension`. 
+The Query SPI can contribute properties and functions and can customize the root object. +Extensions are retrieved from the application context at the time of SpEL evaluation when the query is built. + +TIP: When using SpEL expressions in combination with plain parameters, use named parameter notation instead of native bind markers to ensure a proper binding order. + +The other way to use Expression is in the middle of query, independent of parameters. +The result of evaluating the query will replace the expression in the query string. + +.Use a SpEL in a query +[source,java,indent=0] +---- +include::example$r2dbc/PersonRepository.java[tags=spel2] +---- + +It is evaluated once before the first execution and uses a `StandardEvaluationContext` with the two variables `tableName` and `qualifiedTableName` added. +This use is most useful when table names are dynamic themselves, because they use SpEL expressions as well. + +SpEL in query strings can be a powerful way to enhance queries. +However, they can also accept a broad range of unwanted arguments. +You should make sure to sanitize strings before passing them to the query to avoid unwanted changes to your query. diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/repositories.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/repositories.adoc new file mode 100644 index 0000000000..507ece97bc --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/repositories.adoc @@ -0,0 +1,123 @@ +[[r2dbc.repositories]] += R2DBC Repositories + +[[r2dbc.repositories.intro]] +This chapter points out the specialties for repository support for R2DBC. +This builds on the core repository support explained in xref:repositories/introduction.adoc[Working with Spring Data Repositories]. +Before reading this chapter, you should have a sound understanding of the basic concepts explained there. 
+ +[[r2dbc.repositories.usage]] +== Usage + +To access domain entities stored in a relational database, you can use our sophisticated repository support that eases implementation quite significantly. +To do so, create an interface for your repository. +Consider the following `Person` class: + +.Sample Person entity +[source,java] +---- +public class Person { + + @Id + private Long id; + private String firstname; + private String lastname; + + // … getters and setters omitted +} +---- + +The following example shows a repository interface for the preceding `Person` class: + +.Basic repository interface to persist Person entities +[source,java] +---- +public interface PersonRepository extends ReactiveCrudRepository { + + // additional custom query methods go here +} +---- + +To configure R2DBC repositories, you can use the `@EnableR2dbcRepositories` annotation. +If no base package is configured, the infrastructure scans the package of the annotated configuration class. +The following example shows how to use Java configuration for a repository: + +.Java configuration for repositories +[source,java] +---- +@Configuration +@EnableR2dbcRepositories +class ApplicationConfig extends AbstractR2dbcConfiguration { + + @Override + public ConnectionFactory connectionFactory() { + return … + } +} +---- + +Because our domain repository extends `ReactiveCrudRepository`, it provides you with reactive CRUD operations to access the entities. +On top of `ReactiveCrudRepository`, there is also `ReactiveSortingRepository`, which adds additional sorting functionality similar to that of `PagingAndSortingRepository`. +Working with the repository instance is merely a matter of dependency injecting it into a client. 
+Consequently, you can retrieve all `Person` objects with the following code: + +.Paging access to Person entities +[source,java,indent=0] +---- +include::example$r2dbc/PersonRepositoryTests.java[tags=class] +---- + +The preceding example creates an application context with Spring's unit test support, which performs annotation-based dependency injection into test cases. +Inside the test method, we use the repository to query the database. +We use `StepVerifier` as a test aid to verify our expectations against the results. + +[[projections.resultmapping]] +=== Result Mapping + +A query method returning an Interface- or DTO projection is backed by results produced by the actual query. +Interface projections generally rely on mapping results onto the domain type first to consider potential `@Column` type mappings and the actual projection proxy uses a potentially partially materialized entity to expose projection data. + +Result mapping for DTO projections depends on the actual query type. +Derived queries use the domain type to map results, and Spring Data creates DTO instances solely from properties available on the domain type. +Declaring properties in your DTO that are not available on the domain type is not supported. + +String-based queries use a different approach since the actual query, specifically the field projection, and result type declaration are close together. +DTO projections used with query methods annotated with `@Query` map query results directly into the DTO type. +Field mappings on the domain type are not considered. +Using the DTO type directly, your query method can benefit from a more dynamic projection that isn't restricted to the domain model. + +[[r2dbc.multiple-databases]] +== Working with multiple Databases + +When working with multiple, potentially different databases, your application will require a different approach to configuration. 
+The provided `AbstractR2dbcConfiguration` support class assumes a single `ConnectionFactory` from which the `Dialect` gets derived. +That being said, you need to define a few beans yourself to configure Spring Data R2DBC to work with multiple databases. + +R2DBC repositories require `R2dbcEntityOperations` to implement repositories. +A simple configuration to scan for repositories without using `AbstractR2dbcConfiguration` looks like: + +[source,java] +---- +@Configuration +@EnableR2dbcRepositories(basePackages = "com.acme.mysql", entityOperationsRef = "mysqlR2dbcEntityOperations") +static class MySQLConfiguration { + + @Bean + @Qualifier("mysql") + public ConnectionFactory mysqlConnectionFactory() { + return … + } + + @Bean + public R2dbcEntityOperations mysqlR2dbcEntityOperations(@Qualifier("mysql") ConnectionFactory connectionFactory) { + + DatabaseClient databaseClient = DatabaseClient.create(connectionFactory); + + return new R2dbcEntityTemplate(databaseClient, MySqlDialect.INSTANCE); + } +} +---- + +Note that `@EnableR2dbcRepositories` allows configuration either through `databaseClientRef` or `entityOperationsRef`. +Using various `DatabaseClient` beans is useful when connecting to multiple databases of the same type. +When using different database systems that differ in their dialect, use `@EnableR2dbcRepositories(entityOperationsRef = …)` instead.
diff --git a/src/main/antora/modules/ROOT/pages/r2dbc/sequences.adoc b/src/main/antora/modules/ROOT/pages/r2dbc/sequences.adoc new file mode 100644 index 0000000000..df762cb87f --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/r2dbc/sequences.adoc @@ -0,0 +1,4 @@ +[[r2dbc.sequences]] += Sequence Support + +include::partial$sequences.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/auditing.adoc b/src/main/antora/modules/ROOT/pages/repositories/auditing.adoc new file mode 100644 index 0000000000..92ece5b283 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/auditing.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$auditing.adoc[leveloffset=+1] diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc new file mode 100644 index 0000000000..ad0eda73dd --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc @@ -0,0 +1,3 @@ +include::{commons}@data-commons::page$repositories/core-concepts.adoc[] + +include::{commons}@data-commons::page$is-new-state-detection.adoc[leveloffset=+1] diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc new file mode 100644 index 0000000000..f84313e9da --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/core-domain-events.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc new file mode 100644 index 0000000000..a7c2ff8d3c --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/core-extensions.adoc[] diff --git 
a/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc new file mode 100644 index 0000000000..2ae01801b1 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/create-instances.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc new file mode 100644 index 0000000000..c7615191a6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/custom-implementations.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/definition.adoc b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc new file mode 100644 index 0000000000..bd65a8af83 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/definition.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/introduction.adoc b/src/main/antora/modules/ROOT/pages/repositories/introduction.adoc new file mode 100644 index 0000000000..2649734ab0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/introduction.adoc @@ -0,0 +1,8 @@ +[[common.basics]] += Introduction +:page-section-summary-toc: 1 + +This chapter explains the basic foundations of Spring Data repositories. +Before continuing to the JDBC or R2DBC specifics, make sure you have a sound understanding of the basic concepts explained here. + +The goal of the Spring Data repository abstraction is to significantly reduce the amount of boilerplate code required to implement data access layers for various persistence stores. 
diff --git a/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc new file mode 100644 index 0000000000..081bac9f61 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/null-handling.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/projections.adoc b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc new file mode 100644 index 0000000000..e80d9a0a77 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc @@ -0,0 +1,4 @@ +[[relational.projections]] += Projections + +include::{commons}@data-commons::page$repositories/projections.adoc[leveloffset=+1] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc new file mode 100644 index 0000000000..e495eddc6b --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-keywords-reference.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc new file mode 100644 index 0000000000..dfe4814955 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-methods-details.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc new file mode 100644 index 0000000000..a73c3201d0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc @@ -0,0 +1 @@ 
+include::{commons}@data-commons::page$repositories/query-return-types-reference.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/value-expressions.adoc b/src/main/antora/modules/ROOT/pages/value-expressions.adoc new file mode 100644 index 0000000000..6356a46265 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/value-expressions.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$value-expressions.adoc[] diff --git a/src/main/antora/modules/ROOT/partials/id-generation.adoc b/src/main/antora/modules/ROOT/partials/id-generation.adoc new file mode 100644 index 0000000000..52befaf7fb --- /dev/null +++ b/src/main/antora/modules/ROOT/partials/id-generation.adoc @@ -0,0 +1,19 @@ +[[entity-persistence.id-generation]] +== ID Generation + +Spring Data uses identifier properties to identify entities. +That is, looking these up or creating statements targeting a particular row. +The ID of an entity must be annotated with Spring Data's https://docs.spring.io/spring-data/commons/docs/current/api/org/springframework/data/annotation/Id.html[`@Id`] annotation. + +When your database has an auto-increment column for the ID column, the generated value gets set in the entity after inserting it into the database. + +If you annotate the identifier property additionally with `@Sequence` a database sequence will be used to obtain values for the id if the underlying `Dialect` supports sequences. + +Otherwise, Spring Data does not attempt to insert values of identifier columns when the entity is new and the identifier value defaults to its initial value. +That is `0` for primitive types and `null` if the identifier property uses a numeric wrapper type such as `Long`. + +xref:repositories/core-concepts.adoc#is-new-state-detection[Entity State Detection] explains in detail the strategies to detect whether an entity is new or whether it is expected to exist in your database. + +One important constraint is that, after saving an entity, the entity must not be new anymore. 
+Note that whether an entity is new is part of the entity's state. +With auto-increment columns, this happens automatically, because the ID gets set by Spring Data with the value from the ID column. diff --git a/src/main/antora/modules/ROOT/partials/mapping-annotations.adoc b/src/main/antora/modules/ROOT/partials/mapping-annotations.adoc new file mode 100644 index 0000000000..e98d076c5d --- /dev/null +++ b/src/main/antora/modules/ROOT/partials/mapping-annotations.adoc @@ -0,0 +1,24 @@ +The `RelationalConverter` can use metadata to drive the mapping of objects to rows. +The following annotations are available: + +* `@Id`: Applied at the field level to mark the primary key. +* `@Table`: Applied at the class level to indicate this class is a candidate for mapping to the database. +You can specify the name of the table where the database is stored. +* `@Transient`: By default, all fields are mapped to the row. +This annotation excludes the field where it is applied from being stored in the database. +Transient properties cannot be used within a persistence constructor as the converter cannot materialize a value for the constructor argument. +* `@PersistenceCreator`: Marks a given constructor or static factory method -- even a package protected one -- to use when instantiating the object from the database. +Constructor arguments are mapped by name to the values in the retrieved row. +* `@Value`: This annotation is part of the Spring Framework. +Within the mapping framework it can be applied to constructor arguments. +This lets you use a Spring Expression Language statement to transform a key’s value retrieved in the database before it is used to construct a domain object. +In order to reference a column of a given row one has to use expressions like: `@Value("#root.myProperty")` where root refers to the root of the given `Row`. 
+* `@Column`: Applied at the field level to describe the name of the column as it is represented in the row, letting the name be different from the field name of the class. +Names specified with a `@Column` annotation are always quoted when used in SQL statements. +For most databases, this means that these names are case-sensitive. +It also means that you can use special characters in these names. +However, this is not recommended, since it may cause problems with other tools. +* `@Version`: Applied at field level, it is used for optimistic locking and checked for modification on save operations. +A value of `null` (`zero` for primitive types) is considered as a marker for entities to be new. +The initially stored value is `zero` (`one` for primitive types). +The version gets incremented automatically on every update. diff --git a/src/main/antora/modules/ROOT/partials/mapping.adoc b/src/main/antora/modules/ROOT/partials/mapping.adoc new file mode 100644 index 0000000000..7e864516e2 --- /dev/null +++ b/src/main/antora/modules/ROOT/partials/mapping.adoc @@ -0,0 +1,197 @@ +[[entity-persistence.naming-strategy]] +== Naming Strategy + +By convention, Spring Data applies a `NamingStrategy` to determine table, column, and schema names defaulting to https://en.wikipedia.org/wiki/Snake_case[snake case]. +An object property named `firstName` becomes `first_name`. +You can tweak that by providing a javadoc:org.springframework.data.relational.core.mapping.NamingStrategy[] in your application context. + +[[entity-persistence.custom-table-name]] +== Override table names + +When the table naming strategy does not match your database table names, you can override the table name with the javadoc:org.springframework.data.relational.core.mapping.Table[] annotation. +The element `value` of this annotation provides the custom table name.
+The following example maps the `MyEntity` class to the `CUSTOM_TABLE_NAME` table in the database: + +[source,java] +---- +@Table("CUSTOM_TABLE_NAME") +class MyEntity { + @Id + Integer id; + + String name; +} +---- + +You may use xref:value-expressions.adoc[Spring Data's SpEL support] to dynamically create the table name. +Once generated the table name will be cached, so it is dynamic per mapping context only. + +[[entity-persistence.custom-column-name]] +== Override column names + +When the column naming strategy does not match your database column names, you can override the column name with the javadoc:org.springframework.data.relational.core.mapping.Column[] annotation. +The element `value` of this annotation provides the custom column name. +The following example maps the `name` property of the `MyEntity` class to the `CUSTOM_COLUMN_NAME` column in the database: + +[source,java] +---- +class MyEntity { + @Id + Integer id; + + @Column("CUSTOM_COLUMN_NAME") + String name; +} +---- + +ifdef::mapped-collection[] + +The javadoc:org.springframework.data.relational.core.mapping.MappedCollection[] +annotation can be used on a reference type (one-to-one relationship) or on Sets, Lists, and Maps (one-to-many relationship). +`idColumn` element of the annotation provides a custom name for the foreign key column referencing the id column in the other table. +In the following example the corresponding table for the `MySubEntity` class has a `NAME` column, and the `CUSTOM_MY_ENTITY_ID_COLUMN_NAME` column of the `MyEntity` id for relationship reasons: + +[source,java] +---- +class MyEntity { + @Id + Integer id; + + @MappedCollection(idColumn = "CUSTOM_MY_ENTITY_ID_COLUMN_NAME") + Set<MySubEntity> subEntities; +} + +class MySubEntity { + String name; +} +---- + +When using `List` and `Map` you must have an additional column for the position of a dataset in the `List` or the key value of the entity in the `Map`.
+This additional column name may be customized with the `keyColumn` element of the javadoc:org.springframework.data.relational.core.mapping.MappedCollection[] annotation: + +[source,java] +---- +class MyEntity { + @Id + Integer id; + + @MappedCollection(idColumn = "CUSTOM_COLUMN_NAME", keyColumn = "CUSTOM_KEY_COLUMN_NAME") + List<MySubEntity> name; +} + +class MySubEntity { + String name; +} +---- +endif::[] + +You may use xref:value-expressions.adoc[Spring Data's SpEL support] to dynamically create column names. +Once generated the names will be cached, so it is dynamic per mapping context only. + + +ifdef::embedded-entities[] + +[[entity-persistence.embedded-entities]] +== Embedded entities + +Embedded entities are used to have value objects in your Java data model, even if there is only one table in your database. +In the following example you see, that `MyEntity` is mapped with the `@Embedded` annotation. +The consequence of this is, that in the database a table `my_entity` with the two columns `id` and `name` (from the `EmbeddedEntity` class) is expected. + +However, if the `name` column is actually `null` within the result set, the entire property `embeddedEntity` will be set to null according to the `onEmpty` of `@Embedded`, which ``null``s objects when all nested properties are `null`. + +Opposite to this behavior `USE_EMPTY` tries to create a new instance using either a default constructor or one that accepts nullable parameter values from the result set. + +.Sample Code of embedding objects +==== +[source,java] +---- +class MyEntity { + + @Id + Integer id; + + @Embedded(onEmpty = USE_NULL) <1> + EmbeddedEntity embeddedEntity; +} + +class EmbeddedEntity { + String name; +} +---- + +<1> ``Null``s `embeddedEntity` if `name` is `null`. +Use `USE_EMPTY` to instantiate `embeddedEntity` with a potential `null` value for the `name` property.
+==== + +If you need a value object multiple times in an entity, this can be achieved with the optional `prefix` element of the `@Embedded` annotation. +This element represents a prefix and is prepended to each column name in the embedded object. + +[TIP] +==== +Make use of the shortcuts `@Embedded.Nullable` & `@Embedded.Empty` for `@Embedded(onEmpty = USE_NULL)` and `@Embedded(onEmpty = USE_EMPTY)` to reduce verbosity and simultaneously set JSR-305 `@javax.annotation.Nonnull` accordingly. + +[source,java] +---- +class MyEntity { + + @Id + Integer id; + + @Embedded.Nullable <1> + EmbeddedEntity embeddedEntity; +} +---- + +<1> Shortcut for `@Embedded(onEmpty = USE_NULL)`. +==== + +Embedded entities containing a `Collection` or a `Map` will always be considered non-empty since they will at least contain the empty collection or map. +Such an entity will therefore never be `null` even when using `@Embedded(onEmpty = USE_NULL)`. +endif::[] + +[[entity-persistence.read-only-properties]] +== Read Only Properties + +Attributes annotated with `@ReadOnlyProperty` will not be written to the database by Spring Data, but they will be read when an entity gets loaded. + +Spring Data will not automatically reload an entity after writing it. +Therefore, you have to reload it explicitly if you want to see data that was generated in the database for such columns. + +If the annotated attribute is an entity or collection of entities, it is represented by one or more separate rows in separate tables. +Spring Data will not perform any insert, delete or update for these rows. + +[[entity-persistence.insert-only-properties]] +== Insert Only Properties + +Attributes annotated with `@InsertOnlyProperty` will only be written to the database by Spring Data during insert operations. +For updates these properties will be ignored. + +`@InsertOnlyProperty` is only supported for the aggregate root.
+ +[[mapping.custom.object.construction]] +== Customized Object Construction + +The mapping subsystem allows the customization of the object construction by annotating a constructor with the `@PersistenceConstructor` annotation. The values to be used for the constructor parameters are resolved in the following way: + +* If a parameter is annotated with the `@Value` annotation, the given expression is evaluated, and the result is used as the parameter value. +* If the Java type has a property whose name matches the given field of the input row, then its property information is used to select the appropriate constructor parameter to which to pass the input field value. +This works only if the parameter name information is present in the Java `.class` files, which you can achieve by compiling the source with debug information or using the `-parameters` command-line switch for `javac` in Java 8. +* Otherwise, a `MappingException` is thrown to indicate that the given constructor parameter could not be bound. + +[source,java] +---- +class OrderItem { + + private @Id final String id; + private final int quantity; + private final double unitPrice; + + OrderItem(String id, int quantity, double unitPrice) { + this.id = id; + this.quantity = quantity; + this.unitPrice = unitPrice; + } + + // getters/setters omitted +} +---- diff --git a/src/main/antora/modules/ROOT/partials/optimistic-locking.adoc b/src/main/antora/modules/ROOT/partials/optimistic-locking.adoc new file mode 100644 index 0000000000..5819ce4173 --- /dev/null +++ b/src/main/antora/modules/ROOT/partials/optimistic-locking.adoc @@ -0,0 +1,12 @@ +Spring Data supports optimistic locking by means of a numeric attribute that is annotated with +https://docs.spring.io/spring-data/commons/docs/current/api/org/springframework/data/annotation/Version.html[`@Version`] on the aggregate root.
+Whenever Spring Data saves an aggregate with such a version attribute two things happen: + +* The update statement for the aggregate root will contain a where clause checking that the version stored in the database is actually unchanged. +* If this isn't the case an `OptimisticLockingFailureException` will be thrown. + +Also, the version attribute gets increased both in the entity and in the database so a concurrent action will notice the change and throw an `OptimisticLockingFailureException` if applicable as described above. + +This process also applies to inserting new aggregates, where a `null` or `0` version indicates a new instance and the increased instance afterwards marks the instance as not new anymore, making this work rather nicely with cases where the id is generated during object construction for example when UUIDs are used. + +During deletes the version check also applies but no version is increased. diff --git a/src/main/antora/modules/ROOT/partials/sequences.adoc b/src/main/antora/modules/ROOT/partials/sequences.adoc new file mode 100644 index 0000000000..4415aac8e9 --- /dev/null +++ b/src/main/antora/modules/ROOT/partials/sequences.adoc @@ -0,0 +1,57 @@ +Primary key properties (annotated with `@Id`) may also be annotated with `@Sequence`. +The presence of the `@Sequence` annotation indicates that the property's initial value should be obtained from a database sequence at the time of object insertion. +The ability of the database to generate a sequence is <<sequences.dialects,dialect-specific>>. +In the absence of the `@Sequence` annotation, it is assumed that the value for the corresponding column is automatically generated by the database upon row insertion.
+ +Consider the following entity: + +.Entity with Id generation from a Sequence +[source,java] +---- +@Table +class MyEntity { + + @Id + @Sequence( + sequence = "my_seq", + schema = "public" + ) + private Long id; + + // … +} +---- + +When persisting this entity, before the SQL `INSERT`, Spring Data will issue an additional `SELECT` statement to fetch the next value from the sequence. +For instance, for PostgreSQL the query, issued by Spring Data, would look like this: + +.Select for next sequence value in PostgreSQL +[source,sql] +---- +SELECT nextval('public.my_seq'); +---- + +The fetched identifier value is included in `VALUES` during the insert: + +.Insert statement enriched with Id value +[source,sql] +---- +INSERT INTO "my_entity"("id", "name") VALUES(?, ?); +---- + +NOTE: Obtaining a value from a sequence and inserting the object are two separate operations. +We highly recommend running these operations within a surrounding transaction to ensure atomicity. + +[[sequences.dialects]] +== Supported Dialects + +The following dialects support Sequences: + +* H2 +* HSQL +* PostgreSQL +* DB2 +* Oracle +* Microsoft SQL Server + +Note that MySQL does not support sequences. 
diff --git a/src/main/antora/resources/antora-resources/antora.yml b/src/main/antora/resources/antora-resources/antora.yml new file mode 100644 index 0000000000..b4c60f697f --- /dev/null +++ b/src/main/antora/resources/antora-resources/antora.yml @@ -0,0 +1,23 @@ +version: ${antora-component.version} +prerelease: ${antora-component.prerelease} + +asciidoc: + attributes: + copyright-year: ${current.year} + version: ${project.version} + springversionshort: ${spring.short} + springversion: ${spring} + attribute-missing: 'warn' + commons: ${springdata.commons.docs} + include-xml-namespaces: false + spring-data-commons-docs-url: https://docs.spring.io/spring-data/commons/reference + spring-data-commons-javadoc-base: https://docs.spring.io/spring-data/commons/docs/${springdata.commons}/api/ + spring-data-jdbc-javadoc: https://docs.spring.io/spring-data/jdbc/docs/${version}/api/ + spring-data-r2dbc-javadoc: https://docs.spring.io/spring-data/r2dbc/docs/${version}/api/ + springdocsurl: https://docs.spring.io/spring-framework/reference/{springversionshort} + springjavadocurl: https://docs.spring.io/spring-framework/docs/${spring}/javadoc-api + spring-framework-docs: '{springdocsurl}' + spring-framework-javadoc: '{springjavadocurl}' + springhateoasversion: ${spring-hateoas} + releasetrainversion: ${releasetrain} + store: Jdbc diff --git a/src/main/java/org/springframework/data/jdbc/core/CascadingDataAccessStrategy.java b/src/main/java/org/springframework/data/jdbc/core/CascadingDataAccessStrategy.java deleted file mode 100644 index a94c2ebca3..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/CascadingDataAccessStrategy.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; -import java.util.function.Function; - -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.mapping.PropertyPath; - -/** - * Delegates each methods to the {@link DataAccessStrategy}s passed to the constructor in turn until the first that does - * not throw an exception. - * - * @author Jens Schauder - */ -public class CascadingDataAccessStrategy implements DataAccessStrategy { - - private final List strategies; - - public CascadingDataAccessStrategy(List strategies) { - this.strategies = new ArrayList<>(strategies); - } - - @Override - public void insert(T instance, Class domainType, Map additionalParameters) { - collectVoid(das -> das.insert(instance, domainType, additionalParameters)); - } - - @Override - public void update(S instance, Class domainType) { - collectVoid(das -> das.update(instance, domainType)); - } - - @Override - public void delete(Object id, Class domainType) { - collectVoid(das -> das.delete(id, domainType)); - } - - @Override - public void delete(Object rootId, PropertyPath propertyPath) { - collectVoid(das -> das.delete(rootId, propertyPath)); - } - - @Override - public void deleteAll(Class domainType) { - collectVoid(das -> das.deleteAll(domainType)); - } - - @Override - public void deleteAll(PropertyPath propertyPath) { - collectVoid(das -> das.deleteAll(propertyPath)); - } - - @Override - public long 
count(Class domainType) { - return collect(das -> das.count(domainType)); - } - - @Override - public T findById(Object id, Class domainType) { - return collect(das -> das.findById(id, domainType)); - } - - @Override - public Iterable findAll(Class domainType) { - return collect(das -> das.findAll(domainType)); - } - - @Override - public Iterable findAllById(Iterable ids, Class domainType) { - return collect(das -> das.findAllById(ids, domainType)); - } - - @Override - public Iterable findAllByProperty(Object rootId, JdbcPersistentProperty property) { - return collect(das -> das.findAllByProperty(rootId, property)); - } - - @Override - public boolean existsById(Object id, Class domainType) { - return collect(das -> das.existsById(id, domainType)); - } - - private T collect(Function function) { - return strategies.stream().collect(new FunctionCollector<>(function)); - } - - private void collectVoid(Consumer consumer) { - - collect(das -> { - consumer.accept(das); - return null; - }); - } - -} diff --git a/src/main/java/org/springframework/data/jdbc/core/DataAccessStrategy.java b/src/main/java/org/springframework/data/jdbc/core/DataAccessStrategy.java deleted file mode 100644 index 8bea1a9dd9..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/DataAccessStrategy.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import java.util.Map; - -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.mapping.PropertyPath; - -/** - * Abstraction for accesses to the database that should be implementable with a single SQL statement and relates to a single entity as opposed to {@link JdbcEntityOperations} which provides interactions related to complete aggregates. - * - * @author Jens Schauder - */ -public interface DataAccessStrategy { - - void insert(T instance, Class domainType, Map additionalParameters); - - void update(T instance, Class domainType); - - void delete(Object id, Class domainType); - - /** - * Deletes all entities reachable via {@literal propertyPath} from the instance identified by {@literal rootId}. - * - * @param rootId Id of the root object on which the {@literal propertyPath} is based. - * @param propertyPath Leading from the root object to the entities to be deleted. - */ - void delete(Object rootId, PropertyPath propertyPath); - - void deleteAll(Class domainType); - - /** - * Deletes all entities reachable via {@literal propertyPath} from any instance. - * - * @param propertyPath Leading from the root object to the entities to be deleted. - */ - void deleteAll(PropertyPath propertyPath); - - long count(Class domainType); - - T findById(Object id, Class domainType); - - Iterable findAll(Class domainType); - - Iterable findAllById(Iterable ids, Class domainType); - - /** - * Finds all entities reachable via {@literal property} from the instance identified by {@literal rootId}. - * - * @param rootId Id of the root object on which the {@literal propertyPath} is based. - * @param property Leading from the root object to the entities to be found. 
- */ - Iterable findAllByProperty(Object rootId, JdbcPersistentProperty property); - - boolean existsById(Object id, Class domainType); - -} diff --git a/src/main/java/org/springframework/data/jdbc/core/DefaultDataAccessStrategy.java b/src/main/java/org/springframework/data/jdbc/core/DefaultDataAccessStrategy.java deleted file mode 100644 index bbade9dc5c..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/DefaultDataAccessStrategy.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import org.springframework.dao.EmptyResultDataAccessException; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.dao.NonTransientDataAccessException; -import org.springframework.data.jdbc.mapping.model.BasicJdbcPersistentEntityInformation; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.jdbc.support.JdbcUtil; -import org.springframework.data.mapping.PropertyHandler; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.repository.core.EntityInformation; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.jdbc.support.GeneratedKeyHolder; -import org.springframework.jdbc.support.KeyHolder; -import org.springframework.util.Assert; - -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; - -/** - * The default {@link DataAccessStrategy} is to generate SQL statements based on meta data from the entity. - * - * @author Jens Schauder - */ -public class DefaultDataAccessStrategy implements DataAccessStrategy { - - private static final String ENTITY_NEW_AFTER_INSERT = "Entity [%s] still 'new' after insert. 
Please set either" - + " the id property in a BeforeInsert event handler, or ensure the database creates a value and your " - + "JDBC driver returns it."; - - private final SqlGeneratorSource sqlGeneratorSource; - private final NamedParameterJdbcOperations operations; - private final JdbcMappingContext context; - private final DataAccessStrategy accessStrategy; - - public DefaultDataAccessStrategy(SqlGeneratorSource sqlGeneratorSource, NamedParameterJdbcOperations operations, - JdbcMappingContext context, DataAccessStrategy accessStrategy) { - - this.sqlGeneratorSource = sqlGeneratorSource; - this.operations = operations; - this.context = context; - this.accessStrategy = accessStrategy; - } - - /** - * Creates a {@link DefaultDataAccessStrategy} which references it self for resolution of recursive data accesses. - * Only suitable if this is the only access strategy in use. - */ - public DefaultDataAccessStrategy(SqlGeneratorSource sqlGeneratorSource, NamedParameterJdbcOperations operations, - JdbcMappingContext context) { - - this.sqlGeneratorSource = sqlGeneratorSource; - this.operations = operations; - this.context = context; - this.accessStrategy = this; - } - - @Override - public void insert(T instance, Class domainType, Map additionalParameters) { - - KeyHolder holder = new GeneratedKeyHolder(); - JdbcPersistentEntity persistentEntity = getRequiredPersistentEntity(domainType); - JdbcPersistentEntityInformation entityInformation = context - .getRequiredPersistentEntityInformation(domainType); - - MapSqlParameterSource parameterSource = getPropertyMap(instance, persistentEntity); - - Object idValue = getIdValueOrNull(instance, persistentEntity); - JdbcPersistentProperty idProperty = persistentEntity.getRequiredIdProperty(); - parameterSource.addValue(idProperty.getColumnName(), convert(idValue, idProperty.getColumnType()), - JdbcUtil.sqlTypeFor(idProperty.getColumnType())); - - additionalParameters.forEach(parameterSource::addValue); - - boolean 
idValueDoesNotComeFromEntity = // - idValue == null // - || additionalParameters.containsKey(idProperty.getColumnName()); - - operations.update( // - sql(domainType).getInsert(idValueDoesNotComeFromEntity, additionalParameters.keySet()), // - parameterSource, // - holder // - ); - - setIdFromJdbc(instance, holder, persistentEntity); - - if (entityInformation.isNew(instance)) { - throw new IllegalStateException(String.format(ENTITY_NEW_AFTER_INSERT, persistentEntity)); - } - - } - - @Override - public void update(S instance, Class domainType) { - - JdbcPersistentEntity persistentEntity = getRequiredPersistentEntity(domainType); - - operations.update(sql(domainType).getUpdate(), getPropertyMap(instance, persistentEntity)); - } - - @Override - public void delete(Object id, Class domainType) { - - String deleteByIdSql = sql(domainType).getDeleteById(); - MapSqlParameterSource parameter = createIdParameterSource(id, domainType); - - operations.update(deleteByIdSql, parameter); - } - - @Override - public void delete(Object rootId, PropertyPath propertyPath) { - - JdbcPersistentEntity rootEntity = context.getRequiredPersistentEntity(propertyPath.getOwningType()); - - JdbcPersistentProperty referencingProperty = rootEntity.getRequiredPersistentProperty(propertyPath.getSegment()); - Assert.notNull(referencingProperty, "No property found matching the PropertyPath " + propertyPath); - - String format = sql(rootEntity.getType()).createDeleteByPath(propertyPath); - - HashMap parameters = new HashMap<>(); - parameters.put("rootId", rootId); - operations.update(format, parameters); - } - - @Override - public void deleteAll(Class domainType) { - operations.getJdbcOperations().update(sql(domainType).createDeleteAllSql(null)); - } - - @Override - public void deleteAll(PropertyPath propertyPath) { - operations.getJdbcOperations().update(sql(propertyPath.getOwningType().getType()).createDeleteAllSql(propertyPath)); - } - - @SuppressWarnings("ConstantConditions") - @Override - public 
long count(Class domainType) { - return operations.getJdbcOperations().queryForObject(sql(domainType).getCount(), Long.class); - } - - @Override - public T findById(Object id, Class domainType) { - - String findOneSql = sql(domainType).getFindOne(); - MapSqlParameterSource parameter = createIdParameterSource(id, domainType); - try { - return operations.queryForObject(findOneSql, parameter, getEntityRowMapper(domainType)); - } catch (EmptyResultDataAccessException e) { - return null; - } - } - - @Override - public Iterable findAll(Class domainType) { - return operations.query(sql(domainType).getFindAll(), getEntityRowMapper(domainType)); - } - - @Override - public Iterable findAllById(Iterable ids, Class domainType) { - - String findAllInListSql = sql(domainType).getFindAllInList(); - Class targetType = getRequiredPersistentEntity(domainType).getRequiredIdProperty().getColumnType(); - - MapSqlParameterSource parameter = new MapSqlParameterSource( // - "ids", // - StreamSupport.stream(ids.spliterator(), false) // - .map(id -> convert(id, targetType)) // - .collect(Collectors.toList()) // - ); - - return operations.query(findAllInListSql, parameter, getEntityRowMapper(domainType)); - } - - @SuppressWarnings("unchecked") - @Override - public Iterable findAllByProperty(Object rootId, JdbcPersistentProperty property) { - - Class actualType = property.getActualType(); - String findAllByProperty = sql(actualType).getFindAllByProperty(property.getReverseColumnName(), - property.getKeyColumn(), property.isOrdered()); - - MapSqlParameterSource parameter = new MapSqlParameterSource(property.getReverseColumnName(), rootId); - - return (Iterable) operations.query(findAllByProperty, parameter, property.isMap() // - ? 
getMapEntityRowMapper(property) // - : getEntityRowMapper(actualType)); - } - - @Override - public boolean existsById(Object id, Class domainType) { - - String existsSql = sql(domainType).getExists(); - MapSqlParameterSource parameter = createIdParameterSource(id, domainType); - return operations.queryForObject(existsSql, parameter, Boolean.class); - } - - private MapSqlParameterSource getPropertyMap(final S instance, JdbcPersistentEntity persistentEntity) { - - MapSqlParameterSource parameters = new MapSqlParameterSource(); - - persistentEntity.doWithProperties((PropertyHandler) property -> { - if (!property.isEntity()) { - Object value = persistentEntity.getPropertyAccessor(instance).getProperty(property); - - Object convertedValue = convert(value, property.getColumnType()); - parameters.addValue(property.getColumnName(), convertedValue, JdbcUtil.sqlTypeFor(property.getColumnType())); - } - }); - - return parameters; - } - - @SuppressWarnings("unchecked") - private ID getIdValueOrNull(S instance, JdbcPersistentEntity persistentEntity) { - - EntityInformation entityInformation = (EntityInformation) context.getRequiredPersistentEntityInformation(persistentEntity.getType()); - - ID idValue = entityInformation.getId(instance); - - return isIdPropertyNullOrScalarZero(idValue, persistentEntity) ? 
null : idValue; - } - - private boolean isIdPropertyNullOrScalarZero(ID idValue, JdbcPersistentEntity persistentEntity) { - - JdbcPersistentProperty idProperty = persistentEntity.getIdProperty(); - return idValue == null // - || idProperty == null // - || (idProperty.getType() == int.class && idValue.equals(0)) // - || (idProperty.getType() == long.class && idValue.equals(0L)); - } - - private void setIdFromJdbc(S instance, KeyHolder holder, JdbcPersistentEntity persistentEntity) { - - JdbcPersistentEntityInformation entityInformation = new BasicJdbcPersistentEntityInformation<>( - persistentEntity); - - try { - - getIdFromHolder(holder, persistentEntity).ifPresent(it -> { - - Class targetType = persistentEntity.getRequiredIdProperty().getType(); - Object converted = convert(it, targetType); - entityInformation.setId(instance, converted); - }); - - } catch (NonTransientDataAccessException e) { - throw new UnableToSetId("Unable to set id of " + instance, e); - } - } - - private Optional getIdFromHolder(KeyHolder holder, JdbcPersistentEntity persistentEntity) { - - try { - // MySQL just returns one value with a special name - return Optional.ofNullable(holder.getKey()); - } catch (InvalidDataAccessApiUsageException e) { - // Postgres returns a value for each column - return Optional.ofNullable(holder.getKeys().get(persistentEntity.getIdColumn())); - } - } - - public EntityRowMapper getEntityRowMapper(Class domainType) { - return new EntityRowMapper<>(getRequiredPersistentEntity(domainType), context.getConversions(), context, accessStrategy); - } - - private RowMapper getMapEntityRowMapper(JdbcPersistentProperty property) { - return new MapEntityRowMapper(getEntityRowMapper(property.getActualType()), property.getKeyColumn()); - } - - private MapSqlParameterSource createIdParameterSource(Object id, Class domainType) { - - return new MapSqlParameterSource("id", - convert(id, getRequiredPersistentEntity(domainType).getRequiredIdProperty().getColumnType())); - } - - 
@SuppressWarnings("unchecked") - private JdbcPersistentEntity getRequiredPersistentEntity(Class domainType) { - return (JdbcPersistentEntity) context.getRequiredPersistentEntity(domainType); - } - - private V convert(Object from, Class to) { - - if (from == null) { - return null; - } - - JdbcPersistentEntity persistentEntity = context.getPersistentEntity(from.getClass()); - - Object id = persistentEntity == null ? null : persistentEntity.getIdentifierAccessor(from).getIdentifier(); - - return context.getConversions().convert(id == null ? from : id, to); - } - - private SqlGenerator sql(Class domainType) { - return sqlGeneratorSource.getSqlGenerator(domainType); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/DefaultJdbcInterpreter.java b/src/main/java/org/springframework/data/jdbc/core/DefaultJdbcInterpreter.java deleted file mode 100644 index 132a91e955..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/DefaultJdbcInterpreter.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import java.util.HashMap; -import java.util.Map; - -import org.springframework.data.jdbc.core.conversion.DbAction; -import org.springframework.data.jdbc.core.conversion.DbAction.Delete; -import org.springframework.data.jdbc.core.conversion.DbAction.DeleteAll; -import org.springframework.data.jdbc.core.conversion.DbAction.Insert; -import org.springframework.data.jdbc.core.conversion.DbAction.Update; -import org.springframework.data.jdbc.core.conversion.Interpreter; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.util.Assert; - -/** - * {@link Interpreter} for {@link DbAction}s using a {@link DataAccessStrategy} for performing actual database - * interactions. - * - * @author Jens Schauder - */ -class DefaultJdbcInterpreter implements Interpreter { - - private final JdbcMappingContext context; - private final DataAccessStrategy accessStrategy; - - DefaultJdbcInterpreter(JdbcMappingContext context, DataAccessStrategy accessStrategy) { - - this.context = context; - this.accessStrategy = accessStrategy; - } - - @Override - public void interpret(Insert insert) { - accessStrategy.insert(insert.getEntity(), insert.getEntityType(), createAdditionalColumnValues(insert)); - } - - @Override - public void interpret(Update update) { - accessStrategy.update(update.getEntity(), update.getEntityType()); - } - - @Override - public void interpret(Delete delete) { - - if (delete.getPropertyPath() == null) { - accessStrategy.delete(delete.getRootId(), delete.getEntityType()); - } else { - accessStrategy.delete(delete.getRootId(), delete.getPropertyPath().getPath()); - } - } - - @Override - public void interpret(DeleteAll delete) { - - if (delete.getEntityType() == null) { - accessStrategy.deleteAll(delete.getPropertyPath().getPath()); - } else { - 
accessStrategy.deleteAll(delete.getEntityType()); - } - } - - private Map createAdditionalColumnValues(Insert insert) { - - Map additionalColumnValues = new HashMap<>(); - addDependingOnInformation(insert, additionalColumnValues); - additionalColumnValues.putAll(insert.getAdditionalValues()); - - return additionalColumnValues; - } - - private void addDependingOnInformation(Insert insert, Map additionalColumnValues) { - - DbAction dependingOn = insert.getDependingOn(); - - if (dependingOn == null) { - return; - } - - JdbcPersistentEntity persistentEntity = context.getRequiredPersistentEntity(dependingOn.getEntityType()); - - String columnName = getColumnNameForReverseColumn(insert, persistentEntity); - - Object identifier = getIdFromEntityDependingOn(dependingOn, persistentEntity); - - additionalColumnValues.put(columnName, identifier); - } - - private Object getIdFromEntityDependingOn(DbAction dependingOn, JdbcPersistentEntity persistentEntity) { - return persistentEntity.getIdentifierAccessor(dependingOn.getEntity()).getIdentifier(); - } - - private String getColumnNameForReverseColumn(Insert insert, JdbcPersistentEntity persistentEntity) { - - PropertyPath path = insert.getPropertyPath().getPath(); - - Assert.notNull(path, "There shouldn't be an insert depending on another insert without having a PropertyPath."); - - return persistentEntity.getRequiredPersistentProperty(path.getSegment()).getReverseColumnName(); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/DelegatingDataAccessStrategy.java b/src/main/java/org/springframework/data/jdbc/core/DelegatingDataAccessStrategy.java deleted file mode 100644 index 90b05828cb..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/DelegatingDataAccessStrategy.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import java.util.Map; - -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.util.Assert; - -/** - * delegates all method calls to an instance set after construction. This is useful for {@link DataAccessStrategy}s with - * cyclical dependencies. - * - * @author Jens Schauder - */ -public class DelegatingDataAccessStrategy implements DataAccessStrategy { - - private DataAccessStrategy delegate; - - @Override - public void insert(T instance, Class domainType, Map additionalParameters) { - delegate.insert(instance, domainType, additionalParameters); - } - - @Override - public void update(S instance, Class domainType) { - delegate.update(instance, domainType); - } - - @Override - public void delete(Object rootId, PropertyPath propertyPath) { - delegate.delete(rootId, propertyPath); - } - - @Override - public void delete(Object id, Class domainType) { - delegate.delete(id, domainType); - } - - @Override - public void deleteAll(Class domainType) { - delegate.deleteAll(domainType); - } - - @Override - public void deleteAll(PropertyPath propertyPath) { - delegate.deleteAll(propertyPath); - } - - @Override - public long count(Class domainType) { - return delegate.count(domainType); - } - - @Override - public T findById(Object id, Class domainType) { - - 
Assert.notNull(delegate, "Delegate is null"); - - return delegate.findById(id, domainType); - } - - @Override - public Iterable findAll(Class domainType) { - return delegate.findAll(domainType); - } - - @Override - public Iterable findAllById(Iterable ids, Class domainType) { - return delegate.findAllById(ids, domainType); - } - - @Override - public Iterable findAllByProperty(Object rootId, JdbcPersistentProperty property) { - - Assert.notNull(delegate, "Delegate is null"); - - return delegate.findAllByProperty(rootId, property); - } - - @Override - public boolean existsById(Object id, Class domainType) { - return delegate.existsById(id, domainType); - } - - /** - * Must be called exactly once before calling any of the other methods. - * - * @param delegate Must not be {@literal null} - */ - public void setDelegate(DataAccessStrategy delegate) { - - Assert.isNull(this.delegate, "The delegate must be set exactly once"); - Assert.notNull(delegate, "The delegate must not be set to null"); - - this.delegate = delegate; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/EntityRowMapper.java b/src/main/java/org/springframework/data/jdbc/core/EntityRowMapper.java deleted file mode 100644 index 73b28b155d..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/EntityRowMapper.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import org.springframework.core.convert.ConversionService; -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.ClassGeneratingEntityInstantiator; -import org.springframework.data.convert.EntityInstantiator; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.mapping.MappingException; -import org.springframework.data.mapping.PersistentProperty; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PreferredConstructor.Parameter; -import org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mapping.model.ParameterValueProvider; -import org.springframework.jdbc.core.RowMapper; - -import java.sql.ResultSet; -import java.sql.SQLException; - -/** - * Maps a ResultSet to an entity of type {@code T}, including entities referenced. 
- * - * @author Jens Schauder - * @author Oliver Gierke - * @since 2.0 - */ -public class EntityRowMapper implements RowMapper { - - private static final Converter ITERABLE_OF_ENTRY_TO_MAP_CONVERTER = new IterableOfEntryToMapConverter(); - - private final JdbcPersistentEntity entity; - private final EntityInstantiator instantiator = new ClassGeneratingEntityInstantiator(); - private final ConversionService conversions; - private final JdbcMappingContext context; - private final DataAccessStrategy accessStrategy; - private final JdbcPersistentProperty idProperty; - - public EntityRowMapper(JdbcPersistentEntity entity, ConversionService conversions, JdbcMappingContext context, - DataAccessStrategy accessStrategy) { - - this.entity = entity; - this.conversions = conversions; - this.context = context; - this.accessStrategy = accessStrategy; - - idProperty = entity.getRequiredIdProperty(); - } - - /* - * (non-Javadoc) - * @see org.springframework.jdbc.core.RowMapper#mapRow(java.sql.ResultSet, int) - */ - @Override - public T mapRow(ResultSet resultSet, int rowNumber) throws SQLException { - - T result = createInstance(resultSet); - - ConvertingPropertyAccessor propertyAccessor = new ConvertingPropertyAccessor(entity.getPropertyAccessor(result), - conversions); - - Object id = readFrom(resultSet, idProperty, ""); - - for (JdbcPersistentProperty property : entity) { - - if (property.isCollectionLike()) { - propertyAccessor.setProperty(property, accessStrategy.findAllByProperty(id, property)); - } else if (property.isMap()) { - - Iterable allByProperty = accessStrategy.findAllByProperty(id, property); - propertyAccessor.setProperty(property, ITERABLE_OF_ENTRY_TO_MAP_CONVERTER.convert(allByProperty)); - } else { - propertyAccessor.setProperty(property, readFrom(resultSet, property, "")); - } - } - - return result; - } - - private T createInstance(ResultSet rs) { - return instantiator.createInstance(entity, new ResultSetParameterValueProvider(rs, entity, conversions, "")); - 
} - - private Object readFrom(ResultSet resultSet, JdbcPersistentProperty property, String prefix) { - - try { - - if (property.isEntity()) { - return readEntityFrom(resultSet, property); - } - - return resultSet.getObject(prefix + property.getColumnName()); - } catch (SQLException o_O) { - throw new MappingException(String.format("Could not read property %s from result set!", property), o_O); - } - } - - private S readEntityFrom(ResultSet rs, PersistentProperty property) { - - String prefix = property.getName() + "_"; - - @SuppressWarnings("unchecked") - JdbcPersistentEntity entity = (JdbcPersistentEntity) context - .getRequiredPersistentEntity(property.getActualType()); - - if (readFrom(rs, entity.getRequiredIdProperty(), prefix) == null) { - return null; - } - - S instance = instantiator.createInstance(entity, new ResultSetParameterValueProvider(rs, entity, conversions, prefix)); - - PersistentPropertyAccessor accessor = entity.getPropertyAccessor(instance); - ConvertingPropertyAccessor propertyAccessor = new ConvertingPropertyAccessor(accessor, conversions); - - for (JdbcPersistentProperty p : entity) { - propertyAccessor.setProperty(p, readFrom(rs, p, prefix)); - } - - return instance; - } - - @RequiredArgsConstructor - private static class ResultSetParameterValueProvider implements ParameterValueProvider { - - @NonNull - private final ResultSet resultSet; - @NonNull - private final JdbcPersistentEntity entity; - @NonNull - private final ConversionService conversionService; - @NonNull - private final String prefix; - - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.ParameterValueProvider#getParameterValue(org.springframework.data.mapping.PreferredConstructor.Parameter) - */ - @Override - public T getParameterValue(Parameter parameter) { - - String column = prefix + entity.getRequiredPersistentProperty(parameter.getName()).getColumnName(); - - try { - return conversionService.convert(resultSet.getObject(column), 
parameter.getType().getType()); - } catch (SQLException o_O) { - throw new MappingException(String.format("Couldn't read column %s from ResultSet.", column), o_O); - } - } - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/EventPublishingEntityRowMapper.java b/src/main/java/org/springframework/data/jdbc/core/EventPublishingEntityRowMapper.java deleted file mode 100644 index 6d128bd5d1..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/EventPublishingEntityRowMapper.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -import java.sql.ResultSet; -import java.sql.SQLException; - -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.data.jdbc.mapping.event.AfterCreation; -import org.springframework.data.jdbc.mapping.event.Identifier; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; -import org.springframework.jdbc.core.RowMapper; - -/** - * A {@link RowMapper} that publishes events after a delegate, did the actual work of mapping a {@link ResultSet} to an - * entityInformation. 
- * - * @author Jens Schauder - * @since 2.0 - */ -@RequiredArgsConstructor -public class EventPublishingEntityRowMapper implements RowMapper { - - private final @NonNull RowMapper delegate; - private final @NonNull JdbcPersistentEntityInformation entityInformation; - private final @NonNull ApplicationEventPublisher publisher; - - /* - * (non-Javadoc) - * @see org.springframework.jdbc.core.RowMapper#mapRow(java.sql.ResultSet, int) - */ - @Override - public T mapRow(ResultSet resultSet, int i) throws SQLException { - - T instance = delegate.mapRow(resultSet, i); - - publisher.publishEvent(new AfterCreation(Identifier.of(entityInformation.getRequiredId(instance)), instance, null)); - - return instance; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/JdbcEntityOperations.java b/src/main/java/org/springframework/data/jdbc/core/JdbcEntityOperations.java deleted file mode 100644 index e8b5ccaf0e..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/JdbcEntityOperations.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -/** - * Specifies a operations one can perform on a database, based on an Domain Type. 
- * - * @author Jens Schauder - */ -public interface JdbcEntityOperations { - - void save(T instance, Class domainType); - - void deleteById(Object id, Class domainType); - - void delete(T entity, Class domainType); - - void deleteAll(Class domainType); - - long count(Class domainType); - - T findById(Object id, Class domainType); - - Iterable findAllById(Iterable ids, Class domainType); - - Iterable findAll(Class domainType); - - boolean existsById(Object id, Class domainType); - -} diff --git a/src/main/java/org/springframework/data/jdbc/core/JdbcEntityTemplate.java b/src/main/java/org/springframework/data/jdbc/core/JdbcEntityTemplate.java deleted file mode 100644 index b00d022b55..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/JdbcEntityTemplate.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import java.util.Optional; - -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.core.conversion.AggregateChange.Kind; -import org.springframework.data.jdbc.core.conversion.Interpreter; -import org.springframework.data.jdbc.core.conversion.JdbcEntityDeleteWriter; -import org.springframework.data.jdbc.core.conversion.JdbcEntityWriter; -import org.springframework.data.jdbc.mapping.event.AfterDelete; -import org.springframework.data.jdbc.mapping.event.AfterSave; -import org.springframework.data.jdbc.mapping.event.BeforeDelete; -import org.springframework.data.jdbc.mapping.event.BeforeSave; -import org.springframework.data.jdbc.mapping.event.Identifier; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; - -/** - * {@link JdbcEntityOperations} implementation, storing aggregates in and obtaining them from a JDBC data store. 
- * - * @author Jens Schauder - */ -public class JdbcEntityTemplate implements JdbcEntityOperations { - - private final ApplicationEventPublisher publisher; - private final JdbcMappingContext context; - private final Interpreter interpreter; - - private final JdbcEntityWriter jdbcEntityWriter; - private final JdbcEntityDeleteWriter jdbcEntityDeleteWriter; - - private final DataAccessStrategy accessStrategy; - - public JdbcEntityTemplate(ApplicationEventPublisher publisher, JdbcMappingContext context, - DataAccessStrategy dataAccessStrategy) { - - this.publisher = publisher; - this.context = context; - - this.jdbcEntityWriter = new JdbcEntityWriter(context); - this.jdbcEntityDeleteWriter = new JdbcEntityDeleteWriter(context); - this.accessStrategy = dataAccessStrategy; - this.interpreter = new DefaultJdbcInterpreter(context, accessStrategy); - } - - @Override - public void save(T instance, Class domainType) { - - JdbcPersistentEntityInformation entityInformation = context - .getRequiredPersistentEntityInformation(domainType); - - AggregateChange change = createChange(instance); - - publisher.publishEvent(new BeforeSave( // - Identifier.ofNullable(entityInformation.getId(instance)), // - instance, // - change // - )); - - change.executeWith(interpreter); - - publisher.publishEvent(new AfterSave( // - Identifier.of(entityInformation.getId(instance)), // - instance, // - change // - )); - } - - @Override - public long count(Class domainType) { - return accessStrategy.count(domainType); - } - - @Override - public T findById(Object id, Class domainType) { - return accessStrategy.findById(id, domainType); - } - - @Override - public boolean existsById(Object id, Class domainType) { - return accessStrategy.existsById(id, domainType); - } - - @Override - public Iterable findAll(Class domainType) { - return accessStrategy.findAll(domainType); - } - - @Override - public Iterable findAllById(Iterable ids, Class domainType) { - return accessStrategy.findAllById(ids, domainType); 
- } - - @Override - public void delete(S entity, Class domainType) { - - JdbcPersistentEntityInformation entityInformation = context - .getRequiredPersistentEntityInformation(domainType); - deleteTree(entityInformation.getRequiredId(entity), entity, domainType); - } - - @Override - public void deleteById(Object id, Class domainType) { - deleteTree(id, null, domainType); - } - - @Override - public void deleteAll(Class domainType) { - - AggregateChange change = createDeletingChange(domainType); - change.executeWith(interpreter); - } - - private void deleteTree(Object id, Object entity, Class domainType) { - - AggregateChange change = createDeletingChange(id, entity, domainType); - - Specified specifiedId = Identifier.of(id); - Optional optionalEntity = Optional.ofNullable(entity); - publisher.publishEvent(new BeforeDelete(specifiedId, optionalEntity, change)); - - change.executeWith(interpreter); - - publisher.publishEvent(new AfterDelete(specifiedId, optionalEntity, change)); - } - - private AggregateChange createChange(T instance) { - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, instance.getClass(), instance); - jdbcEntityWriter.write(instance, aggregateChange); - return aggregateChange; - } - - private AggregateChange createDeletingChange(Object id, Object entity, Class domainType) { - - AggregateChange aggregateChange = new AggregateChange(Kind.DELETE, domainType, entity); - jdbcEntityDeleteWriter.write(id, aggregateChange); - return aggregateChange; - } - - private AggregateChange createDeletingChange(Class domainType) { - - AggregateChange aggregateChange = new AggregateChange(Kind.DELETE, domainType, null); - jdbcEntityDeleteWriter.write(null, aggregateChange); - return aggregateChange; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/MapEntityRowMapper.java b/src/main/java/org/springframework/data/jdbc/core/MapEntityRowMapper.java deleted file mode 100644 index da8e744635..0000000000 --- 
a/src/main/java/org/springframework/data/jdbc/core/MapEntityRowMapper.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.HashMap; -import java.util.Map; - -import org.springframework.jdbc.core.RowMapper; -import org.springframework.lang.Nullable; - -/** - * A {@link RowMapper} that maps a row to a {@link Map.Entry} so an {@link Iterable} of those can be converted to a - * {@link Map} using an {@link IterableOfEntryToMapConverter}. Creation of the {@literal value} part of the resulting - * {@link Map.Entry} is delegated to a {@link RowMapper} provided in the constructor. 
- * - * @author Jens Schauder - */ -class MapEntityRowMapper implements RowMapper> { - - private final RowMapper delegate; - private final String keyColumn; - - MapEntityRowMapper(RowMapper delegate, String keyColumn) { - - this.delegate = delegate; - this.keyColumn = keyColumn; - } - - @Nullable - @Override - public Map.Entry mapRow(ResultSet rs, int rowNum) throws SQLException { - return new HashMap.SimpleEntry<>(rs.getObject(keyColumn), delegate.mapRow(rs, rowNum)); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/SelectBuilder.java b/src/main/java/org/springframework/data/jdbc/core/SelectBuilder.java deleted file mode 100644 index 2088bbbc4f..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/SelectBuilder.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import lombok.Builder; - -import java.util.ArrayList; -import java.util.List; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * Builder for creating Select-statements. Not intended for general purpose, but only for the needs of the - * {@link JdbcEntityTemplate}. 
- * - * @author Jens Schauder - */ -class SelectBuilder { - - private final List columns = new ArrayList<>(); - private final String tableName; - private final List joins = new ArrayList<>(); - private final List conditions = new ArrayList<>(); - - SelectBuilder(String tableName) { - - this.tableName = tableName; - } - - SelectBuilder column(Function columnSpec) { - - columns.add(columnSpec.apply(Column.builder()).build()); - return this; - } - - SelectBuilder where(Function whereSpec) { - - conditions.add(whereSpec.apply(new WhereConditionBuilder(this)).build()); - return this; - } - - SelectBuilder join(Function joinSpec) { - - joins.add(joinSpec.apply(Join.builder()).build()); - return this; - } - - String build() { - - return selectFrom() + joinClause() + whereClause(); - } - - private String whereClause() { - - if (conditions.isEmpty()) { - return ""; - } - - return conditions.stream() // - .map(WhereCondition::toSql) // - .collect(Collectors.joining("AND", " WHERE ", "") // - ); - } - - private String joinClause() { - return joins.stream().map(j -> joinTable(j) + joinConditions(j)).collect(Collectors.joining(" ")); - } - - private String joinTable(Join j) { - return String.format("%s JOIN %s AS %s", j.outerJoinModifier(), j.table, j.as); - } - - private String joinConditions(Join j) { - - return j.conditions.stream() // - .map(w -> String.format("%s %s %s", w.fromExpression, w.operation, w.toExpression)) // - .collect(Collectors.joining(" AND ", " ON ", "")); - } - - private String selectFrom() { - - return columns.stream() // - .map(Column::columnDefinition) // - .collect(Collectors.joining(", ", "SELECT ", " FROM " + tableName)); - } - - static class WhereConditionBuilder { - - private String fromTable; - private String fromColumn; - private final SelectBuilder selectBuilder; - - private String operation = "="; - private String expression; - - WhereConditionBuilder(SelectBuilder selectBuilder) { - this.selectBuilder = selectBuilder; - } - - 
WhereConditionBuilder eq() { - - this.operation = "="; - return this; - } - - public WhereConditionBuilder in() { - - this.operation = "in"; - return this; - } - - WhereConditionBuilder tableAlias(String fromTable) { - - this.fromTable = fromTable; - return this; - } - - WhereConditionBuilder column(String fromColumn) { - - this.fromColumn = fromColumn; - return this; - } - - WhereConditionBuilder variable(String var) { - - this.expression = ":" + var; - return this; - } - - WhereCondition build() { - return new WhereCondition(fromTable + "." + fromColumn, operation, expression); - } - - } - - static class Join { - - private final String table; - private final String as; - private final Outer outer; - private final List conditions = new ArrayList<>(); - - Join(String table, String as, List conditions, Outer outer) { - - this.table = table; - this.as = as; - this.outer = outer; - this.conditions.addAll(conditions); - } - - static JoinBuilder builder() { - return new JoinBuilder(); - } - - private String outerJoinModifier() { - - switch (outer) { - case NONE: - return ""; - default: - return String.format(" %s OUTER", outer.name()); - - } - } - - public static class JoinBuilder { - - private String table; - private String as; - private List conditions = new ArrayList<>(); - private Outer outer = Outer.NONE; - - JoinBuilder() {} - - public Join.JoinBuilder table(String table) { - - this.table = table; - return this; - } - - public Join.JoinBuilder as(String as) { - - this.as = as; - return this; - } - - WhereConditionBuilder where(String column) { - return new WhereConditionBuilder(this, column); - } - - private JoinBuilder where(WhereCondition condition) { - - conditions.add(condition); - return this; - } - - Join build() { - return new Join(table, as, conditions, outer); - } - - public String toString() { - return "org.springframework.data.jdbc.core.SelectBuilder.Join.JoinBuilder(table=" + this.table + ", as=" - + this.as + ")"; - } - - JoinBuilder rightOuter() { - 
- outer = Outer.RIGHT; - return this; - } - - JoinBuilder leftOuter() { - outer = Outer.LEFT; - return this; - } - - static class WhereConditionBuilder { - - private final JoinBuilder joinBuilder; - private final String fromColumn; - - private String operation = "="; - - WhereConditionBuilder(JoinBuilder joinBuilder, String column) { - - this.joinBuilder = joinBuilder; - this.fromColumn = column; - } - - WhereConditionBuilder eq() { - operation = "="; - return this; - } - - JoinBuilder column(String table, String column) { - - return joinBuilder.where(new WhereCondition( // - joinBuilder.as + "." + fromColumn, // - operation, // - table + "." + column // - )); - - } - - } - - } - - private enum Outer { - NONE, RIGHT, LEFT - } - } - - static class WhereCondition { - - private final String operation; - private final String fromExpression; - private final String toExpression; - - WhereCondition(String fromExpression, String operation, String toExpression) { - - this.fromExpression = fromExpression; - this.toExpression = toExpression; - this.operation = operation; - } - - String toSql() { - - if (operation.equals("in")) { - return String.format("%s %s(%s)", fromExpression, operation, toExpression); - } - - return String.format("%s %s %s", fromExpression, operation, toExpression); - } - } - - @Builder - static class Column { - - private final String tableAlias; - private final String column; - private final String as; - - String columnDefinition() { - StringBuilder b = new StringBuilder(); - if (tableAlias != null) - b.append(tableAlias).append('.'); - b.append(column); - if (as != null) - b.append(" AS ").append(as); - return b.toString(); - } - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/SqlGenerator.java b/src/main/java/org/springframework/data/jdbc/core/SqlGenerator.java deleted file mode 100644 index 369fc9c1cc..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/SqlGenerator.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * 
Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.jdbc.repository.SimpleJdbcRepository; -import org.springframework.data.mapping.PropertyHandler; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.util.Lazy; -import org.springframework.data.util.StreamUtils; -import org.springframework.util.Assert; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Generates SQL statements to be used by {@link SimpleJdbcRepository} - * - * @author Jens Schauder - * @since 2.0 - */ -class SqlGenerator { - - private final JdbcPersistentEntity entity; - private final JdbcMappingContext context; - private final List columnNames = new ArrayList<>(); - private final List nonIdColumnNames = new ArrayList<>(); - - private final Lazy findOneSql = Lazy.of(this::createFindOneSelectSql); - private final Lazy findAllSql = Lazy.of(this::createFindAllSql); - private final Lazy findAllInListSql = Lazy.of(this::createFindAllInListSql); - - 
private final Lazy existsSql = Lazy.of(this::createExistsSql); - private final Lazy countSql = Lazy.of(this::createCountSql); - - private final Lazy updateSql = Lazy.of(this::createUpdateSql); - - private final Lazy deleteByIdSql = Lazy.of(this::createDeleteSql); - private final Lazy deleteByListSql = Lazy.of(this::createDeleteByListSql); - private final SqlGeneratorSource sqlGeneratorSource; - - SqlGenerator(JdbcMappingContext context, JdbcPersistentEntity entity, SqlGeneratorSource sqlGeneratorSource) { - - this.context = context; - this.entity = entity; - this.sqlGeneratorSource = sqlGeneratorSource; - initColumnNames(); - } - - private void initColumnNames() { - - entity.doWithProperties((PropertyHandler) p -> { - // the referencing column of referenced entity is expected to be on the other side of the relation - if (!p.isEntity()) { - columnNames.add(p.getColumnName()); - if (!entity.isIdProperty(p)) { - nonIdColumnNames.add(p.getColumnName()); - } - } - }); - } - - String getFindAllInList() { - return findAllInListSql.get(); - } - - String getFindAll() { - return findAllSql.get(); - } - - /** - * Returns a query for selecting all simple properties of an entity, including those for one-to-one relationships. - * Results are limited to those rows referencing some other entity using the column specified by - * {@literal columnName}. This is used to select values for a complex property ({@link Set}, {@link Map} ...) based on - * a referencing entity. - * - * @param columnName name of the column of the FK back to the referencing entity. - * @param keyColumn if the property is of type {@link Map} this column contains the map key. - * @param ordered whether the SQL statement should include an ORDER BY for the keyColumn. If this is {@literal true}, the keyColumn must not be {@literal null}. - * @return a SQL String. 
- */ - String getFindAllByProperty(String columnName, String keyColumn, boolean ordered) { - - Assert.isTrue(keyColumn != null || !ordered, "If the SQL statement should be ordered a keyColumn to order by must be provided."); - - String baseSelect = (keyColumn != null) // - ? createSelectBuilder().column(cb -> cb.tableAlias(entity.getTableName()).column(keyColumn).as(keyColumn)) - .build() - : getFindAll(); - - String orderBy = ordered ? " ORDER BY " + keyColumn : ""; - - return String.format("%s WHERE %s = :%s%s", baseSelect, columnName, columnName, orderBy); - } - - String getExists() { - return existsSql.get(); - } - - String getFindOne() { - return findOneSql.get(); - } - - String getInsert(boolean excludeId, Set additionalColumns) { - return createInsertSql(excludeId, additionalColumns); - } - - String getUpdate() { - return updateSql.get(); - } - - String getCount() { - return countSql.get(); - } - - String getDeleteById() { - return deleteByIdSql.get(); - } - - String getDeleteByList() { - return deleteByListSql.get(); - } - - private String createFindOneSelectSql() { - - return createSelectBuilder() // - .where(wb -> wb.tableAlias(entity.getTableName()).column(entity.getIdColumn()).eq().variable("id")) // - .build(); - } - - private SelectBuilder createSelectBuilder() { - - SelectBuilder builder = new SelectBuilder(entity.getTableName()); - addColumnsForSimpleProperties(builder); - addColumnsAndJoinsForOneToOneReferences(builder); - - return builder; - } - - /** - * Adds the columns to the provided {@link SelectBuilder} representing simplem properties, including those from - * one-to-one relationships. - * - * @param builder The {@link SelectBuilder} to be modified. 
- */ - private void addColumnsAndJoinsForOneToOneReferences(SelectBuilder builder) { - - for (JdbcPersistentProperty property : entity) { - if (!property.isEntity() // - || Collection.class.isAssignableFrom(property.getType()) // - || Map.class.isAssignableFrom(property.getType()) // - ) { - continue; - } - - JdbcPersistentEntity refEntity = context.getRequiredPersistentEntity(property.getActualType()); - String joinAlias = property.getName(); - builder.join(jb -> jb.leftOuter().table(refEntity.getTableName()).as(joinAlias) // - .where(property.getReverseColumnName()).eq().column(entity.getTableName(), entity.getIdColumn())); - - for (JdbcPersistentProperty refProperty : refEntity) { - builder.column( // - cb -> cb.tableAlias(joinAlias) // - .column(refProperty.getColumnName()) // - .as(joinAlias + "_" + refProperty.getColumnName()) // - ); - } - } - } - - private void addColumnsForSimpleProperties(SelectBuilder builder) { - - for (JdbcPersistentProperty property : entity) { - - if (property.isEntity()) { - continue; - } - - builder.column(cb -> cb // - .tableAlias(entity.getTableName()) // - .column(property.getColumnName()) // - .as(property.getColumnName())); - } - } - - private Stream getColumnNameStream(String prefix) { - - return StreamUtils.createStreamFromIterator(entity.iterator()) // - .flatMap(p -> getColumnNameStream(p, prefix)); - } - - private Stream getColumnNameStream(JdbcPersistentProperty p, String prefix) { - - if (p.isEntity()) { - return sqlGeneratorSource.getSqlGenerator(p.getType()).getColumnNameStream(prefix + p.getColumnName() + "_"); - } else { - return Stream.of(prefix + p.getColumnName()); - } - } - - private String createFindAllSql() { - return createSelectBuilder().build(); - } - - private String createFindAllInListSql() { - - return createSelectBuilder() // - .where(wb -> wb.tableAlias(entity.getTableName()).column(entity.getIdColumn()).in().variable("ids")) // - .build(); - } - - private String createExistsSql() { - return 
String.format("select count(*) from %s where %s = :id", entity.getTableName(), entity.getIdColumn()); - } - - private String createCountSql() { - return String.format("select count(*) from %s", entity.getTableName()); - } - - private String createInsertSql(boolean excludeId, Set additionalColumns) { - - String insertTemplate = "insert into %s (%s) values (%s)"; - - List columnNamesForInsert = new ArrayList<>(excludeId ? nonIdColumnNames : columnNames); - columnNamesForInsert.addAll(additionalColumns); - - String tableColumns = String.join(", ", columnNamesForInsert); - String parameterNames = columnNamesForInsert.stream().collect(Collectors.joining(", :", ":", "")); - - return String.format(insertTemplate, entity.getTableName(), tableColumns, parameterNames); - } - - private String createUpdateSql() { - - String updateTemplate = "update %s set %s where %s = :%s"; - - String setClause = columnNames.stream()// - .map(n -> String.format("%s = :%s", n, n))// - .collect(Collectors.joining(", ")); - - return String.format(updateTemplate, entity.getTableName(), setClause, entity.getIdColumn(), entity.getIdColumn()); - } - - private String createDeleteSql() { - return String.format("DELETE from %s where %s = :id", entity.getTableName(), entity.getIdColumn()); - } - - String createDeleteAllSql(PropertyPath path) { - - if (path == null) { - return String.format("DELETE FROM %s", entity.getTableName()); - } - - JdbcPersistentEntity entityToDelete = context.getRequiredPersistentEntity(path.getLeafType()); - - JdbcPersistentEntity owningEntity = context.getRequiredPersistentEntity(path.getOwningType()); - JdbcPersistentProperty property = owningEntity.getRequiredPersistentProperty(path.getSegment()); - - String innerMostCondition = String.format("%s IS NOT NULL", property.getReverseColumnName()); - - String condition = cascadeConditions(innerMostCondition, path.next()); - - return String.format("DELETE FROM %s WHERE %s", entityToDelete.getTableName(), condition); - } - - 
private String createDeleteByListSql() { - return String.format("DELETE FROM %s WHERE %s IN (:ids)", entity.getTableName(), entity.getIdColumn()); - } - - String createDeleteByPath(PropertyPath path) { - - JdbcPersistentEntity entityToDelete = context.getRequiredPersistentEntity(path.getLeafType()); - JdbcPersistentEntity owningEntity = context.getRequiredPersistentEntity(path.getOwningType()); - JdbcPersistentProperty property = owningEntity.getRequiredPersistentProperty(path.getSegment()); - - String innerMostCondition = String.format("%s = :rootId", property.getReverseColumnName()); - - String condition = cascadeConditions(innerMostCondition, path.next()); - - return String.format("DELETE FROM %s WHERE %s", entityToDelete.getTableName(), condition); - } - - private String cascadeConditions(String innerCondition, PropertyPath path) { - - if (path == null) { - return innerCondition; - } - - JdbcPersistentEntity entity = context.getRequiredPersistentEntity(path.getOwningType()); - JdbcPersistentProperty property = entity.getPersistentProperty(path.getSegment()); - - Assert.notNull(property, "could not find property for path " + path.getSegment() + " in " + entity); - - return String.format("%s IN (SELECT %s FROM %s WHERE %s)", // - property.getReverseColumnName(), // - entity.getIdColumn(), // - entity.getTableName(), innerCondition // - ); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/SqlGeneratorSource.java b/src/main/java/org/springframework/data/jdbc/core/SqlGeneratorSource.java deleted file mode 100644 index 9e9c4ee811..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/SqlGeneratorSource.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import lombok.RequiredArgsConstructor; - -import java.util.HashMap; -import java.util.Map; - -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; - -/** - * Provides {@link SqlGenerator}s per domain type. Instances get cached, so when asked multiple times for the same domain - * type, the same generator will get returned. - * - * @author Jens Schauder - */ -@RequiredArgsConstructor -public class SqlGeneratorSource { - - private final Map sqlGeneratorCache = new HashMap<>(); - private final JdbcMappingContext context; - - SqlGenerator getSqlGenerator(Class domainType) { - - return sqlGeneratorCache.computeIfAbsent(domainType, - t -> new SqlGenerator(context, context.getRequiredPersistentEntity(t), this)); - - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/AggregateChange.java b/src/main/java/org/springframework/data/jdbc/core/conversion/AggregateChange.java deleted file mode 100644 index 5c5ae9e52d..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/AggregateChange.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core.conversion; - -import lombok.Getter; -import lombok.RequiredArgsConstructor; - -import java.util.ArrayList; -import java.util.List; - -/** - * Represents the change happening to the aggregate (as used in the context of Domain Driven Design) as a whole. - * - * @author Jens Schauder - */ -@RequiredArgsConstructor -@Getter -public class AggregateChange { - - private final Kind kind; - - /** Type of the aggregate root to be changed */ - private final Class entityType; - - /** Aggregate root, to which the change applies, if available */ - private final T entity; - - private final List actions = new ArrayList<>(); - - public void executeWith(Interpreter interpreter) { - actions.forEach(a -> a.executeWith(interpreter)); - } - - public void addAction(DbAction action) { - actions.add(action); - } - - public enum Kind { - SAVE, DELETE - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/DbAction.java b/src/main/java/org/springframework/data/jdbc/core/conversion/DbAction.java deleted file mode 100644 index 6c79fac328..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/DbAction.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core.conversion; - -import lombok.Getter; -import lombok.ToString; - -import java.util.HashMap; -import java.util.Map; - -import org.springframework.util.Assert; - -/** - * Abstracts over a single interaction with a database. - * - * @author Jens Schauder - */ -@ToString -@Getter -public abstract class DbAction { - - /** - * {@link Class} of the entity of which the database representation is affected by this action. - */ - private final Class entityType; - - /** - * The entity of which the database representation is affected by this action. Might be {@literal null}. - */ - private final T entity; - - /** - * The path from the Aggregate Root to the entity affected by this {@link DbAction}. - */ - private final JdbcPropertyPath propertyPath; - - /** - * Key-value-pairs to specify additional values to be used with the statement which can't be obtained from the entity, - * nor from {@link DbAction}s {@literal this} depends on. A used case are map keys, which need to be persisted with - * the map value but aren't part of the value. - */ - private final Map additionalValues = new HashMap<>(); - - /** - * Another action, this action depends on. For example the insert for one entity might need the id of another entity, - * which gets insert before this one. That action would be referenced by this property, so that the id becomes - * available at execution time. Might be {@literal null}. 
- */ - private final DbAction dependingOn; - - private DbAction(Class entityType, T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - - this.entityType = entityType; - this.entity = entity; - this.propertyPath = propertyPath; - this.dependingOn = dependingOn; - } - - public static Insert insert(T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - return new Insert<>(entity, propertyPath, dependingOn); - } - - public static Update update(T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - return new Update<>(entity, propertyPath, dependingOn); - } - - public static Delete delete(Object id, Class type, T entity, JdbcPropertyPath propertyPath, - DbAction dependingOn) { - return new Delete<>(id, type, entity, propertyPath, dependingOn); - } - - public static DeleteAll deleteAll(Class type, JdbcPropertyPath propertyPath, DbAction dependingOn) { - return new DeleteAll<>(type, propertyPath, dependingOn); - } - - /** - * Executing this DbAction with the given {@link Interpreter}. - * - * @param interpreter the {@link Interpreter} responsible for actually executing the {@link DbAction}. - */ - void executeWith(Interpreter interpreter) { - - try { - doExecuteWith(interpreter); - } catch (Exception e) { - throw new DbActionExecutionException(this, e); - } - } - - /** - * Executing this DbAction with the given {@link Interpreter} without any exception handling. - * - * @param interpreter the {@link Interpreter} responsible for actually executing the {@link DbAction}. - */ - protected abstract void doExecuteWith(Interpreter interpreter); - - /** - * {@link InsertOrUpdate} must reference an entity. 
- * - * @param type o the entity for which this represents a database interaction - */ - abstract static class InsertOrUpdate extends DbAction { - - @SuppressWarnings("unchecked") - InsertOrUpdate(T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - super((Class) entity.getClass(), entity, propertyPath, dependingOn); - } - } - - /** - * Represents an insert statement. - * - * @param type o the entity for which this represents a database interaction - */ - public static class Insert extends InsertOrUpdate { - - private Insert(T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - super(entity, propertyPath, dependingOn); - } - - @Override - protected void doExecuteWith(Interpreter interpreter) { - interpreter.interpret(this); - } - } - - /** - * Represents an update statement. - * - * @param type o the entity for which this represents a database interaction - */ - public static class Update extends InsertOrUpdate { - - private Update(T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - super(entity, propertyPath, dependingOn); - } - - @Override - protected void doExecuteWith(Interpreter interpreter) { - interpreter.interpret(this); - } - } - - /** - * Represents an delete statement, possibly a cascading delete statement, i.e. the delete necessary because one - * aggregate root gets deleted. 
- * - * @param type o the entity for which this represents a database interaction - */ - @Getter - public static class Delete extends DbAction { - - /** - * Id of the root for which all via {@link #propertyPath} referenced entities shall get deleted - */ - private final Object rootId; - - private Delete(Object rootId, Class type, T entity, JdbcPropertyPath propertyPath, DbAction dependingOn) { - - super(type, entity, propertyPath, dependingOn); - - Assert.notNull(rootId, "rootId must not be null."); - - this.rootId = rootId; - } - - @Override - protected void doExecuteWith(Interpreter interpreter) { - interpreter.interpret(this); - } - } - - /** - * Represents an delete statement. - * - * @param type o the entity for which this represents a database interaction - */ - public static class DeleteAll extends DbAction { - - private DeleteAll(Class entityType, JdbcPropertyPath propertyPath, DbAction dependingOn) { - super(entityType, null, propertyPath, dependingOn); - } - - @Override - protected void doExecuteWith(Interpreter interpreter) { - interpreter.interpret(this); - } - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/Interpreter.java b/src/main/java/org/springframework/data/jdbc/core/conversion/Interpreter.java deleted file mode 100644 index d383e070fe..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/Interpreter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core.conversion; - -import org.springframework.data.jdbc.core.conversion.DbAction.Delete; -import org.springframework.data.jdbc.core.conversion.DbAction.DeleteAll; -import org.springframework.data.jdbc.core.conversion.DbAction.Insert; -import org.springframework.data.jdbc.core.conversion.DbAction.Update; - -/** - * @author Jens Schauder - */ -public interface Interpreter { - - void interpret(Update update); - - void interpret(Insert insert); - - void interpret(Delete delete); - - void interpret(DeleteAll delete); -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityDeleteWriter.java b/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityDeleteWriter.java deleted file mode 100644 index 81a93f2131..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityDeleteWriter.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core.conversion; - -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; - -/** - * Converts an entity that is about to be deleted into {@link DbAction}s inside a {@link AggregateChange} that need to be - * executed against the database to recreate the appropriate state in the database. - * - * @author Jens Schauder - */ -public class JdbcEntityDeleteWriter extends JdbcEntityWriterSupport { - - public JdbcEntityDeleteWriter(JdbcMappingContext context) { - super(context); - } - - @Override - public void write(Object id, AggregateChange aggregateChange) { - - if (id == null) { - deleteAll(aggregateChange); - } else { - deleteById(id, aggregateChange); - } - } - - private void deleteAll(AggregateChange aggregateChange) { - - context.referencedEntities(aggregateChange.getEntityType(), null) - .forEach(p -> aggregateChange.addAction(DbAction.deleteAll(p.getLeafType(), new JdbcPropertyPath(p), null))); - - aggregateChange.addAction(DbAction.deleteAll(aggregateChange.getEntityType(), null, null)); - } - - private void deleteById(Object id, AggregateChange aggregateChange) { - - deleteReferencedEntities(id, aggregateChange); - - aggregateChange.addAction(DbAction.delete(id, aggregateChange.getEntityType(), aggregateChange.getEntity(), null, null)); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriter.java b/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriter.java deleted file mode 100644 index 3e152f36a7..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriter.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core.conversion; - -import lombok.RequiredArgsConstructor; -import org.springframework.data.jdbc.core.conversion.DbAction.Insert; -import org.springframework.data.jdbc.core.conversion.DbAction.Update; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.mapping.PersistentProperty; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.util.StreamUtils; -import org.springframework.util.ClassUtils; - -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.stream.Stream; - -/** - * Converts an entity that is about to be saved into {@link DbAction}s inside a {@link AggregateChange} that need to be - * executed against the database to recreate the appropriate state in the database. 
- * - * @author Jens Schauder - */ -public class JdbcEntityWriter extends JdbcEntityWriterSupport { - - public JdbcEntityWriter(JdbcMappingContext context) { - super(context); - } - - @Override - public void write(Object o, AggregateChange aggregateChange) { - write(o, aggregateChange, null); - } - - private void write(Object o, AggregateChange aggregateChange, DbAction dependingOn) { - - Class type = (Class) o.getClass(); - JdbcPersistentEntityInformation entityInformation = context.getRequiredPersistentEntityInformation(type); - JdbcPropertyPath propertyPath = JdbcPropertyPath.from("", type); - - if (entityInformation.isNew(o)) { - - Insert insert = DbAction.insert(o, propertyPath, dependingOn); - aggregateChange.addAction(insert); - - referencedEntities(o).forEach(propertyAndValue -> saveReferencedEntities(propertyAndValue, aggregateChange, - propertyPath.nested(propertyAndValue.property.getName()), insert)); - } else { - - deleteReferencedEntities(entityInformation.getRequiredId(o), aggregateChange); - - Update update = DbAction.update(o, propertyPath, dependingOn); - aggregateChange.addAction(update); - - referencedEntities(o).forEach( - propertyAndValue -> insertReferencedEntities(propertyAndValue, aggregateChange, propertyPath.nested(propertyAndValue.property.getName()), update)); - } - } - - private void saveReferencedEntities(PropertyAndValue propertyAndValue, AggregateChange aggregateChange, - JdbcPropertyPath propertyPath, DbAction dependingOn) { - - saveActions(propertyAndValue, propertyPath, dependingOn).forEach(a -> { - - aggregateChange.addAction(a); - referencedEntities(propertyAndValue.value) - .forEach(pav -> saveReferencedEntities(pav, aggregateChange, propertyPath.nested(pav.property.getName()), a)); - }); - } - - private Stream saveActions(PropertyAndValue propertyAndValue, JdbcPropertyPath propertyPath, - DbAction dependingOn) { - - if (Map.Entry.class.isAssignableFrom(ClassUtils.getUserClass(propertyAndValue.value))) { - return 
mapEntrySaveAction(propertyAndValue, propertyPath, dependingOn); - } - - return Stream.of(singleSaveAction(propertyAndValue.value, propertyPath, dependingOn)); - } - - private Stream mapEntrySaveAction(PropertyAndValue propertyAndValue, JdbcPropertyPath propertyPath, - DbAction dependingOn) { - - Map.Entry entry = (Map.Entry) propertyAndValue.value; - - DbAction action = singleSaveAction(entry.getValue(), propertyPath, dependingOn); - action.getAdditionalValues().put(propertyAndValue.property.getKeyColumn(), entry.getKey()); - return Stream.of(action); - } - - private DbAction singleSaveAction(T t, JdbcPropertyPath propertyPath, DbAction dependingOn) { - - JdbcPersistentEntityInformation entityInformation = context - .getRequiredPersistentEntityInformation((Class) ClassUtils.getUserClass(t)); - - return entityInformation.isNew(t) ? DbAction.insert(t, propertyPath, dependingOn) - : DbAction.update(t, propertyPath, dependingOn); - } - - private void insertReferencedEntities(PropertyAndValue propertyAndValue, AggregateChange aggregateChange, - JdbcPropertyPath propertyPath, DbAction dependingOn) { - - Insert insert; - if (propertyAndValue.property.isQualified()) { - - Entry valueAsEntry = (Entry) propertyAndValue.value; - insert = DbAction.insert(valueAsEntry.getValue(), propertyPath, dependingOn); - insert.getAdditionalValues().put(propertyAndValue.property.getKeyColumn(), valueAsEntry.getKey()); - } else { - insert = DbAction.insert(propertyAndValue.value, propertyPath, dependingOn); - } - - aggregateChange.addAction(insert); - referencedEntities(insert.getEntity()) - .forEach(pav -> insertReferencedEntities(pav, aggregateChange, propertyPath.nested(pav.property.getName()), dependingOn)); - } - - private Stream referencedEntities(Object o) { - - JdbcPersistentEntity persistentEntity = context.getRequiredPersistentEntity(o.getClass()); - - return StreamUtils.createStreamFromIterator(persistentEntity.iterator()) // - .filter(PersistentProperty::isEntity) // - 
.flatMap( // - p -> referencedEntity(p, persistentEntity.getPropertyAccessor(o)) // - .map(e -> new PropertyAndValue(p, e)) // - ); - } - - private Stream referencedEntity(JdbcPersistentProperty p, PersistentPropertyAccessor propertyAccessor) { - - Class actualType = p.getActualType(); - JdbcPersistentEntity persistentEntity = context // - .getPersistentEntity(actualType); - - if (persistentEntity == null) { - return Stream.empty(); - } - - Class type = p.getType(); - - if (List.class.isAssignableFrom(type)) { - return listPropertyAsStream(p, propertyAccessor); - } - - if (Collection.class.isAssignableFrom(type)) { - return collectionPropertyAsStream(p, propertyAccessor); - } - - if (Map.class.isAssignableFrom(type)) { - return mapPropertyAsStream(p, propertyAccessor); - } - - return singlePropertyAsStream(p, propertyAccessor); - } - - private Stream collectionPropertyAsStream(JdbcPersistentProperty p, - PersistentPropertyAccessor propertyAccessor) { - - Object property = propertyAccessor.getProperty(p); - - return property == null // - ? Stream.empty() // - : ((Collection) property).stream(); - } - - private Stream listPropertyAsStream(JdbcPersistentProperty p, PersistentPropertyAccessor propertyAccessor) { - - Object property = propertyAccessor.getProperty(p); - - if (property == null) return Stream.empty(); - - List listProperty = (List) property; - HashMap map = new HashMap<>(); - for (int i = 0; i < listProperty.size(); i++) { - map.put(i, listProperty.get(i)); - } - - return map.entrySet().stream().map(e -> (Object) e); - } - - private Stream mapPropertyAsStream(JdbcPersistentProperty p, PersistentPropertyAccessor propertyAccessor) { - - Object property = propertyAccessor.getProperty(p); - - return property == null // - ? 
Stream.empty() // - : ((Map) property).entrySet().stream().map(e -> (Object) e); - } - - private Stream singlePropertyAsStream(JdbcPersistentProperty p, PersistentPropertyAccessor propertyAccessor) { - - Object property = propertyAccessor.getProperty(p); - if (property == null) { - return Stream.empty(); - } - - return Stream.of(property); - } - - @RequiredArgsConstructor - private static class PropertyAndValue { - - private final JdbcPersistentProperty property; - private final Object value; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriterSupport.java b/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriterSupport.java deleted file mode 100644 index aacb39c1c6..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriterSupport.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core.conversion; - -import org.springframework.data.convert.EntityWriter; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; - -/** - * Common infrastructure needed by different implementations of {@link EntityWriter}. 
- * - * @author Jens Schauder - */ -abstract class JdbcEntityWriterSupport implements EntityWriter { - protected final JdbcMappingContext context; - - JdbcEntityWriterSupport(JdbcMappingContext context) { - this.context = context; - } - - /** - * add {@link org.springframework.data.jdbc.core.conversion.DbAction.Delete} actions to the {@link AggregateChange} for - * deleting all referenced entities. - * - * @param id id of the aggregate root, of which the referenced entities get deleted. - * @param aggregateChange the change object to which the actions should get added. Must not be {@literal null} - */ - void deleteReferencedEntities(Object id, AggregateChange aggregateChange) { - - context.referencedEntities(aggregateChange.getEntityType(), null) - .forEach(p -> aggregateChange.addAction(DbAction.delete(id, p.getLeafType(), null, new JdbcPropertyPath(p), null))); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcPropertyPath.java b/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcPropertyPath.java deleted file mode 100644 index 682898d082..0000000000 --- a/src/main/java/org/springframework/data/jdbc/core/conversion/JdbcPropertyPath.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core.conversion; - -import org.springframework.data.mapping.PropertyPath; -import org.springframework.util.StringUtils; - -/** - * A replacement for {@link org.springframework.data.mapping.PropertyPath} as long as it doesn't support objects with - * empty path. - * - * See https://jira.spring.io/browse/DATACMNS-1204. - * - * @author Jens Schauder - */ -public class JdbcPropertyPath { - - private final PropertyPath path; - private final Class rootType; - - JdbcPropertyPath(PropertyPath path) { - - this.path = path; - this.rootType = null; - } - - private JdbcPropertyPath(Class type) { - - this.path = null; - this.rootType = type; - } - - public static JdbcPropertyPath from(String source, Class type) { - - if (StringUtils.isEmpty(source)) { - return new JdbcPropertyPath(type); - } else { - return new JdbcPropertyPath(PropertyPath.from(source, type)); - } - } - - public JdbcPropertyPath nested(String name) { - return path == null ? new JdbcPropertyPath(PropertyPath.from(name, rootType)) : new JdbcPropertyPath(path.nested(name)); - } - - public PropertyPath getPath() { - return path; - } - - public String toDotPath() { - return path == null ? "" : path.toDotPath(); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/AfterCreation.java b/src/main/java/org/springframework/data/jdbc/mapping/event/AfterCreation.java deleted file mode 100644 index 6b2cd29ac8..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/AfterCreation.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * Gets published after instantiation and setting of all the properties of an entity. This allows to do some - * postprocessing of entities. - * - * @author Jens Schauder - * @since 2.0 - */ -public class AfterCreation extends JdbcEventWithIdAndEntity { - - private static final long serialVersionUID = -4185777271143436728L; - - /** - * @param id of the entity - * @param entity the newly instantiated entity. - * @param change - */ - public AfterCreation(Specified id, Object entity, AggregateChange change) { - super(id, entity, change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/AfterDelete.java b/src/main/java/org/springframework/data/jdbc/mapping/event/AfterDelete.java deleted file mode 100644 index b2f92fc211..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/AfterDelete.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * Gets published after deletion of an entity. It will have a {@link Specified} identifier. If the entity is empty or - * not depends on the delete method used. - * - * @author Jens Schauder - * @since 2.0 - */ -public class AfterDelete extends JdbcEventWithId { - - private static final long serialVersionUID = 3594807189931141582L; - - /** - * @param id of the entity. - * @param instance the deleted entity if it is available. - * @param change the {@link AggregateChange} encoding the planned actions to be performed on the database. - */ - public AfterDelete(Specified id, Optional instance, AggregateChange change) { - super(id, instance, change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/AfterSave.java b/src/main/java/org/springframework/data/jdbc/mapping/event/AfterSave.java deleted file mode 100644 index c20d4cfc77..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/AfterSave.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * Subclasses of this get published after a new instance or a changed instance was saved in the database. - * - * @author Jens Schauder - * @since 2.0 - */ -public class AfterSave extends JdbcEventWithIdAndEntity { - - private static final long serialVersionUID = 8982085767296982848L; - - /** - * @param id identifier of - * @param instance the newly saved entity. - * @param change the {@link AggregateChange} encoding the planned actions to be performed on the database. - */ - public AfterSave(Specified id, Object instance, AggregateChange change) { - super(id, instance, change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeDelete.java b/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeDelete.java deleted file mode 100644 index f3f9103e41..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeDelete.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * Gets published when an entity is about to get deleted. - * - * @author Jens Schauder - * @since 2.0 - */ -public class BeforeDelete extends JdbcEventWithId { - - private static final long serialVersionUID = -5483432053368496651L; - - /** - * @param id the id of the entity - * @param entity the entity about to get deleted. Might be empty. - * @param change the {@link AggregateChange} encoding the planned actions to be performed on the database. - */ - public BeforeDelete(Specified id, Optional entity, AggregateChange change) { - super(id, entity, change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeSave.java b/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeSave.java deleted file mode 100644 index 4f71188452..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/BeforeSave.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; - -/** - * Subclasses of this get published before an entity gets saved to the database. - * - * @author Jens Schauder - * @since 2.0 - */ -public class BeforeSave extends JdbcEventWithEntity { - - private static final long serialVersionUID = -6996874391990315443L; - - /** - * @param id of the entity to be saved. - * @param instance the entity about to get saved. - * @param change - */ - public BeforeSave(Identifier id, Object instance, AggregateChange change) { - super(id, instance, change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/Identifier.java b/src/main/java/org/springframework/data/jdbc/mapping/event/Identifier.java deleted file mode 100644 index d63eb60f1b..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/Identifier.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; - -import org.springframework.util.Assert; - -/** - * Wrapper for an identifier of an entity. Might either be a {@link Specified} or {@link Unset#UNSET} - * - * @author Jens Schauder - * @since 2.0 - */ -public interface Identifier { - - /** - * Creates a new {@link Specified} identifier for the given, non-null value. - * - * @param identifier must not be {@literal null}. - * @return will never be {@literal null}. - */ - static Specified of(Object identifier) { - - Assert.notNull(identifier, "Identifier must not be null!"); - - return SpecifiedIdentifier.of(identifier); - } - - static Identifier ofNullable(Object identifier) { - return identifier == null ? Unset.UNSET : of(identifier); - } - - /** - * Creates a new {@link Identifier} for the given optional source value. - * - * @param identifier must not be {@literal null}. - * @return - */ - static Identifier of(Optional identifier) { - - Assert.notNull(identifier, "Identifier must not be null!"); - - return identifier.map(it -> (Identifier) Identifier.of(it)).orElse(Unset.UNSET); - } - - /** - * Returns the identifier value. - * - * @return will never be {@literal null}. - */ - Optional getOptionalValue(); - - /** - * A specified identifier that exposes a definitely present identifier value. - * - * @author Oliver Gierke - */ - interface Specified extends Identifier { - - /** - * Returns the identifier value. - * - * @return will never be {@literal null}. 
- */ - default Object getValue() { - return getOptionalValue().orElseThrow(() -> new IllegalStateException("Should not happen!")); - } - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithEntity.java b/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithEntity.java deleted file mode 100644 index 0af7b84b55..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithEntity.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; - -/** - * A {@link SimpleJdbcEvent} which is guaranteed to have an entity. 
- * - * @author Jens Schauder - * @since 2.0 - */ -public class JdbcEventWithEntity extends SimpleJdbcEvent implements WithEntity { - - private static final long serialVersionUID = 4891455396602090638L; - - public JdbcEventWithEntity(Identifier id, Object entity, AggregateChange change) { - super(id, Optional.of(entity), change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithId.java b/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithId.java deleted file mode 100644 index 6b987e5611..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithId.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * A {@link SimpleJdbcEvent} guaranteed to have an identifier. 
- * - * @author Jens Schauder - * @since 2.0 - */ -public class JdbcEventWithId extends SimpleJdbcEvent implements WithId { - - private static final long serialVersionUID = -8071323168471611098L; - - private final Specified id; - - public JdbcEventWithId(Specified id, Optional entity, AggregateChange change) { - - super(id, entity, change); - - this.id = id; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.event.JdbcEvent#getId() - */ - @Override - public Specified getId() { - return id; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithIdAndEntity.java b/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithIdAndEntity.java deleted file mode 100644 index e1727c4298..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/JdbcEventWithIdAndEntity.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import lombok.Getter; - -import java.util.Optional; - -import org.springframework.data.jdbc.core.conversion.AggregateChange; -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * A {@link SimpleJdbcEvent} which is guaranteed to have an identifier and an entity. 
- * - * @author Jens Schauder - * @since 2.0 - */ -@Getter -public class JdbcEventWithIdAndEntity extends JdbcEventWithId implements WithEntity { - - private static final long serialVersionUID = -3194462549552515519L; - - public JdbcEventWithIdAndEntity(Specified id, Object entity, AggregateChange change) { - super(id, Optional.of(entity), change); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/SimpleJdbcEvent.java b/src/main/java/org/springframework/data/jdbc/mapping/event/SimpleJdbcEvent.java deleted file mode 100644 index 2eb61f18ab..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/SimpleJdbcEvent.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.event; - -import java.util.Optional; - -import org.springframework.context.ApplicationEvent; -import org.springframework.data.jdbc.core.conversion.AggregateChange; - -/** - * The common superclass for all events published by JDBC repositories. {@link #getSource} contains the - * {@link Identifier} of the entity triggering the event. 
- * - * @author Jens Schauder - * @author Oliver Gierke - * @since 2.0 - */ -class SimpleJdbcEvent extends ApplicationEvent implements JdbcEvent { - - private static final long serialVersionUID = -1798807778668751659L; - - private final Object entity; - private final AggregateChange change; - - SimpleJdbcEvent(Identifier id, Optional entity, AggregateChange change) { - - super(id); - - this.entity = entity.orElse(null); - this.change = change; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.event.JdbcEvent#getId() - */ - @Override - public Identifier getId() { - return (Identifier) getSource(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.event.JdbcEvent#getOptionalEntity() - */ - @Override - public Optional getOptionalEntity() { - return Optional.ofNullable(entity); - } - - public AggregateChange getChange() { - return change; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/event/SpecifiedIdentifier.java b/src/main/java/org/springframework/data/jdbc/mapping/event/SpecifiedIdentifier.java deleted file mode 100644 index cbadb63e77..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/event/SpecifiedIdentifier.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mapping.event; - -import lombok.Value; - -import java.util.Optional; - -import org.springframework.data.jdbc.mapping.event.Identifier.Specified; - -/** - * Simple value object for {@link Specified}. - * - * @author Jens Schauder - * @author Oliver Gierke - * @since 2.0 - */ -@Value(staticConstructor = "of") -class SpecifiedIdentifier implements Specified { - - Object value; - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.event.Identifier#getOptionalValue() - */ - @Override - public Optional getOptionalValue() { - return Optional.of(value); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentEntityInformation.java b/src/main/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentEntityInformation.java deleted file mode 100644 index d66b8e387b..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentEntityInformation.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mapping.model; - -import org.springframework.data.domain.Persistable; -import org.springframework.data.repository.core.support.PersistentEntityInformation; -import org.springframework.lang.Nullable; - -/** - * @author Jens Schauder - * @since 2.0 - */ -public class BasicJdbcPersistentEntityInformation extends PersistentEntityInformation - implements JdbcPersistentEntityInformation { - - private final JdbcPersistentEntity persistentEntity; - - public BasicJdbcPersistentEntityInformation(JdbcPersistentEntity persistentEntity) { - - super(persistentEntity); - - this.persistentEntity = persistentEntity; - } - - @Override - public boolean isNew(T entity) { - return entity instanceof Persistable ? ((Persistable) entity).isNew() : super.isNew(entity); - } - - @SuppressWarnings("unchecked") - @Nullable - @Override - public ID getId(T entity) { - return entity instanceof Persistable ? ((Persistable)entity).getId() : super.getId(entity); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation#setId(java.lang.Object, java.util.Optional) - */ - @Override - public void setId(T instance, Object value) { - persistentEntity.getPropertyAccessor(instance).setProperty(persistentEntity.getRequiredIdProperty(), value); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentProperty.java b/src/main/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentProperty.java deleted file mode 100644 index 09bbf4b12a..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentProperty.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.model; - -import org.springframework.data.mapping.Association; -import org.springframework.data.mapping.PersistentEntity; -import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty; -import org.springframework.data.mapping.model.Property; -import org.springframework.data.mapping.model.SimpleTypeHolder; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -import java.time.ZonedDateTime; -import java.time.temporal.Temporal; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Set; - -/** - * Meta data about a property to be used by repository implementations. - * - * @author Jens Schauder - * @author Greg Turnquist - * @since 2.0 - */ -public class BasicJdbcPersistentProperty extends AnnotationBasedPersistentProperty - implements JdbcPersistentProperty { - - private static final Map, Class> javaToDbType = new LinkedHashMap<>(); - private final JdbcMappingContext context; - - static { - javaToDbType.put(Enum.class, String.class); - javaToDbType.put(ZonedDateTime.class, String.class); - javaToDbType.put(Temporal.class, Date.class); - } - - /** - * Creates a new {@link AnnotationBasedPersistentProperty}. - * - * @param property must not be {@literal null}. - * @param owner must not be {@literal null}. - * @param simpleTypeHolder must not be {@literal null}. 
- * @param context must not be {@literal null} - */ - public BasicJdbcPersistentProperty(Property property, PersistentEntity owner, - SimpleTypeHolder simpleTypeHolder, JdbcMappingContext context) { - - super(property, owner, simpleTypeHolder); - - Assert.notNull(context, "context must not be null."); - - this.context = context; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AbstractPersistentProperty#createAssociation() - */ - @Override - protected Association createAssociation() { - throw new UnsupportedOperationException(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty#getColumnName() - */ - public String getColumnName() { - return context.getNamingStrategy().getColumnName(this); - } - - /** - * The type to be used to store this property in the database. - * - * @return a {@link Class} that is suitable for usage with JDBC drivers - */ - @SuppressWarnings("unchecked") - @Override - public Class getColumnType() { - - Class columnType = columnTypeIfEntity(getActualType()); - - return columnType == null ? columnTypeForNonEntity(getActualType()) : columnType; - } - - @Override - public JdbcPersistentEntity getOwner() { - return (JdbcPersistentEntity) super.getOwner(); - } - - @Override - public String getReverseColumnName() { - return context.getNamingStrategy().getReverseColumnName(this); - } - - @Override - public String getKeyColumn() { - return isQualified() ? 
context.getNamingStrategy().getKeyColumn(this) : null; - } - - @Override - public boolean isQualified() { - return isMap() || isListLike(); - } - - private boolean isListLike() { - return isCollectionLike() && !Set.class.isAssignableFrom(this.getType()); - } - - @Override - public boolean isOrdered() { - return isListLike(); - } - - private Class columnTypeIfEntity(Class type) { - - JdbcPersistentEntity persistentEntity = context.getPersistentEntity(type); - - if (persistentEntity == null) { - return null; - } - - JdbcPersistentProperty idProperty = persistentEntity.getIdProperty(); - - if (idProperty == null) { - return null; - } - return idProperty.getColumnType(); - } - - private Class columnTypeForNonEntity(Class type) { - - return javaToDbType.entrySet().stream() // - .filter(e -> e.getKey().isAssignableFrom(type)) // - .map(e -> (Class) e.getValue()) // - .findFirst() // - .orElseGet(() -> ClassUtils.resolvePrimitiveIfNecessary(type)); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/DefaultNamingStrategy.java b/src/main/java/org/springframework/data/jdbc/mapping/model/DefaultNamingStrategy.java deleted file mode 100644 index c0f9b21625..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/DefaultNamingStrategy.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mapping.model; - -/** - * Basic implementation of {@link NamingStrategy} with no schema, table based on {@link Class} and - * column name based on {@link JdbcPersistentProperty}. - * - * NOTE: Can also be used as an adapter. Create an anonymous subclass and override any settings to implement - * a different strategy on the fly. - * - * @author Greg Turnquist - */ -public class DefaultNamingStrategy implements NamingStrategy { - - /** - * No schema at all! - */ - @Override - public String getSchema() { - return ""; - } - - /** - * Look up the {@link Class}'s simple name. - */ - @Override - public String getTableName(Class type) { - return type.getSimpleName(); - } - - - /** - * Look up the {@link JdbcPersistentProperty}'s name. - */ - @Override - public String getColumnName(JdbcPersistentProperty property) { - return property.getName(); - } - - @Override - public String getReverseColumnName(JdbcPersistentProperty property) { - return property.getOwner().getTableName(); - } - - @Override - public String getKeyColumn(JdbcPersistentProperty property) { - return getReverseColumnName(property) + "_key"; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcMappingContext.java b/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcMappingContext.java deleted file mode 100644 index 8871cd9f3e..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcMappingContext.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.model; - -import static java.util.Arrays.*; - -import lombok.Getter; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.time.temporal.Temporal; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; - -import org.springframework.core.convert.ConversionService; -import org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.core.convert.support.GenericConversionService; -import org.springframework.data.convert.Jsr310Converters; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mapping.context.AbstractMappingContext; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.Property; -import org.springframework.data.mapping.model.SimpleTypeHolder; -import org.springframework.data.util.TypeInformation; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * {@link MappingContext} implementation for JDBC. 
- * - * @author Jens Schauder - * @author Greg Turnquist - * @author Kazuki Shimizu - * @since 2.0 - */ -public class JdbcMappingContext extends AbstractMappingContext, JdbcPersistentProperty> { - - private static final HashSet> CUSTOM_SIMPLE_TYPES = new HashSet<>(asList( // - BigDecimal.class, // - BigInteger.class, // - Temporal.class // - )); - - @Getter private final NamingStrategy namingStrategy; - @Getter private final NamedParameterJdbcOperations template; - @Getter private SimpleTypeHolder simpleTypeHolder; - private GenericConversionService conversions = getDefaultConversionService(); - - public JdbcMappingContext(NamingStrategy namingStrategy, NamedParameterJdbcOperations template, - ConversionCustomizer customizer) { - - this.namingStrategy = namingStrategy; - this.template = template; - - customizer.customize(conversions); - setSimpleTypeHolder(new SimpleTypeHolder(CUSTOM_SIMPLE_TYPES, true)); - } - - public JdbcMappingContext(NamedParameterJdbcOperations template) { - this(new DefaultNamingStrategy(), template, __ -> {}); - } - - @Override - public void setSimpleTypeHolder(SimpleTypeHolder simpleTypes) { - super.setSimpleTypeHolder(simpleTypes); - this.simpleTypeHolder = simpleTypes; - } - - public List referencedEntities(Class rootType, PropertyPath path) { - - List paths = new ArrayList<>(); - - Class currentType = path == null ? rootType : path.getLeafType(); - JdbcPersistentEntity persistentEntity = getRequiredPersistentEntity(currentType); - - for (JdbcPersistentProperty property : persistentEntity) { - if (property.isEntity()) { - - PropertyPath nextPath = path == null ? 
PropertyPath.from(property.getName(), rootType) - : path.nested(property.getColumnName()); - paths.add(nextPath); - paths.addAll(referencedEntities(rootType, nextPath)); - } - } - - Collections.reverse(paths); - - return paths; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.context.AbstractMappingContext#createPersistentEntity(org.springframework.data.util.TypeInformation) - */ - @Override - protected JdbcPersistentEntity createPersistentEntity(TypeInformation typeInformation) { - return new JdbcPersistentEntityImpl<>(typeInformation, this.namingStrategy); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.context.AbstractMappingContext#createPersistentProperty(org.springframework.data.mapping.model.Property, org.springframework.data.mapping.model.MutablePersistentEntity, org.springframework.data.mapping.model.SimpleTypeHolder) - */ - @Override - protected JdbcPersistentProperty createPersistentProperty(Property property, JdbcPersistentEntity owner, - SimpleTypeHolder simpleTypeHolder) { - return new BasicJdbcPersistentProperty(property, owner, simpleTypeHolder, this); - } - - @SuppressWarnings("unchecked") - public JdbcPersistentEntityInformation getRequiredPersistentEntityInformation(Class type) { - return new BasicJdbcPersistentEntityInformation<>((JdbcPersistentEntity) getRequiredPersistentEntity(type)); - } - - public ConversionService getConversions() { - return conversions; - } - - private static GenericConversionService getDefaultConversionService() { - - DefaultConversionService conversionService = new DefaultConversionService(); - Jsr310Converters.getConvertersToRegister().forEach(conversionService::addConverter); - - return conversionService; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntity.java b/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntity.java deleted file mode 100644 index 0673a3a517..0000000000 --- 
a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntity.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.model; - -import org.springframework.data.mapping.PersistentEntity; -import org.springframework.data.mapping.model.MutablePersistentEntity; - -/** - * @author Jens Schauder - * @author Oliver Gierke - * @since 2.0 - */ -public interface JdbcPersistentEntity extends MutablePersistentEntity { - - /** - * Returns the name of the table backing the given entity. - * - * @return the table name. - */ - String getTableName(); - - /** - * Returns the column representing the identifier. - * - * @return will never be {@literal null}. - */ - String getIdColumn(); -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntityImpl.java b/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntityImpl.java deleted file mode 100644 index cea9b2d018..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntityImpl.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.model; - -import lombok.Getter; - -import org.springframework.data.mapping.model.BasicPersistentEntity; -import org.springframework.data.util.TypeInformation; - -/** - * Meta data a repository might need for implementing persistence operations for instances of type {@code T} - * - * @author Jens Schauder - * @author Greg Turnquist - * @since 2.0 - */ -class JdbcPersistentEntityImpl extends BasicPersistentEntity - implements JdbcPersistentEntity { - - private final NamingStrategy namingStrategy; - private final @Getter String tableName; - - /** - * Creates a new {@link JdbcPersistentEntityImpl} for the given {@link TypeInformation}. - * - * @param information must not be {@literal null}. 
- */ - JdbcPersistentEntityImpl(TypeInformation information, NamingStrategy namingStrategy) { - - super(information); - - this.namingStrategy = namingStrategy; - this.tableName = this.namingStrategy.getQualifiedTableName(getType()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity#getIdColumn() - */ - @Override - public String getIdColumn() { - return this.namingStrategy.getColumnName(getRequiredIdProperty()); - } - - @Override - public String toString() { - return String.format("JdbcpersistentEntityImpl<%s>", getType()); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntityInformation.java b/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntityInformation.java deleted file mode 100644 index a812052e10..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentEntityInformation.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mapping.model; - -import org.springframework.data.repository.core.EntityInformation; - -/** - * @author Jens Schauder - * @since 2.0 - */ -public interface JdbcPersistentEntityInformation extends EntityInformation { - - void setId(T instance, Object value); - - /** - * Returns the identifier of the given entity or throws and exception if it can't be obtained. - * - * @param entity must not be {@literal null}. - * @return the identifier of the given entity - * @throws IllegalArgumentException in case no identifier can be obtained for the given entity. - */ - default ID getRequiredId(T entity) { - - ID id = getId(entity); - if (id == null) - throw new IllegalStateException(String.format("Could not obtain required identifier from entity %s!", entity)); - - return id; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentProperty.java b/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentProperty.java deleted file mode 100644 index b35edeb820..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/JdbcPersistentProperty.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.model; - -import org.springframework.data.mapping.PersistentProperty; - -/** - * A {@link PersistentProperty} for JDBC. 
- * - * @author Jens Schauder - * @author Oliver Gierke - * @since 2.0 - */ -public interface JdbcPersistentProperty extends PersistentProperty { - - /** - * Returns the name of the column backing this property. - * - * @return the name of the column backing this property. - */ - String getColumnName(); - - /** - * The type to be used to store this property in the database. - * - * @return a {@link Class} that is suitable for usage with JDBC drivers - */ - Class getColumnType(); - - @Override - JdbcPersistentEntity getOwner(); - - String getReverseColumnName(); - - String getKeyColumn(); - - /** - * Returns if this property is a qualified property, i.e. a property referencing multiple elements that can get picked by a key or an index. - */ - boolean isQualified(); - - /** - * Returns whether this property is an ordered property. - */ - boolean isOrdered(); -} diff --git a/src/main/java/org/springframework/data/jdbc/mapping/model/NamingStrategy.java b/src/main/java/org/springframework/data/jdbc/mapping/model/NamingStrategy.java deleted file mode 100644 index c531c7e63e..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mapping/model/NamingStrategy.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mapping.model; - -/** - * @author Greg Turnquist - */ -public interface NamingStrategy { - - String getSchema(); - - String getTableName(Class type); - - String getColumnName(JdbcPersistentProperty property); - - default String getQualifiedTableName(Class type) { - return this.getSchema() + (this.getSchema().equals("") ? "" : ".") + this.getTableName(type); - } - - /** - * For a reference A -> B this is the name in the table for B which references A. - * - * @return a column name. - */ - String getReverseColumnName(JdbcPersistentProperty property); - - /** - * For a map valued reference A -> Map>X,B< this is the name of the column in the tabel for B holding the key of the map. - * @return - */ - String getKeyColumn(JdbcPersistentProperty property); - -} diff --git a/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisContext.java b/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisContext.java deleted file mode 100644 index 1cb110deda..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisContext.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mybatis; - -import java.util.Map; - -/** - * {@link MyBatisContext} instances get passed to MyBatis mapped statements as arguments, making Ids, instances, domainType and other attributes available to the statements. - * - * All methods might return {@literal null} depending on the kind of values available on invocation. - * @author Jens Schauder - */ -public class MyBatisContext { - - private final Object id; - private final Object instance; - private final Class domainType; - private final Map additonalValues; - - public MyBatisContext(Object id, Object instance, Class domainType, Map additonalValues) { - - this.id = id; - this.instance = instance; - this.domainType = domainType; - this.additonalValues = additonalValues; - } - - public Object getId() { - return id; - } - - public Object getInstance() { - return instance; - } - - public Class getDomainType() { - return domainType; - } - - public Object get(String key) { - return additonalValues.get(key); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategy.java b/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategy.java deleted file mode 100644 index 4bcae4edc4..0000000000 --- a/src/main/java/org/springframework/data/jdbc/mybatis/MyBatisDataAccessStrategy.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mybatis; - -import org.apache.ibatis.session.SqlSession; -import org.mybatis.spring.SqlSessionTemplate; -import org.springframework.data.jdbc.core.DataAccessStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.mapping.PropertyPath; - -import java.util.Collections; -import java.util.Map; - -/** - * {@link DataAccessStrategy} implementation based on MyBatis. Each method gets mapped to a statement. The name of the - * statement gets constructed as follows: The namespace is based on the class of the entity plus the suffix "Mapper". - * This is then followed by the method name separated by a dot. For methods taking a {@link PropertyPath} as argument, - * the relevant entity is that of the root of the path, and the path itself gets as dot separated String appended to the - * statement name. Each statement gets an instance of {@link MyBatisContext}, which at least has the entityType set. For - * methods taking a {@link PropertyPath} the entityTyoe if the context is set to the class of the leaf type. - * - * @author Jens Schauder - * @author Kazuki Shimizu - */ -public class MyBatisDataAccessStrategy implements DataAccessStrategy { - - private static final String MAPPER_SUFFIX = "Mapper"; - - private final SqlSession sqlSession; - - /** - * Constructs a {@link DataAccessStrategy} based on MyBatis. - *

- * Use a {@link SqlSessionTemplate} for {@link SqlSession} or a similar implementation tying the session to the - * proper transaction. - * - * @param sqlSession Must be non {@literal null}. - */ - public MyBatisDataAccessStrategy(SqlSession sqlSession) { - this.sqlSession = sqlSession; - } - - @Override - public void insert(T instance, Class domainType, Map additionalParameters) { - sqlSession().insert(mapper(domainType) + ".insert", - new MyBatisContext(null, instance, domainType, additionalParameters)); - } - - @Override - public void update(S instance, Class domainType) { - - sqlSession().update(mapper(domainType) + ".update", - new MyBatisContext(null, instance, domainType, Collections.emptyMap())); - } - - @Override - public void delete(Object id, Class domainType) { - - sqlSession().delete(mapper(domainType) + ".delete", - new MyBatisContext(id, null, domainType, Collections.emptyMap())); - } - - @Override - public void delete(Object rootId, PropertyPath propertyPath) { - - sqlSession().delete(mapper(propertyPath.getOwningType().getType()) + ".delete-" + toDashPath(propertyPath), - new MyBatisContext(rootId, null, propertyPath.getLeafProperty().getTypeInformation().getType(), - Collections.emptyMap())); - } - - @Override - public void deleteAll(Class domainType) { - - sqlSession().delete( // - mapper(domainType) + ".deleteAll", // - new MyBatisContext(null, null, domainType, Collections.emptyMap()) // - ); - } - - @Override - public void deleteAll(PropertyPath propertyPath) { - - Class baseType = propertyPath.getOwningType().getType(); - Class leaveType = propertyPath.getLeafProperty().getTypeInformation().getType(); - - sqlSession().delete( // - mapper(baseType) + ".deleteAll-" + toDashPath(propertyPath), // - new MyBatisContext(null, null, leaveType, Collections.emptyMap()) // - ); - } - - @Override - public T findById(Object id, Class domainType) { - return sqlSession().selectOne(mapper(domainType) + ".findById", - new MyBatisContext(id, null, 
domainType, Collections.emptyMap())); - } - - @Override - public Iterable findAll(Class domainType) { - return sqlSession().selectList(mapper(domainType) + ".findAll", - new MyBatisContext(null, null, domainType, Collections.emptyMap())); - } - - @Override - public Iterable findAllById(Iterable ids, Class domainType) { - return sqlSession().selectList(mapper(domainType) + ".findAllById", - new MyBatisContext(ids, null, domainType, Collections.emptyMap())); - } - - @Override - public Iterable findAllByProperty(Object rootId, JdbcPersistentProperty property) { - return sqlSession().selectList(mapper(property.getOwner().getType()) + ".findAllByProperty-" + property.getName(), - new MyBatisContext(rootId, null, property.getType(), Collections.emptyMap())); - } - - @Override - public boolean existsById(Object id, Class domainType) { - return sqlSession().selectOne(mapper(domainType) + ".existsById", - new MyBatisContext(id, null, domainType, Collections.emptyMap())); - } - - @Override - public long count(Class domainType) { - return sqlSession().selectOne(mapper(domainType) + ".count", - new MyBatisContext(null, null, domainType, Collections.emptyMap())); - } - - private String mapper(Class domainType) { - return domainType.getName() + MAPPER_SUFFIX; - } - - private SqlSession sqlSession() { - return this.sqlSession; - } - - private String toDashPath(PropertyPath propertyPath) { - return propertyPath.toDotPath().replaceAll("\\.", "-"); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/SimpleJdbcRepository.java b/src/main/java/org/springframework/data/jdbc/repository/SimpleJdbcRepository.java deleted file mode 100644 index cdeb851447..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/SimpleJdbcRepository.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository; - -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; - -import org.springframework.data.jdbc.core.JdbcEntityOperations; -import org.springframework.data.jdbc.core.JdbcEntityTemplate; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; -import org.springframework.data.repository.CrudRepository; - -/** - * @author Jens Schauder - * @since 2.0 - */ -public class SimpleJdbcRepository implements CrudRepository { - - private final JdbcPersistentEntityInformation entityInformation; - - private final JdbcEntityOperations entityOperations; - - /** - * Creates a new {@link SimpleJdbcRepository}. 
- */ - public SimpleJdbcRepository(JdbcEntityTemplate entityOperations, - JdbcPersistentEntityInformation entityInformation) { - - this.entityOperations = entityOperations; - this.entityInformation = entityInformation; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#save(S) - */ - @Override - public S save(S instance) { - - entityOperations.save(instance, entityInformation.getJavaType()); - - return instance; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#save(java.lang.Iterable) - */ - @Override - public Iterable saveAll(Iterable entities) { - - List savedEntities = new ArrayList<>(); - entities.forEach(e -> savedEntities.add(save(e))); - return savedEntities; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findOne(java.io.Serializable) - */ - @Override - public Optional findById(ID id) { - return Optional.ofNullable(entityOperations.findById(id, entityInformation.getJavaType())); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#exists(java.io.Serializable) - */ - @Override - public boolean existsById(ID id) { - return entityOperations.existsById(id, entityInformation.getJavaType()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAll() - */ - @Override - public Iterable findAll() { - return entityOperations.findAll(entityInformation.getJavaType()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAll(java.lang.Iterable) - */ - @Override - public Iterable findAllById(Iterable ids) { - return entityOperations.findAllById(ids, entityInformation.getJavaType()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#count() - */ - @Override - public long count() { - return entityOperations.count(entityInformation.getJavaType()); - } - - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.CrudRepository#delete(java.io.Serializable) - */ - @Override - public void deleteById(ID id) { - entityOperations.deleteById(id, entityInformation.getJavaType()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.lang.Object) - */ - @Override - public void delete(T instance) { - entityOperations.delete(instance, entityInformation.getJavaType()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.lang.Iterable) - */ - @Override - public void deleteAll(Iterable entities) { - - for (T entity : entities) { - entityOperations.delete(entity, (Class) entity.getClass()); - - } - } - - @Override - public void deleteAll() { - entityOperations.deleteAll(entityInformation.getJavaType()); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/config/JdbcConfiguration.java b/src/main/java/org/springframework/data/jdbc/repository/config/JdbcConfiguration.java deleted file mode 100644 index 4dafd53aae..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/config/JdbcConfiguration.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.repository.config; - -import java.util.Optional; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.jdbc.mapping.model.ConversionCustomizer; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; - -/** - * Beans that must be registered for Spring Data JDBC to work. - * - * @author Greg Turnquist - */ -@Configuration -public class JdbcConfiguration { - - @Bean - JdbcMappingContext jdbcMappingContext(NamedParameterJdbcTemplate template, Optional namingStrategy, - Optional conversionCustomizer) { - - return new JdbcMappingContext( - namingStrategy.orElse(new DefaultNamingStrategy()), template, conversionCustomizer.orElse(conversionService -> {})); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtension.java b/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtension.java deleted file mode 100644 index 01e4129e01..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/config/JdbcRepositoryConfigExtension.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.config; - -import java.util.Locale; - -import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactoryBean; -import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport; - -/** - * {@link org.springframework.data.repository.config.RepositoryConfigurationExtension} extending the repository - * registration process by registering JDBC repositories. - * - * @author Jens Schauder - * @since 2.0 - */ -public class JdbcRepositoryConfigExtension extends RepositoryConfigurationExtensionSupport { - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getModuleName() - */ - @Override - public String getModuleName() { - return "JDBC"; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getRepositoryFactoryBeanClassName() - */ - @Override - public String getRepositoryFactoryBeanClassName() { - return JdbcRepositoryFactoryBean.class.getName(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModulePrefix() - */ - @Override - protected String getModulePrefix() { - return getModuleName().toLowerCase(Locale.US); - } - -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategy.java b/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategy.java deleted file mode 100644 index ccc6039faa..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryLookupStrategy.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2018 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.support; - -import java.lang.reflect.Method; - -import org.springframework.core.convert.ConversionService; -import org.springframework.data.jdbc.core.DataAccessStrategy; -import org.springframework.data.jdbc.core.EntityRowMapper; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.projection.ProjectionFactory; -import org.springframework.data.repository.core.NamedQueries; -import org.springframework.data.repository.core.RepositoryMetadata; -import org.springframework.data.repository.query.EvaluationContextProvider; -import org.springframework.data.repository.query.QueryLookupStrategy; -import org.springframework.data.repository.query.RepositoryQuery; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.core.SingleColumnRowMapper; - -/** - * {@link QueryLookupStrategy} for JDBC repositories. Currently only supports annotated queries. 
- * - * @author Jens Schauder - * @author Kazuki Shimizu - */ -class JdbcQueryLookupStrategy implements QueryLookupStrategy { - - private final JdbcMappingContext context; - private final DataAccessStrategy accessStrategy; - private final ConversionService conversionService; - - JdbcQueryLookupStrategy(EvaluationContextProvider evaluationContextProvider, JdbcMappingContext context, - DataAccessStrategy accessStrategy) { - - this.context = context; - this.accessStrategy = accessStrategy; - this.conversionService = context.getConversions(); - } - - @Override - public RepositoryQuery resolveQuery(Method method, RepositoryMetadata repositoryMetadata, - ProjectionFactory projectionFactory, NamedQueries namedQueries) { - - JdbcQueryMethod queryMethod = new JdbcQueryMethod(method, repositoryMetadata, projectionFactory); - Class returnedObjectType = queryMethod.getReturnedObjectType(); - - RowMapper rowMapper = queryMethod.isModifyingQuery() ? null : createRowMapper(returnedObjectType); - - return new JdbcRepositoryQuery(queryMethod, context, rowMapper); - } - - private RowMapper createRowMapper(Class returnedObjectType) { - - return context.getSimpleTypeHolder().isSimpleType(returnedObjectType) - ? SingleColumnRowMapper.newInstance(returnedObjectType, conversionService) - : new EntityRowMapper<>( // - context.getRequiredPersistentEntity(returnedObjectType), // - conversionService, // - context, // - accessStrategy // - ); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryMethod.java b/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryMethod.java deleted file mode 100644 index 521514b7a3..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcQueryMethod.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.support; - -import java.lang.reflect.Method; - -import org.springframework.core.annotation.AnnotatedElementUtils; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.data.jdbc.repository.query.Modifying; -import org.springframework.data.jdbc.repository.query.Query; -import org.springframework.data.projection.ProjectionFactory; -import org.springframework.data.repository.core.RepositoryMetadata; -import org.springframework.data.repository.query.QueryMethod; -import org.springframework.lang.Nullable; - -/** - * {@link QueryMethod} implementation that implements a method by executing the query from a {@link Query} annotation on - * that method. Binds method arguments to named parameters in the SQL statement. - * - * @author Jens Schauder - * @author Kazuki Shimizu - */ -public class JdbcQueryMethod extends QueryMethod { - - private final Method method; - - public JdbcQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory factory) { - - super(method, metadata, factory); - - this.method = method; - } - - /** - * Returns the annotated query if it exists. - * - * @return May be {@code null}. - */ - @Nullable - public String getAnnotatedQuery() { - return getMergedAnnotationAttribute("value"); - } - - /** - * Returns the class to be used as {@link org.springframework.jdbc.core.RowMapper} - * - * @return May be {@code null}. 
- */ - public Class getRowMapperClass() { - return getMergedAnnotationAttribute("rowMapperClass"); - } - - /** - * Returns whether the query method is a modifying one. - * - * @return if it's a modifying query, return {@code true}. - */ - @Override - public boolean isModifyingQuery() { - return AnnotationUtils.findAnnotation(method, Modifying.class) != null; - } - - @SuppressWarnings("unchecked") - private T getMergedAnnotationAttribute(String attribute) { - - Query queryAnnotation = AnnotatedElementUtils.findMergedAnnotation(method, Query.class); - return (T) AnnotationUtils.getValue(queryAnnotation, attribute); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactory.java b/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactory.java deleted file mode 100644 index 49281dc738..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactory.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.repository.support; - -import java.util.Optional; - -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.data.jdbc.core.DataAccessStrategy; -import org.springframework.data.jdbc.core.JdbcEntityTemplate; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; -import org.springframework.data.jdbc.repository.SimpleJdbcRepository; -import org.springframework.data.repository.core.EntityInformation; -import org.springframework.data.repository.core.RepositoryInformation; -import org.springframework.data.repository.core.RepositoryMetadata; -import org.springframework.data.repository.core.support.RepositoryFactorySupport; -import org.springframework.data.repository.query.EvaluationContextProvider; -import org.springframework.data.repository.query.QueryLookupStrategy; - -/** - * Creates repository implementation based on JDBC. 
- * - * @author Jens Schauder - * @author Greg Turnquist - * @since 2.0 - */ -public class JdbcRepositoryFactory extends RepositoryFactorySupport { - - private final JdbcMappingContext context; - private final ApplicationEventPublisher publisher; - private final DataAccessStrategy accessStrategy; - - public JdbcRepositoryFactory(ApplicationEventPublisher publisher, JdbcMappingContext context, - DataAccessStrategy dataAccessStrategy) { - - this.publisher = publisher; - this.context = context; - this.accessStrategy = dataAccessStrategy; - } - - @SuppressWarnings("unchecked") - @Override - public EntityInformation getEntityInformation(Class aClass) { - return (EntityInformation) context.getRequiredPersistentEntityInformation(aClass); - } - - @SuppressWarnings("unchecked") - @Override - protected Object getTargetRepository(RepositoryInformation repositoryInformation) { - - JdbcPersistentEntityInformation persistentEntityInformation = context - .getRequiredPersistentEntityInformation(repositoryInformation.getDomainType()); - JdbcEntityTemplate template = new JdbcEntityTemplate(publisher, context, accessStrategy); - - return new SimpleJdbcRepository<>(template, persistentEntityInformation); - } - - @Override - protected Class getRepositoryBaseClass(RepositoryMetadata repositoryMetadata) { - return SimpleJdbcRepository.class; - } - - @Override - protected Optional getQueryLookupStrategy(QueryLookupStrategy.Key key, - EvaluationContextProvider evaluationContextProvider) { - - if (key != null // - && key != QueryLookupStrategy.Key.USE_DECLARED_QUERY // - && key != QueryLookupStrategy.Key.CREATE_IF_NOT_FOUND // - ) { - throw new IllegalArgumentException(String.format("Unsupported query lookup strategy %s!", key)); - } - - return Optional.of(new JdbcQueryLookupStrategy(evaluationContextProvider, context, accessStrategy)); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBean.java 
b/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBean.java deleted file mode 100644 index 533122528a..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBean.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.support; - -import java.io.Serializable; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.context.ApplicationEventPublisherAware; -import org.springframework.data.jdbc.core.DataAccessStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.repository.Repository; -import org.springframework.data.repository.core.support.RepositoryFactorySupport; -import org.springframework.data.repository.core.support.TransactionalRepositoryFactoryBeanSupport; -import org.springframework.util.Assert; - -/** - * Special adapter for Springs {@link org.springframework.beans.factory.FactoryBean} interface to allow easy setup of - * repository factories via Spring configuration. 
- * - * @author Jens Schauder - * @author Greg Turnquist - * @since 2.0 - */ -public class JdbcRepositoryFactoryBean, S, ID extends Serializable> // - extends TransactionalRepositoryFactoryBeanSupport implements ApplicationEventPublisherAware { - - private ApplicationEventPublisher publisher; - private JdbcMappingContext mappingContext; - private DataAccessStrategy dataAccessStrategy; - - JdbcRepositoryFactoryBean(Class repositoryInterface) { - super(repositoryInterface); - } - - @Override - public void setApplicationEventPublisher(ApplicationEventPublisher publisher) { - - super.setApplicationEventPublisher(publisher); - this.publisher = publisher; - } - - /** - * Creates the actual {@link RepositoryFactorySupport} instance. - * - * @return - */ - @Override - protected RepositoryFactorySupport doCreateRepositoryFactory() { - return new JdbcRepositoryFactory(publisher, mappingContext, dataAccessStrategy); - } - - @Autowired - protected void setMappingContext(JdbcMappingContext mappingContext) { - - super.setMappingContext(mappingContext); - this.mappingContext = mappingContext; - } - - @Autowired - public void setDataAccessStrategy(DataAccessStrategy dataAccessStrategy) { - this.dataAccessStrategy = dataAccessStrategy; - } - - @Override - public void afterPropertiesSet() { - - Assert.notNull(this.dataAccessStrategy, "DataAccessStrategy must not be null!"); - Assert.notNull(this.mappingContext, "MappingContext must not be null!"); - super.afterPropertiesSet(); - } -} diff --git a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryQuery.java b/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryQuery.java deleted file mode 100644 index d927c4cae4..0000000000 --- a/src/main/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryQuery.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2018 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.support; - -import org.springframework.beans.BeanUtils; -import org.springframework.dao.EmptyResultDataAccessException; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.repository.query.RepositoryQuery; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; -import org.springframework.util.StringUtils; - -/** - * A query to be executed based on a repository method, it's annotated SQL query and the arguments provided to the - * method. - * - * @author Jens Schauder - * @author Kazuki Shimizu - */ -class JdbcRepositoryQuery implements RepositoryQuery { - - private static final String PARAMETER_NEEDS_TO_BE_NAMED = "For queries with named parameters you need to provide names for method parameters. 
Use @Param for query method parameters, or when on Java 8+ use the javac flag -parameters."; - - private final JdbcQueryMethod queryMethod; - private final JdbcMappingContext context; - private final RowMapper rowMapper; - - JdbcRepositoryQuery(JdbcQueryMethod queryMethod, JdbcMappingContext context, RowMapper defaultRowMapper) { - - this.queryMethod = queryMethod; - this.context = context; - this.rowMapper = createRowMapper(queryMethod, defaultRowMapper); - } - - private static RowMapper createRowMapper(JdbcQueryMethod queryMethod, RowMapper defaultRowMapper) { - - Class rowMapperClass = queryMethod.getRowMapperClass(); - - return rowMapperClass == null || rowMapperClass == RowMapper.class ? defaultRowMapper - : (RowMapper) BeanUtils.instantiateClass(rowMapperClass); - } - - @Override - public Object execute(Object[] objects) { - - String query = determineQuery(); - - MapSqlParameterSource parameters = bindParameters(objects); - - if (queryMethod.isModifyingQuery()) { - - int updatedCount = context.getTemplate().update(query, parameters); - Class returnedObjectType = queryMethod.getReturnedObjectType(); - return (returnedObjectType == boolean.class || returnedObjectType == Boolean.class) ? 
updatedCount != 0 - : updatedCount; - } - - if (queryMethod.isCollectionQuery() || queryMethod.isStreamQuery()) { - return context.getTemplate().query(query, parameters, rowMapper); - } - - try { - return context.getTemplate().queryForObject(query, parameters, rowMapper); - } catch (EmptyResultDataAccessException e) { - return null; - } - } - - @Override - public JdbcQueryMethod getQueryMethod() { - return queryMethod; - } - - private String determineQuery() { - - String query = queryMethod.getAnnotatedQuery(); - - if (StringUtils.isEmpty(query)) { - throw new IllegalStateException(String.format("No query specified on %s", queryMethod.getName())); - } - return query; - } - - private MapSqlParameterSource bindParameters(Object[] objects) { - - MapSqlParameterSource parameters = new MapSqlParameterSource(); - queryMethod.getParameters().getBindableParameters().forEach(p -> { - - String parameterName = p.getName().orElseThrow(() -> new IllegalStateException(PARAMETER_NEEDS_TO_BE_NAMED)); - parameters.addValue(parameterName, objects[p.getIndex()]); - }); - return parameters; - } -} diff --git a/src/main/java/org/springframework/data/jdbc/support/JdbcUtil.java b/src/main/java/org/springframework/data/jdbc/support/JdbcUtil.java deleted file mode 100644 index 12ad4b39d0..0000000000 --- a/src/main/java/org/springframework/data/jdbc/support/JdbcUtil.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.support; - -import lombok.experimental.UtilityClass; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.sql.Date; -import java.sql.Time; -import java.sql.Timestamp; -import java.sql.Types; -import java.util.HashMap; -import java.util.Map; - -import org.springframework.jdbc.support.JdbcUtils; - -/** - * Contains methods dealing with the quirks of JDBC, independent of any Entity, Aggregate or Repository abstraction. - * - * @author Jens Schauder - */ -@UtilityClass -public class JdbcUtil { - - private static final Map, Integer> sqlTypeMappings = new HashMap<>(); - - static { - - sqlTypeMappings.put(String.class, Types.VARCHAR); - sqlTypeMappings.put(BigInteger.class, Types.BIGINT); - sqlTypeMappings.put(BigDecimal.class, Types.NUMERIC); - sqlTypeMappings.put(Byte.class, Types.TINYINT); - sqlTypeMappings.put(byte.class, Types.TINYINT); - sqlTypeMappings.put(Short.class, Types.SMALLINT); - sqlTypeMappings.put(short.class, Types.SMALLINT); - sqlTypeMappings.put(Integer.class, Types.INTEGER); - sqlTypeMappings.put(int.class, Types.INTEGER); - sqlTypeMappings.put(Long.class, Types.BIGINT); - sqlTypeMappings.put(long.class, Types.BIGINT); - sqlTypeMappings.put(Double.class, Types.DOUBLE); - sqlTypeMappings.put(double.class, Types.DOUBLE); - sqlTypeMappings.put(Float.class, Types.REAL); - sqlTypeMappings.put(float.class, Types.REAL); - sqlTypeMappings.put(Boolean.class, Types.BIT); - sqlTypeMappings.put(boolean.class, Types.BIT); - sqlTypeMappings.put(byte[].class, Types.VARBINARY); - sqlTypeMappings.put(Date.class, Types.DATE); - sqlTypeMappings.put(Time.class, Types.TIME); - sqlTypeMappings.put(Timestamp.class, Types.TIMESTAMP); - } - - public static int sqlTypeFor(Class type) { - return sqlTypeMappings.keySet().stream() // - .filter(k -> k.isAssignableFrom(type)) // - .findFirst() // - 
.map(sqlTypeMappings::get) // - .orElse(JdbcUtils.TYPE_UNKNOWN); - } -} diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt deleted file mode 100644 index c759ee3dd0..0000000000 --- a/src/main/resources/changelog.txt +++ /dev/null @@ -1,8 +0,0 @@ -Spring Data JDBC Changelog -========================= - -Changes in version 1.0.0.M1 (2018-02-06) ----------------------------------------- -* DATAJDBC-171 - Release 1.0 M1 (Lovelace). - - diff --git a/src/main/resources/license.txt b/src/main/resources/license.txt index 7584e2dfe2..964a55d1c3 100644 --- a/src/main/resources/license.txt +++ b/src/main/resources/license.txt @@ -1,6 +1,6 @@ Apache License Version 2.0, January 2004 - http://www.apache.org/licenses/ + https://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +192,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -207,7 +207,7 @@ similar licenses that require the source code and/or modifications to source code to be made available (as would be noted above), you may obtain a copy of the source code corresponding to the binaries for such open source components and modifications thereto, if any, (the "Source Files"), by -downloading the Source Files from http://www.springsource.org/download, +downloading the Source Files from https://www.springsource.org/download, or by sending a request, with your name and address to: VMware, Inc., 3401 Hillview Avenue, Palo Alto, CA 94304, United States of America or email info@vmware.com. 
All such requests should clearly specify: OPEN SOURCE FILES REQUEST, Attention General diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 4ab3c232d1..2d1d063052 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,5 +1,5 @@ -Spring Data JDBC 1.0 M1 -Copyright (c) [2017-2018] Pivotal Software, Inc. +Spring Data Relational 3.5 RC1 (2025.0.0) +Copyright (c) [2017-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). You may not use this product except in compliance with the License. @@ -8,3 +8,56 @@ This product may include a number of subcomponents with separate copyright notices and license terms. Your use of the source code for the these subcomponents is subject to the terms and conditions of the subcomponent's license, as noted in the LICENSE file. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/test/java/org/springframework/data/jdbc/core/DefaultDataAccessStrategyUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/DefaultDataAccessStrategyUnitTests.java deleted file mode 100644 index 6ffbe1403e..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/DefaultDataAccessStrategyUnitTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import lombok.RequiredArgsConstructor; - -import java.util.HashMap; - -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.jdbc.core.namedparam.SqlParameterSource; -import org.springframework.jdbc.support.KeyHolder; - -/** - * @author Jens Schauder - */ -public class DefaultDataAccessStrategyUnitTests { - - public static final long ID_FROM_ADDITIONAL_VALUES = 23L; - public static final long ORIGINAL_ID = 4711L; - - NamedParameterJdbcOperations jdbcOperations = mock(NamedParameterJdbcOperations.class); - JdbcMappingContext context = new JdbcMappingContext(new DefaultNamingStrategy(), jdbcOperations, __ -> {}); - HashMap additionalParameters = new HashMap<>(); - ArgumentCaptor captor = ArgumentCaptor.forClass(SqlParameterSource.class); - - DefaultDataAccessStrategy accessStrategy = new DefaultDataAccessStrategy( // - new SqlGeneratorSource(context), // - jdbcOperations, // - context // - ); - - @Test // DATAJDBC-146 - public void additionalParameterForIdDoesNotLeadToDuplicateParameters() { - - additionalParameters.put("id", ID_FROM_ADDITIONAL_VALUES); - - accessStrategy.insert(new DummyEntity(ORIGINAL_ID), DummyEntity.class, additionalParameters); - - verify(jdbcOperations).update(eq("insert into DummyEntity (id) values (:id)"), captor.capture(), - any(KeyHolder.class)); - assertThat(captor.getValue().getValue("id")).isEqualTo(ID_FROM_ADDITIONAL_VALUES); - } - - @Test // DATAJDBC-146 - public void additionalParametersGetAddedToStatement() { - - additionalParameters.put("reference", ID_FROM_ADDITIONAL_VALUES); - - 
accessStrategy.insert(new DummyEntity(ORIGINAL_ID), DummyEntity.class, additionalParameters); - - verify(jdbcOperations).update(eq("insert into DummyEntity (id, reference) values (:id, :reference)"), - captor.capture(), any(KeyHolder.class)); - assertThat(captor.getValue().getValue("id")).isEqualTo(ORIGINAL_ID); - } - - @RequiredArgsConstructor - private static class DummyEntity { - - @Id private final Long id; - } - -} diff --git a/src/test/java/org/springframework/data/jdbc/core/DefaultJdbcInterpreterUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/DefaultJdbcInterpreterUnitTests.java deleted file mode 100644 index 134c826cb1..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/DefaultJdbcInterpreterUnitTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import java.util.AbstractMap.SimpleEntry; -import java.util.Map; - -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.core.conversion.DbAction; -import org.springframework.data.jdbc.core.conversion.DbAction.Insert; -import org.springframework.data.jdbc.core.conversion.JdbcPropertyPath; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests for {@link DefaultJdbcInterpreter} - * - * @author Jens Schauder - */ -public class DefaultJdbcInterpreterUnitTests { - - static final long CONTAINER_ID = 23L; - static final String BACK_REFERENCE = "back-reference"; - - JdbcMappingContext context = new JdbcMappingContext(new DefaultNamingStrategy() { - @Override - public String getReverseColumnName(JdbcPersistentProperty property) { - return BACK_REFERENCE; - } - }, mock(NamedParameterJdbcOperations.class), __ -> {}); - - DataAccessStrategy dataAccessStrategy = mock(DataAccessStrategy.class); - DefaultJdbcInterpreter interpreter = new DefaultJdbcInterpreter(context, dataAccessStrategy); - - @Test // DATAJDBC-145 - public void insertDoesHonourNamingStrategyForBackReference() { - - Container container = new Container(); - container.id = CONTAINER_ID; - - Element element = new Element(); - - Insert containerInsert = DbAction.insert(container, JdbcPropertyPath.from("", Container.class), null); - Insert insert = DbAction.insert(element, JdbcPropertyPath.from("element", Container.class), containerInsert); - - interpreter.interpret(insert); - - ArgumentCaptor> argumentCaptor = 
ArgumentCaptor.forClass(Map.class); - verify(dataAccessStrategy).insert(eq(element), eq(Element.class), argumentCaptor.capture()); - - assertThat(argumentCaptor.getValue()).containsExactly(new SimpleEntry(BACK_REFERENCE, CONTAINER_ID)); - } - - static class Container { - - @Id Long id; - - Element element; - } - - static class Element {} -} diff --git a/src/test/java/org/springframework/data/jdbc/core/EntityRowMapperUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/EntityRowMapperUnitTests.java deleted file mode 100644 index 36798f02e5..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/EntityRowMapperUnitTests.java +++ /dev/null @@ -1,353 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import lombok.RequiredArgsConstructor; -import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.core.convert.support.GenericConversionService; -import org.springframework.data.annotation.Id; -import org.springframework.data.convert.Jsr310Converters; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.data.repository.query.Param; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.util.Assert; - -import javax.naming.OperationNotSupportedException; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.AbstractMap.SimpleEntry; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static java.util.Arrays.*; -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -/** - * Tests the extraction of entities from a {@link ResultSet} by the {@link EntityRowMapper}. 
- * - * @author Jens Schauder - */ -public class EntityRowMapperUnitTests { - - public static final long ID_FOR_ENTITY_REFERENCING_MAP = 42L; - public static final long ID_FOR_ENTITY_REFERENCING_LIST = 4711L; - public static final long ID_FOR_ENTITY_NOT_REFERENCING_MAP = 23L; - public static final DefaultNamingStrategy X_APPENDING_NAMINGSTRATEGY = new DefaultNamingStrategy() { - @Override - public String getColumnName(JdbcPersistentProperty property) { - return super.getColumnName(property) + "x"; - } - }; - - @Test // DATAJDBC-113 - public void simpleEntitiesGetProperlyExtracted() throws SQLException { - - ResultSet rs = mockResultSet(asList("id", "name"), // - ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - rs.next(); - - Trivial extracted = createRowMapper(Trivial.class).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name) // - .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - } - - @Test // DATAJDBC-181 - public void namingStrategyGetsHonored() throws SQLException { - - ResultSet rs = mockResultSet(asList("idx", "namex"), // - ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - rs.next(); - - Trivial extracted = createRowMapper(Trivial.class, X_APPENDING_NAMINGSTRATEGY).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name) // - .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - } - - @Test // DATAJDBC-181 - public void namingStrategyGetsHonoredForConstructor() throws SQLException { - - ResultSet rs = mockResultSet(asList("idx", "namex"), // - ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - rs.next(); - - TrivialImmutable extracted = createRowMapper(TrivialImmutable.class, X_APPENDING_NAMINGSTRATEGY).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name) // - .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - } - - @Test // DATAJDBC-113 - public void simpleOneToOneGetsProperlyExtracted() throws 
SQLException { - - ResultSet rs = mockResultSet(asList("id", "name", "child_id", "child_name"), // - ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); - rs.next(); - - OneToOne extracted = createRowMapper(OneToOne.class).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name, e -> e.child.id, e -> e.child.name) // - .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 24L, "beta"); - } - - @Test // DATAJDBC-113 - public void collectionReferenceGetsLoadedWithAdditionalSelect() throws SQLException { - - ResultSet rs = mockResultSet(asList("id", "name"), // - ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha"); - rs.next(); - - OneToSet extracted = createRowMapper(OneToSet.class).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name, e -> e.children.size()) // - .containsExactly(ID_FOR_ENTITY_NOT_REFERENCING_MAP, "alpha", 2); - } - - @Test // DATAJDBC-131 - public void mapReferenceGetsLoadedWithAdditionalSelect() throws SQLException { - - ResultSet rs = mockResultSet(asList("id", "name"), // - ID_FOR_ENTITY_REFERENCING_MAP, "alpha"); - rs.next(); - - OneToMap extracted = createRowMapper(OneToMap.class).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name, e -> e.children.size()) // - .containsExactly(ID_FOR_ENTITY_REFERENCING_MAP, "alpha", 2); - } - - @Test // DATAJDBC-130 - public void listReferenceGetsLoadedWithAdditionalSelect() throws SQLException { - - ResultSet rs = mockResultSet(asList("id", "name"), // - ID_FOR_ENTITY_REFERENCING_LIST, "alpha"); - rs.next(); - - OneToMap extracted = createRowMapper(OneToMap.class).mapRow(rs, 1); - - assertThat(extracted) // - .isNotNull() // - .extracting(e -> e.id, e -> e.name, e -> e.children.size()) // - .containsExactly(ID_FOR_ENTITY_REFERENCING_LIST, "alpha", 2); - } - - private EntityRowMapper createRowMapper(Class type) { - return createRowMapper(type, new 
DefaultNamingStrategy()); - } - - private EntityRowMapper createRowMapper(Class type, NamingStrategy namingStrategy) { - - JdbcMappingContext context = new JdbcMappingContext( // - namingStrategy, // - mock(NamedParameterJdbcOperations.class), // - __ -> { - } // - ); - - DataAccessStrategy accessStrategy = mock(DataAccessStrategy.class); - - // the ID of the entity is used to determine what kind of ResultSet is needed for subsequent selects. - doReturn(new HashSet<>(asList(new Trivial(), new Trivial()))).when(accessStrategy).findAllByProperty(eq(ID_FOR_ENTITY_NOT_REFERENCING_MAP), - any(JdbcPersistentProperty.class)); - - doReturn(new HashSet<>(asList( // - new SimpleEntry("one", new Trivial()), // - new SimpleEntry("two", new Trivial()) // - ))).when(accessStrategy).findAllByProperty(eq(ID_FOR_ENTITY_REFERENCING_MAP), any(JdbcPersistentProperty.class)); - - doReturn(new HashSet<>(asList( // - new SimpleEntry(1, new Trivial()), // - new SimpleEntry(2, new Trivial()) // - ))).when(accessStrategy).findAllByProperty(eq(ID_FOR_ENTITY_REFERENCING_LIST), any(JdbcPersistentProperty.class)); - - GenericConversionService conversionService = new GenericConversionService(); - conversionService.addConverter(new IterableOfEntryToMapConverter()); - DefaultConversionService.addDefaultConverters(conversionService); - Jsr310Converters.getConvertersToRegister().forEach(conversionService::addConverter); - - return new EntityRowMapper<>((JdbcPersistentEntity) context.getRequiredPersistentEntity(type), - conversionService, context, accessStrategy); - } - - private static ResultSet mockResultSet(List columns, Object... 
values) { - - Assert.isTrue( // - values.length % columns.size() == 0, // - String // - .format( // - "Number of values [%d] must be a multiple of the number of columns [%d]", // - values.length, // - columns.size() // - ) // - ); - - List> result = convertValues(columns, values); - - return mock(ResultSet.class, new ResultSetAnswer(result)); - } - - private static List> convertValues(List columns, Object[] values) { - - List> result = new ArrayList<>(); - - int index = 0; - while (index < values.length) { - - Map row = new HashMap<>(); - result.add(row); - for (String column : columns) { - - row.put(column, values[index]); - index++; - } - } - return result; - } - - private static class ResultSetAnswer implements Answer { - - private final List> values; - private int index = -1; - - public ResultSetAnswer(List> values) { - - this.values = values; - } - - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - - switch (invocation.getMethod().getName()) { - case "next": - } - - if (invocation.getMethod().getName().equals("next")) - return next(); - - if (invocation.getMethod().getName().equals("getObject")) - return getObject(invocation.getArgument(0)); - - if (invocation.getMethod().getName().equals("isAfterLast")) - return isAfterLast(); - - if (invocation.getMethod().getName().equals("isBeforeFirst")) - return isBeforeFirst(); - - if (invocation.getMethod().getName().equals("getRow")) - return isAfterLast() || isBeforeFirst() ? 
0 : index + 1; - - if (invocation.getMethod().getName().equals("toString")) - return this.toString(); - - throw new OperationNotSupportedException(invocation.getMethod().getName()); - } - - private boolean isAfterLast() { - return index >= values.size() && !values.isEmpty(); - } - - private boolean isBeforeFirst() { - return index < 0 && !values.isEmpty(); - } - - private Object getObject(String column) { - - Map rowMap = values.get(index); - - Assert.isTrue(rowMap.containsKey(column), String.format("Trying to access a column (%s) that does not exist", column)); - - return rowMap.get(column); - } - - private boolean next() { - - index++; - return index < values.size(); - } - } - - @RequiredArgsConstructor - static class TrivialImmutable { - - @Id - private final Long id; - private final String name; - } - - static class Trivial { - - @Id - Long id; - String name; - } - - static class OneToOne { - - @Id - Long id; - String name; - Trivial child; - } - - static class OneToSet { - - @Id - Long id; - String name; - Set children; - } - - static class OneToMap { - - @Id - Long id; - String name; - Map children; - } - - static class OneToList { - - @Id - Long id; - String name; - List children; - } -} diff --git a/src/test/java/org/springframework/data/jdbc/core/EventPublishingEntityRowMapperUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/EventPublishingEntityRowMapperUnitTests.java deleted file mode 100644 index 2abe30e2a5..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/EventPublishingEntityRowMapperUnitTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.isA; -import static org.mockito.Mockito.*; - -import lombok.Value; - -import java.sql.ResultSet; -import java.sql.SQLException; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.event.AfterCreation; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntityInformation; -import org.springframework.jdbc.core.RowMapper; - -/** - * Unit tests for {@link EventPublishingEntityRowMapper}. 
- * - * @author Jens Schauder - * @author Oliver Gierke - */ -@RunWith(MockitoJUnitRunner.class) -public class EventPublishingEntityRowMapperUnitTests { - - @Mock RowMapper rowMapperDelegate; - @Mock JdbcPersistentEntityInformation entityInformation; - @Mock ApplicationEventPublisher publisher; - - @Test // DATAJDBC-99 - public void eventGetsPublishedAfterInstantiation() throws SQLException { - - when(rowMapperDelegate.mapRow(any(ResultSet.class), anyInt())).thenReturn(new DummyEntity(1L)); - when(entityInformation.getRequiredId(any())).thenReturn(1L); - - EventPublishingEntityRowMapper rowMapper = new EventPublishingEntityRowMapper<>(rowMapperDelegate, - entityInformation, publisher); - - ResultSet resultSet = mock(ResultSet.class); - rowMapper.mapRow(resultSet, 1); - - verify(publisher).publishEvent(isA(AfterCreation.class)); - } - - @Value - static class DummyEntity { - @Id Long Id; - } -} diff --git a/src/test/java/org/springframework/data/jdbc/core/JdbcEntityTemplateIntegrationTests.java b/src/test/java/org/springframework/data/jdbc/core/JdbcEntityTemplateIntegrationTests.java deleted file mode 100644 index e2f0bfe6f4..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/JdbcEntityTemplateIntegrationTests.java +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import static java.util.Collections.singletonList; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.tuple; - -import lombok.Data; - -import org.assertj.core.api.SoftAssertions; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.testing.TestConfiguration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.transaction.annotation.Transactional; - -/** - * Integration tests for {@link JdbcEntityTemplate}. 
- * - * @author Jens Schauder - */ -@ContextConfiguration -@Transactional -public class JdbcEntityTemplateIntegrationTests { - - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); - @Autowired JdbcEntityOperations template; - - LegoSet legoSet = createLegoSet(); - - @Test // DATAJDBC-112 - public void saveAndLoadAnEntityWithReferencedEntityById() { - - template.save(legoSet, LegoSet.class); - - assertThat(legoSet.manual.id).describedAs("id of stored manual").isNotNull(); - - LegoSet reloadedLegoSet = template.findById(legoSet.getId(), LegoSet.class); - - assertThat(reloadedLegoSet.manual).isNotNull(); - - SoftAssertions softly = new SoftAssertions(); - - softly.assertThat(reloadedLegoSet.manual.getId()) // - .isEqualTo(legoSet.getManual().getId()) // - .isNotNull(); - softly.assertThat(reloadedLegoSet.manual.getContent()).isEqualTo(legoSet.getManual().getContent()); - - softly.assertAll(); - } - - @Test // DATAJDBC-112 - public void saveAndLoadManyEntitiesWithReferencedEntity() { - - template.save(legoSet, LegoSet.class); - - Iterable reloadedLegoSets = template.findAll(LegoSet.class); - - assertThat(reloadedLegoSets).hasSize(1).extracting("id", "manual.id", "manual.content") - .contains(tuple(legoSet.getId(), legoSet.getManual().getId(), legoSet.getManual().getContent())); - } - - @Test // DATAJDBC-112 - public void saveAndLoadManyEntitiesByIdWithReferencedEntity() { - - template.save(legoSet, LegoSet.class); - - Iterable reloadedLegoSets = template.findAllById(singletonList(legoSet.getId()), LegoSet.class); - - assertThat(reloadedLegoSets).hasSize(1).extracting("id", "manual.id", "manual.content") - .contains(tuple(legoSet.getId(), legoSet.getManual().getId(), legoSet.getManual().getContent())); - } - - @Test // DATAJDBC-112 - public void saveAndLoadAnEntityWithReferencedNullEntity() { - - legoSet.setManual(null); - - template.save(legoSet, LegoSet.class); - - 
LegoSet reloadedLegoSet = template.findById(legoSet.getId(), LegoSet.class); - - assertThat(reloadedLegoSet.manual).isNull(); - } - - @Test // DATAJDBC-112 - public void saveAndDeleteAnEntityWithReferencedEntity() { - - template.save(legoSet, LegoSet.class); - - template.delete(legoSet, LegoSet.class); - - SoftAssertions softly = new SoftAssertions(); - - softly.assertThat(template.findAll(LegoSet.class)).isEmpty(); - softly.assertThat(template.findAll(Manual.class)).isEmpty(); - - softly.assertAll(); - } - - @Test // DATAJDBC-112 - public void saveAndDeleteAllWithReferencedEntity() { - - template.save(legoSet, LegoSet.class); - - template.deleteAll(LegoSet.class); - - SoftAssertions softly = new SoftAssertions(); - - assertThat(template.findAll(LegoSet.class)).isEmpty(); - assertThat(template.findAll(Manual.class)).isEmpty(); - - softly.assertAll(); - } - - @Test // DATAJDBC-112 - public void updateReferencedEntityFromNull() { - - legoSet.setManual(null); - template.save(legoSet, LegoSet.class); - - Manual manual = new Manual(23L); - manual.setContent("Some content"); - legoSet.setManual(manual); - - template.save(legoSet, LegoSet.class); - - LegoSet reloadedLegoSet = template.findById(legoSet.getId(), LegoSet.class); - - assertThat(reloadedLegoSet.manual.content).isEqualTo("Some content"); - } - - @Test // DATAJDBC-112 - public void updateReferencedEntityToNull() { - - template.save(legoSet, LegoSet.class); - - legoSet.setManual(null); - - template.save(legoSet, LegoSet.class); - - LegoSet reloadedLegoSet = template.findById(legoSet.getId(), LegoSet.class); - - SoftAssertions softly = new SoftAssertions(); - - softly.assertThat(reloadedLegoSet.manual).isNull(); - softly.assertThat(template.findAll(Manual.class)).describedAs("Manuals failed to delete").isEmpty(); - - softly.assertAll(); - } - - @Test // DATAJDBC-112 - public void replaceReferencedEntity() { - - template.save(legoSet, LegoSet.class); - - Manual manual = new Manual(null); - manual.setContent("other 
content"); - legoSet.setManual(manual); - - template.save(legoSet, LegoSet.class); - - LegoSet reloadedLegoSet = template.findById(legoSet.getId(), LegoSet.class); - - SoftAssertions softly = new SoftAssertions(); - - softly.assertThat(reloadedLegoSet.manual.content).isEqualTo("other content"); - softly.assertThat(template.findAll(Manual.class)).describedAs("The should be only one manual").hasSize(1); - - softly.assertAll(); - } - - @Test // DATAJDBC-112 - public void changeReferencedEntity() { - - template.save(legoSet, LegoSet.class); - - legoSet.manual.setContent("new content"); - - template.save(legoSet, LegoSet.class); - - LegoSet reloadedLegoSet = template.findById(legoSet.getId(), LegoSet.class); - - assertThat(reloadedLegoSet.manual.content).isEqualTo("new content"); - } - - private static LegoSet createLegoSet() { - - LegoSet entity = new LegoSet(); - entity.setName("Star Destroyer"); - - Manual manual = new Manual(null); - manual.setContent("Accelerates to 99% of light speed. Destroys almost everything. 
See https://what-if.xkcd.com/1/"); - entity.setManual(manual); - - return entity; - } - - @Data - static class LegoSet { - - @Id private Long id; - - private String name; - - private Manual manual; - - } - - @Data - static class Manual { - - @Id private final Long id; - private String content; - - } - - @Configuration - @Import(TestConfiguration.class) - static class Config { - - @Bean - Class testClass() { - return JdbcEntityTemplateIntegrationTests.class; - } - - @Bean - JdbcEntityOperations operations(ApplicationEventPublisher publisher, JdbcMappingContext context, DataAccessStrategy dataAccessStrategy) { - return new JdbcEntityTemplate(publisher, context, dataAccessStrategy); - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/core/SelectBuilderUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/SelectBuilderUnitTests.java deleted file mode 100644 index 5877f4e1cc..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/SelectBuilderUnitTests.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core; - -import static org.assertj.core.api.Assertions.*; - -import org.junit.Test; - -/** - * Unit tests for the {@link SelectBuilder}. 
- * - * @author Jens Schauder - */ -public class SelectBuilderUnitTests { - - @Test // DATAJDBC-112 - public void simplestSelect() { - - String sql = new SelectBuilder("mytable") // - .column(cb -> cb.tableAlias("mytable").column("mycolumn").as("myalias")) // - .build(); - - assertThat(sql).isEqualTo("SELECT mytable.mycolumn AS myalias FROM mytable"); - } - - @Test // DATAJDBC-112 - public void columnWithoutTableAlias() { - - String sql = new SelectBuilder("mytable") // - .column(cb -> cb.column("mycolumn").as("myalias")) // - .build(); - - assertThat(sql).isEqualTo("SELECT mycolumn AS myalias FROM mytable"); - } - - @Test // DATAJDBC-112 - public void whereClause() { - - String sql = new SelectBuilder("mytable") // - .column(cb -> cb.tableAlias("mytable").column("mycolumn").as("myalias")) // - .where(cb -> cb.tableAlias("mytable").column("mycolumn").eq().variable("var")).build(); - - assertThat(sql).isEqualTo("SELECT mytable.mycolumn AS myalias FROM mytable WHERE mytable.mycolumn = :var"); - } - - @Test // DATAJDBC-112 - public void multipleColumnsSelect() { - - String sql = new SelectBuilder("mytable") // - .column(cb -> cb.tableAlias("mytable").column("one").as("oneAlias")) // - .column(cb -> cb.tableAlias("mytable").column("two").as("twoAlias")) // - .build(); - - assertThat(sql).isEqualTo("SELECT mytable.one AS oneAlias, mytable.two AS twoAlias FROM mytable"); - } - - @Test // DATAJDBC-112 - public void join() { - String sql = new SelectBuilder("mytable") // - .column(cb -> cb.tableAlias("mytable").column("mycolumn").as("myalias")) // - .join(jb -> jb.table("other").as("o").where("oid").eq().column("mytable", "id")).build(); - - assertThat(sql).isEqualTo("SELECT mytable.mycolumn AS myalias FROM mytable JOIN other AS o ON o.oid = mytable.id"); - } - - @Test // DATAJDBC-112 - public void outerJoin() { - String sql = new SelectBuilder("mytable") // - .column(cb -> cb.tableAlias("mytable").column("mycolumn").as("myalias")) // - .join(jb -> 
jb.rightOuter().table("other").as("o").where("oid").eq().column("mytable", "id")).build(); - - assertThat(sql) - .isEqualTo("SELECT mytable.mycolumn AS myalias FROM mytable RIGHT OUTER JOIN other AS o ON o.oid = mytable.id"); - } - -} diff --git a/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorContextBasedNamingStrategyUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorContextBasedNamingStrategyUnitTests.java deleted file mode 100644 index 88c48f15d7..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorContextBasedNamingStrategyUnitTests.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; - -import org.assertj.core.api.SoftAssertions; -import org.junit.Test; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests to verify a contextual {@link NamingStrategy} implementation that customizes using a user-centric {@link ThreadLocal}. - * - * NOTE: Due to the need to verify SQL generation and {@link SqlGenerator}'s package-private status suggests - * this unit test exist in this package, not {@literal org.springframework.data.jdbc.mappings.model}. - * - * @author Greg Turnquist - */ -public class SqlGeneratorContextBasedNamingStrategyUnitTests { - - private final ThreadLocal userHandler = new ThreadLocal<>(); - - /** - * Use a {@link DefaultNamingStrategy}, but override the schema with a {@link ThreadLocal}-based setting. 
- */ - private final NamingStrategy contextualNamingStrategy = new DefaultNamingStrategy() { - @Override - public String getSchema() { - return userHandler.get(); - } - }; - - @Test // DATAJDBC-107 - public void findOne() { - - testAgainstMultipleUsers(user -> { - - SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); - - String sql = sqlGenerator.getFindOne(); - - SoftAssertions softAssertions = new SoftAssertions(); - softAssertions.assertThat(sql) // - .startsWith("SELECT") // - .contains(user + ".DummyEntity.id AS id,") // - .contains(user + ".DummyEntity.name AS name,") // - .contains("ref.l1id AS ref_l1id") // - .contains("ref.content AS ref_content") // - .contains("FROM " + user + ".DummyEntity"); - softAssertions.assertAll(); - }); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteFirstLevel() { - - testAgainstMultipleUsers(user -> { - - SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); - - String sql = sqlGenerator.createDeleteByPath(PropertyPath.from("ref", DummyEntity.class)); - - assertThat(sql).isEqualTo( - "DELETE FROM " + user + ".ReferencedEntity WHERE " + user + ".DummyEntity = :rootId"); - }); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteAllSecondLevel() { - - testAgainstMultipleUsers(user -> { - - SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); - - String sql = sqlGenerator.createDeleteByPath(PropertyPath.from("ref.further", DummyEntity.class)); - - assertThat(sql).isEqualTo( - "DELETE FROM " + user + ".SecondLevelReferencedEntity " + - "WHERE " + user + ".ReferencedEntity IN " + - "(SELECT l1id FROM " + user + ".ReferencedEntity " + - "WHERE " + user + ".DummyEntity = :rootId)"); - }); - } - - @Test // DATAJDBC-107 - public void deleteAll() { - - testAgainstMultipleUsers(user -> { - - SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); - - String sql = sqlGenerator.createDeleteAllSql(null); - - 
assertThat(sql).isEqualTo("DELETE FROM " + user + ".DummyEntity"); - }); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteAllFirstLevel() { - - testAgainstMultipleUsers(user -> { - - SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); - - String sql = sqlGenerator.createDeleteAllSql(PropertyPath.from("ref", DummyEntity.class)); - - assertThat(sql).isEqualTo( - "DELETE FROM " + user + ".ReferencedEntity WHERE " + user + ".DummyEntity IS NOT NULL"); - }); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteSecondLevel() { - - testAgainstMultipleUsers(user -> { - - SqlGenerator sqlGenerator = configureSqlGenerator(contextualNamingStrategy); - - String sql = sqlGenerator.createDeleteAllSql(PropertyPath.from("ref.further", DummyEntity.class)); - - assertThat(sql).isEqualTo( - "DELETE FROM " + user + ".SecondLevelReferencedEntity " + - "WHERE " + user + ".ReferencedEntity IN " + - "(SELECT l1id FROM " + user + ".ReferencedEntity " + - "WHERE " + user + ".DummyEntity IS NOT NULL)"); - }); - } - - /** - * Take a set of user-based assertions and run them against multiple users, in different threads. - */ - private void testAgainstMultipleUsers(Consumer testAssertions) { - - CountDownLatch latch = new CountDownLatch(2); - - threadedTest("User1", latch, testAssertions); - threadedTest("User2", latch, testAssertions); - - try { - latch.await(10L, TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - - /** - * Inside a {@link Runnable}, fetch the {@link ThreadLocal}-based username and execute the provided - * set of assertions. Then signal through the provided {@link CountDownLatch}. - */ - private void threadedTest(String user, CountDownLatch latch, Consumer testAssertions) { - - new Thread(() -> { - userHandler.set(user); - - testAssertions.accept(user); - - latch.countDown(); - }).start(); - } - - /** - * Plug in a custom {@link NamingStrategy} for this test case. 
- */ - private SqlGenerator configureSqlGenerator(NamingStrategy namingStrategy) { - - JdbcMappingContext context = new JdbcMappingContext(namingStrategy, mock(NamedParameterJdbcOperations.class), __ -> {}); - JdbcPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); - - return new SqlGenerator(context, persistentEntity, new SqlGeneratorSource(context)); - } - - @SuppressWarnings("unused") - static class DummyEntity { - - @Id Long id; - String name; - ReferencedEntity ref; - } - - @SuppressWarnings("unused") - static class ReferencedEntity { - - @Id Long l1id; - String content; - SecondLevelReferencedEntity further; - } - - @SuppressWarnings("unused") - static class SecondLevelReferencedEntity { - - @Id Long l2id; - String something; - } - -} diff --git a/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorFixedNamingStrategyUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorFixedNamingStrategyUnitTests.java deleted file mode 100644 index 0fe50121b1..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorFixedNamingStrategyUnitTests.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import org.assertj.core.api.SoftAssertions; -import org.junit.Test; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests the {@link SqlGenerator} with a fixed {@link NamingStrategy} implementation containing a hard wired - * schema, table, and property prefix. - * - * @author Greg Turnquist - */ -public class SqlGeneratorFixedNamingStrategyUnitTests { - - final NamingStrategy fixedCustomTablePrefixStrategy = new DefaultNamingStrategy() { - - @Override - public String getSchema() { - return "FixedCustomSchema"; - } - - @Override - public String getTableName(Class type) { - return "FixedCustomTablePrefix_" + type.getSimpleName(); - } - - @Override - public String getColumnName(JdbcPersistentProperty property) { - return "FixedCustomPropertyPrefix_" + property.getName(); - } - }; - - final NamingStrategy upperCaseLowerCaseStrategy = new DefaultNamingStrategy() { - - @Override - public String getTableName(Class type) { - return type.getSimpleName().toUpperCase(); - } - - @Override - public String getColumnName(JdbcPersistentProperty property) { - return property.getName().toLowerCase(); - } - }; - - @Test // DATAJDBC-107 - public void findOneWithOverriddenFixedTableName() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.getFindOne(); - - SoftAssertions 
softAssertions = new SoftAssertions(); - softAssertions.assertThat(sql) // - .startsWith("SELECT") // - .contains( - "FixedCustomSchema.FixedCustomTablePrefix_DummyEntity.FixedCustomPropertyPrefix_id AS FixedCustomPropertyPrefix_id,") // - .contains( - "FixedCustomSchema.FixedCustomTablePrefix_DummyEntity.FixedCustomPropertyPrefix_name AS FixedCustomPropertyPrefix_name,") // - .contains("ref.FixedCustomPropertyPrefix_l1id AS ref_FixedCustomPropertyPrefix_l1id") // - .contains("ref.FixedCustomPropertyPrefix_content AS ref_FixedCustomPropertyPrefix_content") // - .contains("FROM FixedCustomSchema.FixedCustomTablePrefix_DummyEntity"); - softAssertions.assertAll(); - } - - @Test // DATAJDBC-107 - public void findOneWithUppercasedTablesAndLowercasedColumns() { - - SqlGenerator sqlGenerator = configureSqlGenerator(upperCaseLowerCaseStrategy); - - String sql = sqlGenerator.getFindOne(); - - SoftAssertions softAssertions = new SoftAssertions(); - softAssertions.assertThat(sql) // - .startsWith("SELECT") // - .contains("DUMMYENTITY.id AS id,") // - .contains("DUMMYENTITY.name AS name,") // - .contains("ref.l1id AS ref_l1id") // - .contains("ref.content AS ref_content") // - .contains("FROM DUMMYENTITY"); - softAssertions.assertAll(); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteFirstLevel() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.createDeleteByPath(PropertyPath.from("ref", DummyEntity.class)); - - assertThat(sql).isEqualTo("DELETE FROM FixedCustomSchema.FixedCustomTablePrefix_ReferencedEntity " - + "WHERE FixedCustomSchema.FixedCustomTablePrefix_DummyEntity = :rootId"); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteAllSecondLevel() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.createDeleteByPath(PropertyPath.from("ref.further", DummyEntity.class)); - - assertThat(sql).isEqualTo("DELETE FROM 
FixedCustomSchema.FixedCustomTablePrefix_SecondLevelReferencedEntity " - + "WHERE FixedCustomSchema.FixedCustomTablePrefix_ReferencedEntity IN " - + "(SELECT FixedCustomPropertyPrefix_l1id " + "FROM FixedCustomSchema.FixedCustomTablePrefix_ReferencedEntity " - + "WHERE FixedCustomSchema.FixedCustomTablePrefix_DummyEntity = :rootId)"); - } - - @Test // DATAJDBC-107 - public void deleteAll() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.createDeleteAllSql(null); - - assertThat(sql).isEqualTo("DELETE FROM FixedCustomSchema.FixedCustomTablePrefix_DummyEntity"); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteAllFirstLevel() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.createDeleteAllSql(PropertyPath.from("ref", DummyEntity.class)); - - assertThat(sql).isEqualTo("DELETE FROM FixedCustomSchema.FixedCustomTablePrefix_ReferencedEntity " - + "WHERE FixedCustomSchema.FixedCustomTablePrefix_DummyEntity IS NOT NULL"); - } - - @Test // DATAJDBC-107 - public void cascadingDeleteSecondLevel() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.createDeleteAllSql(PropertyPath.from("ref.further", DummyEntity.class)); - - assertThat(sql).isEqualTo("DELETE FROM FixedCustomSchema.FixedCustomTablePrefix_SecondLevelReferencedEntity " - + "WHERE FixedCustomSchema.FixedCustomTablePrefix_ReferencedEntity IN " - + "(SELECT FixedCustomPropertyPrefix_l1id " + "FROM FixedCustomSchema.FixedCustomTablePrefix_ReferencedEntity " - + "WHERE FixedCustomSchema.FixedCustomTablePrefix_DummyEntity IS NOT NULL)"); - } - - @Test // DATAJDBC-113 - public void deleteByList() { - - SqlGenerator sqlGenerator = configureSqlGenerator(fixedCustomTablePrefixStrategy); - - String sql = sqlGenerator.getDeleteByList(); - - assertThat(sql).isEqualTo("DELETE FROM 
FixedCustomSchema.FixedCustomTablePrefix_DummyEntity WHERE FixedCustomPropertyPrefix_id IN (:ids)"); - } - - - /** - * Plug in a custom {@link NamingStrategy} for this test case. - * - * @param namingStrategy - */ - private SqlGenerator configureSqlGenerator(NamingStrategy namingStrategy) { - - JdbcMappingContext context = new JdbcMappingContext(namingStrategy, mock(NamedParameterJdbcOperations.class), __ -> {}); - JdbcPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); - return new SqlGenerator(context, persistentEntity, new SqlGeneratorSource(context)); - } - - static class DummyEntity { - - @Id Long id; - String name; - ReferencedEntity ref; - } - - static class ReferencedEntity { - - @Id Long l1id; - String content; - SecondLevelReferencedEntity further; - } - - static class SecondLevelReferencedEntity { - - @Id Long l2id; - String something; - } - -} diff --git a/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorUnitTests.java deleted file mode 100644 index 3ec255a536..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/SqlGeneratorUnitTests.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import java.util.Map; -import java.util.Set; - -import org.assertj.core.api.SoftAssertions; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentEntity; -import org.springframework.data.jdbc.mapping.model.JdbcPersistentProperty; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests for the {@link SqlGenerator}. - * - * @author Jens Schauder - * @author Greg Turnquist - */ -public class SqlGeneratorUnitTests { - - private SqlGenerator sqlGenerator; - - @Before - public void setUp() { - - NamingStrategy namingStrategy = new PrefixingNamingStrategy(); - JdbcMappingContext context = new JdbcMappingContext(namingStrategy, mock(NamedParameterJdbcOperations.class), __ -> {}); - JdbcPersistentEntity persistentEntity = context.getRequiredPersistentEntity(DummyEntity.class); - this.sqlGenerator = new SqlGenerator(context, persistentEntity, new SqlGeneratorSource(context)); - } - - @Test // DATAJDBC-112 - public void findOne() { - - String sql = sqlGenerator.getFindOne(); - - SoftAssertions softAssertions = new SoftAssertions(); - softAssertions.assertThat(sql) // - .startsWith("SELECT") // - .contains("DummyEntity.x_id AS x_id,") // - .contains("DummyEntity.x_name AS x_name,") // - .contains("ref.x_l1id AS ref_x_l1id") // - .contains("ref.x_content AS ref_x_content").contains(" FROM DummyEntity") // - // 1-N relationships do not get loaded via join - .doesNotContain("Element AS elements"); - 
softAssertions.assertAll(); - } - - @Test // DATAJDBC-112 - public void cascadingDeleteFirstLevel() { - - String sql = sqlGenerator.createDeleteByPath(PropertyPath.from("ref", DummyEntity.class)); - - assertThat(sql).isEqualTo("DELETE FROM ReferencedEntity WHERE DummyEntity = :rootId"); - } - - @Test // DATAJDBC-112 - public void cascadingDeleteAllSecondLevel() { - - String sql = sqlGenerator.createDeleteByPath(PropertyPath.from("ref.further", DummyEntity.class)); - - assertThat(sql).isEqualTo( - "DELETE FROM SecondLevelReferencedEntity WHERE ReferencedEntity IN (SELECT x_l1id FROM ReferencedEntity WHERE DummyEntity = :rootId)"); - } - - @Test // DATAJDBC-112 - public void deleteAll() { - - String sql = sqlGenerator.createDeleteAllSql(null); - - assertThat(sql).isEqualTo("DELETE FROM DummyEntity"); - } - - @Test // DATAJDBC-112 - public void cascadingDeleteAllFirstLevel() { - - String sql = sqlGenerator.createDeleteAllSql(PropertyPath.from("ref", DummyEntity.class)); - - assertThat(sql).isEqualTo("DELETE FROM ReferencedEntity WHERE DummyEntity IS NOT NULL"); - } - - @Test // DATAJDBC-112 - public void cascadingDeleteSecondLevel() { - - String sql = sqlGenerator.createDeleteAllSql(PropertyPath.from("ref.further", DummyEntity.class)); - - assertThat(sql).isEqualTo( - "DELETE FROM SecondLevelReferencedEntity WHERE ReferencedEntity IN (SELECT x_l1id FROM ReferencedEntity WHERE DummyEntity IS NOT NULL)"); - } - - @Test // DATAJDBC-131 - public void findAllByProperty() { - - // this would get called when DummyEntity is the element type of a Set - String sql = sqlGenerator.getFindAllByProperty("back-ref", null, false); - - assertThat(sql).isEqualTo("SELECT DummyEntity.x_id AS x_id, DummyEntity.x_name AS x_name, " - + "ref.x_l1id AS ref_x_l1id, ref.x_content AS ref_x_content, ref.x_further AS ref_x_further " - + "FROM DummyEntity LEFT OUTER JOIN ReferencedEntity AS ref ON ref.DummyEntity = DummyEntity.x_id " - + "WHERE back-ref = :back-ref"); - } - - @Test // DATAJDBC-131 
- public void findAllByPropertyWithKey() { - - // this would get called when DummyEntity is th element type of a Map - String sql = sqlGenerator.getFindAllByProperty("back-ref", "key-column", false); - - assertThat(sql).isEqualTo("SELECT DummyEntity.x_id AS x_id, DummyEntity.x_name AS x_name, " - + "ref.x_l1id AS ref_x_l1id, ref.x_content AS ref_x_content, ref.x_further AS ref_x_further, " - + "DummyEntity.key-column AS key-column " - + "FROM DummyEntity LEFT OUTER JOIN ReferencedEntity AS ref ON ref.DummyEntity = DummyEntity.x_id " - + "WHERE back-ref = :back-ref"); - } - - @Test (expected = IllegalArgumentException.class) // DATAJDBC-130 - public void findAllByPropertyOrderedWithoutKey() { - String sql = sqlGenerator.getFindAllByProperty("back-ref", null, true); - } - - @Test // DATAJDBC-131 - public void findAllByPropertyWithKeyOrdered() { - - // this would get called when DummyEntity is th element type of a Map - String sql = sqlGenerator.getFindAllByProperty("back-ref", "key-column", true); - - assertThat(sql).isEqualTo("SELECT DummyEntity.x_id AS x_id, DummyEntity.x_name AS x_name, " - + "ref.x_l1id AS ref_x_l1id, ref.x_content AS ref_x_content, ref.x_further AS ref_x_further, " - + "DummyEntity.key-column AS key-column " - + "FROM DummyEntity LEFT OUTER JOIN ReferencedEntity AS ref ON ref.DummyEntity = DummyEntity.x_id " - + "WHERE back-ref = :back-ref " - + "ORDER BY key-column" - ); - } - - @SuppressWarnings("unused") - static class DummyEntity { - - @Id Long id; - String name; - ReferencedEntity ref; - Set elements; - Map mappedElements; - } - - @SuppressWarnings("unused") - static class ReferencedEntity { - - @Id Long l1id; - String content; - SecondLevelReferencedEntity further; - } - - @SuppressWarnings("unused") - static class SecondLevelReferencedEntity { - - @Id Long l2id; - String something; - } - - static class Element { - @Id Long id; - String content; - } - - private static class PrefixingNamingStrategy extends DefaultNamingStrategy { - - 
@Override - public String getColumnName(JdbcPersistentProperty property) { - return "x_" + super.getColumnName(property); - } - - } -} diff --git a/src/test/java/org/springframework/data/jdbc/core/conversion/DbActionUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/conversion/DbActionUnitTests.java deleted file mode 100644 index 03b4e95095..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/conversion/DbActionUnitTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core.conversion; - -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; - -import org.junit.Test; - -/** - * Unit tests for {@link DbAction}s - * - * @author Jens Schauder - */ -public class DbActionUnitTests { - - @Test // DATAJDBC-150 - public void exceptionFromActionContainsUsefulInformationWhenInterpreterFails() { - - DummyEntity entity = new DummyEntity(); - DbAction.Insert insert = DbAction.insert(entity, JdbcPropertyPath.from("someName", DummyEntity.class), - null); - - Interpreter failingInterpreter = mock(Interpreter.class); - doThrow(new RuntimeException()).when(failingInterpreter).interpret(any(DbAction.Insert.class)); - - assertThatExceptionOfType(DbActionExecutionException.class) // - .isThrownBy(() -> insert.executeWith(failingInterpreter)) // - .withMessageContaining("Insert") // - .withMessageContaining(entity.toString()); - - } - - static class DummyEntity { - String someName; - } -} diff --git a/src/test/java/org/springframework/data/jdbc/core/conversion/JdbcEntityDeleteWriterUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/conversion/JdbcEntityDeleteWriterUnitTests.java deleted file mode 100644 index d26b40b1b3..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/conversion/JdbcEntityDeleteWriterUnitTests.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.core.conversion; - -import static org.mockito.Mockito.*; - -import lombok.Data; - -import org.assertj.core.api.Assertions; -import org.assertj.core.groups.Tuple; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.core.conversion.AggregateChange.Kind; -import org.springframework.data.jdbc.core.conversion.DbAction.Delete; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests for the {@link JdbcEntityDeleteWriter} - * - * @author Jens Schauder - */ -@RunWith(MockitoJUnitRunner.class) -public class JdbcEntityDeleteWriterUnitTests { - - JdbcEntityDeleteWriter converter = new JdbcEntityDeleteWriter(new JdbcMappingContext(mock(NamedParameterJdbcOperations.class))); - - @Test - public void deleteDeletesTheEntityAndReferencedEntities() { - - SomeEntity entity = new SomeEntity(23L); - - AggregateChange aggregateChange = new AggregateChange(Kind.DELETE, SomeEntity.class, entity); - - converter.write(entity, aggregateChange); - - Assertions.assertThat(aggregateChange.getActions()).extracting(DbAction::getClass, DbAction::getEntityType) - .containsExactly( // - Tuple.tuple(Delete.class, YetAnother.class), // - Tuple.tuple(Delete.class, OtherEntity.class), // - Tuple.tuple(Delete.class, SomeEntity.class) // - ); - } - - @Data - private static class SomeEntity { - - 
@Id final Long id; - OtherEntity other; - // should not trigger own Dbaction - String name; - } - - @Data - private class OtherEntity { - - @Id final Long id; - YetAnother yetAnother; - } - - @Data - private class YetAnother { - @Id final Long id; - } -} diff --git a/src/test/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriterUnitTests.java b/src/test/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriterUnitTests.java deleted file mode 100644 index f37fb2f313..0000000000 --- a/src/test/java/org/springframework/data/jdbc/core/conversion/JdbcEntityWriterUnitTests.java +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.core.conversion; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import lombok.RequiredArgsConstructor; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.core.conversion.AggregateChange.Kind; -import org.springframework.data.jdbc.core.conversion.DbAction.Delete; -import org.springframework.data.jdbc.core.conversion.DbAction.Insert; -import org.springframework.data.jdbc.core.conversion.DbAction.Update; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests for the {@link JdbcEntityWriter} - * - * @author Jens Schauder - */ -@RunWith(MockitoJUnitRunner.class) -public class JdbcEntityWriterUnitTests { - - public static final long SOME_ENTITY_ID = 23L; - JdbcEntityWriter converter = new JdbcEntityWriter(new JdbcMappingContext(mock(NamedParameterJdbcOperations.class))); - - @Test // DATAJDBC-112 - public void newEntityGetsConvertedToOneInsert() { - - SingleReferenceEntity entity = new SingleReferenceEntity(null); - AggregateChange aggregateChange = // - new AggregateChange(Kind.SAVE, SingleReferenceEntity.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) // - .extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Insert.class, SingleReferenceEntity.class, "") // - ); - } - - @Test // DATAJDBC-112 - public void existingEntityGetsConvertedToUpdate() { - - SingleReferenceEntity entity = new SingleReferenceEntity(SOME_ENTITY_ID); - AggregateChange aggregateChange = // - 
new AggregateChange(Kind.SAVE, SingleReferenceEntity.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) // - .extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Delete.class, Element.class, "other"), // - tuple(Update.class, SingleReferenceEntity.class, "") // - ); - } - - @Test // DATAJDBC-112 - public void referenceTriggersDeletePlusInsert() { - - SingleReferenceEntity entity = new SingleReferenceEntity(SOME_ENTITY_ID); - entity.other = new Element(null); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, SingleReferenceEntity.class, - entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) // - .extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Delete.class, Element.class, "other"), // - tuple(Update.class, SingleReferenceEntity.class, ""), // - tuple(Insert.class, Element.class, "other") // - ); - } - - @Test // DATAJDBC-113 - public void newEntityWithEmptySetResultsInSingleInsert() { - - SetContainer entity = new SetContainer(null); - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, SetContainer.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) // - .extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Insert.class, SetContainer.class, "")); - } - - @Test // DATAJDBC-113 - public void newEntityWithSetResultsInAdditionalInsertPerElement() { - - SetContainer entity = new SetContainer(null); - entity.elements.add(new Element(null)); - entity.elements.add(new Element(null)); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, SetContainer.class, entity); - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()).extracting(DbAction::getClass, 
DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Insert.class, SetContainer.class, ""), // - tuple(Insert.class, Element.class, "elements"), // - tuple(Insert.class, Element.class, "elements") // - ); - } - - @Test // DATAJDBC-113 - public void cascadingReferencesTriggerCascadingActions() { - - CascadingReferenceEntity entity = new CascadingReferenceEntity(null); - - entity.other.add(createMiddleElement( // - new Element(null), // - new Element(null)) // - ); - - entity.other.add(createMiddleElement( // - new Element(null), // - new Element(null)) // - ); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, SetContainer.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()).extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Insert.class, CascadingReferenceEntity.class, ""), // - tuple(Insert.class, CascadingReferenceMiddleElement.class, "other"), // - tuple(Insert.class, Element.class, "other.element"), // - tuple(Insert.class, Element.class, "other.element"), // - tuple(Insert.class, CascadingReferenceMiddleElement.class, "other"), // - tuple(Insert.class, Element.class, "other.element"), // - tuple(Insert.class, Element.class, "other.element") // - ); - } - - @Test // DATAJDBC-131 - public void newEntityWithEmptyMapResultsInSingleInsert() { - - MapContainer entity = new MapContainer(null); - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, MapContainer.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()).extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Insert.class, MapContainer.class, "")); - } - - @Test // DATAJDBC-131 - public void newEntityWithMapResultsInAdditionalInsertPerElement() { - - MapContainer entity = new MapContainer(null); - entity.elements.put("one", new 
Element(null)); - entity.elements.put("two", new Element(null)); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, MapContainer.class, entity); - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) - .extracting(DbAction::getClass, DbAction::getEntityType, this::getMapKey, this::extractPath) // - .containsExactlyInAnyOrder( // - tuple(Insert.class, MapContainer.class, null, ""), // - tuple(Insert.class, Element.class, "one", "elements"), // - tuple(Insert.class, Element.class, "two", "elements") // - ).containsSubsequence( // container comes before the elements - tuple(Insert.class, MapContainer.class, null, ""), // - tuple(Insert.class, Element.class, "two", "elements") // - ).containsSubsequence( // container comes before the elements - tuple(Insert.class, MapContainer.class, null, ""), // - tuple(Insert.class, Element.class, "one", "elements") // - ); - } - - @Test // DATAJDBC-130 - public void newEntityWithEmptyListResultsInSingleInsert() { - - ListContainer entity = new ListContainer(null); - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, ListContainer.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()).extracting(DbAction::getClass, DbAction::getEntityType, this::extractPath) // - .containsExactly( // - tuple(Insert.class, ListContainer.class, "")); - } - - @Test // DATAJDBC-130 - public void newEntityWithListResultsInAdditionalInsertPerElement() { - - ListContainer entity = new ListContainer(null); - entity.elements.add( new Element(null)); - entity.elements.add( new Element(null)); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, ListContainer.class, entity); - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) - .extracting(DbAction::getClass, DbAction::getEntityType, this::getListKey, this::extractPath) // - .containsExactlyInAnyOrder( // - tuple(Insert.class, 
ListContainer.class, null, ""), // - tuple(Insert.class, Element.class, 0, "elements"), // - tuple(Insert.class, Element.class, 1, "elements") // - ).containsSubsequence( // container comes before the elements - tuple(Insert.class, ListContainer.class, null, ""), // - tuple(Insert.class, Element.class, 1, "elements") // - ).containsSubsequence( // container comes before the elements - tuple(Insert.class, ListContainer.class, null, ""), // - tuple(Insert.class, Element.class, 0, "elements") // - ); - } - - @Test // DATAJDBC-131 - public void mapTriggersDeletePlusInsert() { - - MapContainer entity = new MapContainer(SOME_ENTITY_ID); - entity.elements.put("one", new Element(null)); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, MapContainer.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) // - .extracting(DbAction::getClass, DbAction::getEntityType, this::getMapKey, this::extractPath) // - .containsExactly( // - tuple(Delete.class, Element.class, null, "elements"), // - tuple(Update.class, MapContainer.class, null, ""), // - tuple(Insert.class, Element.class, "one", "elements") // - ); - } - - @Test // DATAJDBC-130 - public void listTriggersDeletePlusInsert() { - - ListContainer entity = new ListContainer(SOME_ENTITY_ID); - entity.elements.add( new Element(null)); - - AggregateChange aggregateChange = new AggregateChange(Kind.SAVE, ListContainer.class, entity); - - converter.write(entity, aggregateChange); - - assertThat(aggregateChange.getActions()) // - .extracting(DbAction::getClass, DbAction::getEntityType, this::getListKey, this::extractPath) // - .containsExactly( // - tuple(Delete.class, Element.class, null, "elements"), // - tuple(Update.class, ListContainer.class, null, ""), // - tuple(Insert.class, Element.class, 0, "elements") // - ); - } - - private CascadingReferenceMiddleElement createMiddleElement(Element first, Element second) { - - CascadingReferenceMiddleElement 
middleElement1 = new CascadingReferenceMiddleElement(null); - middleElement1.element.add(first); - middleElement1.element.add(second); - return middleElement1; - } - - private Object getMapKey(DbAction a) { - return a.getAdditionalValues().get("MapContainer_key"); - } - - private Object getListKey(DbAction a) { - return a.getAdditionalValues().get("ListContainer_key"); - } - - private String extractPath(DbAction action) { - return action.getPropertyPath().toDotPath(); - } - - @RequiredArgsConstructor - static class SingleReferenceEntity { - - @Id final Long id; - Element other; - // should not trigger own Dbaction - String name; - } - - @RequiredArgsConstructor - private static class CascadingReferenceMiddleElement { - - @Id final Long id; - final Set element = new HashSet<>(); - } - - @RequiredArgsConstructor - private static class CascadingReferenceEntity { - - @Id final Long id; - final Set other = new HashSet<>(); - } - - @RequiredArgsConstructor - private static class SetContainer { - - @Id final Long id; - Set elements = new HashSet<>(); - } - - @RequiredArgsConstructor - private static class MapContainer { - - @Id final Long id; - Map elements = new HashMap<>(); - } - - @RequiredArgsConstructor - private static class ListContainer { - - @Id final Long id; - List< Element> elements = new ArrayList<>(); - } - - @RequiredArgsConstructor - private static class Element { - @Id final Long id; - } - -} diff --git a/src/test/java/org/springframework/data/jdbc/degraph/DependencyTests.java b/src/test/java/org/springframework/data/jdbc/degraph/DependencyTests.java deleted file mode 100644 index 4470fc4f10..0000000000 --- a/src/test/java/org/springframework/data/jdbc/degraph/DependencyTests.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.degraph; -import static de.schauderhaft.degraph.check.JCheck.classpath; -import static org.junit.Assert.assertThat; - -import org.junit.Test; - -import de.schauderhaft.degraph.check.JCheck; - -/** - * @author Jens Schauder - */ -public class DependencyTests { - - @Test public void test() { - assertThat( classpath() - .noJars() - .including("org.springframework.data.jdbc.**") - .filterClasspath("*target/classes") - .printOnFailure("degraph.graphml"), JCheck.violationFree()); - - } - -} diff --git a/src/test/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentEntityInformationUnitTests.java b/src/test/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentEntityInformationUnitTests.java deleted file mode 100644 index 13f3ae9d87..0000000000 --- a/src/test/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentEntityInformationUnitTests.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.mapping.model; - -import static org.assertj.core.api.Java6Assertions.*; -import static org.mockito.Mockito.*; - -import org.junit.Test; -import org.springframework.data.annotation.Id; -import org.springframework.data.domain.Persistable; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.lang.Nullable; - -/** - * @author Jens Schauder - */ -public class BasicJdbcPersistentEntityInformationUnitTests { - - JdbcMappingContext context = new JdbcMappingContext(new DefaultNamingStrategy(), mock(NamedParameterJdbcOperations.class), cs -> {}); - private DummyEntity dummyEntity = new DummyEntity(); - private PersistableDummyEntity persistableDummyEntity = new PersistableDummyEntity(); - - @Test // DATAJDBC-158 - public void idIsBasedOnIdAnnotatedProperty() { - - dummyEntity.id = 42L; - assertThat(context.getRequiredPersistentEntityInformation(DummyEntity.class).getRequiredId(dummyEntity)) - .isEqualTo(42L); - } - - @Test // DATAJDBC-158 - public void idIsBasedOnPersistableGetId() { - - assertThat( // - context.getRequiredPersistentEntityInformation(PersistableDummyEntity.class) - .getRequiredId(persistableDummyEntity) // - ).isEqualTo(23L); - } - - @Test // DATAJDBC-158 - public void isNewIsBasedOnIdAnnotatedPropertyBeingNull() { - - assertThat(context.getRequiredPersistentEntityInformation(DummyEntity.class).isNew(dummyEntity)).isTrue(); - dummyEntity.id = 42L; - assertThat(context.getRequiredPersistentEntityInformation(DummyEntity.class).isNew(dummyEntity)).isFalse(); - } - - @Test // DATAJDBC-158 - public void isNewIsBasedOnPersistableIsNew() { - - persistableDummyEntity.isNewFlag = true; - assertThat( - context.getRequiredPersistentEntityInformation(PersistableDummyEntity.class).isNew(persistableDummyEntity)) - .isTrue(); - - persistableDummyEntity.isNewFlag = false; 
- assertThat( - context.getRequiredPersistentEntityInformation(PersistableDummyEntity.class).isNew(persistableDummyEntity)) - .isFalse(); - } - - private static class DummyEntity { - @Id Long id; - } - - private static class PersistableDummyEntity implements Persistable { - boolean isNewFlag; - - @Nullable - @Override - public Long getId() { - return 23L; - } - - @Override - public boolean isNew() { - return isNewFlag; - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentPropertyUnitTests.java b/src/test/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentPropertyUnitTests.java deleted file mode 100644 index 98f8095104..0000000000 --- a/src/test/java/org/springframework/data/jdbc/mapping/model/BasicJdbcPersistentPropertyUnitTests.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.mapping.model; - -import static org.assertj.core.api.AssertionsForClassTypes.*; -import static org.mockito.Mockito.*; - -import lombok.Data; - -import java.time.LocalDateTime; -import java.time.ZonedDateTime; -import java.util.Date; - -import org.assertj.core.api.Assertions; -import org.junit.Test; -import org.springframework.data.mapping.PropertyHandler; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; - -/** - * Unit tests for the {@link BasicJdbcPersistentProperty}. - * - * @author Jens Schauder - */ -public class BasicJdbcPersistentPropertyUnitTests { - - @Test // DATAJDBC-104 - public void enumGetsStoredAsString() { - - JdbcPersistentEntity persistentEntity = new JdbcMappingContext(mock(NamedParameterJdbcOperations.class)) - .getRequiredPersistentEntity(DummyEntity.class); - - persistentEntity.doWithProperties((PropertyHandler) p -> { - switch (p.getName()) { - case "someEnum": - assertThat(p.getColumnType()).isEqualTo(String.class); - break; - case "localDateTime": - assertThat(p.getColumnType()).isEqualTo(Date.class); - break; - case "zonedDateTime": - assertThat(p.getColumnType()).isEqualTo(String.class); - break; - default: - Assertions.fail("property with out assert: " + p.getName()); - } - }); - - } - - @Data - private static class DummyEntity { - - private final SomeEnum someEnum; - private final LocalDateTime localDateTime; - private final ZonedDateTime zonedDateTime; - } - - private enum SomeEnum { - @SuppressWarnings("unused") - ALPHA - } -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIdGenerationIntegrationTests.java b/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIdGenerationIntegrationTests.java deleted file mode 100644 index 05c2d83145..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIdGenerationIntegrationTests.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2017-2018 
the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository; - -import static org.assertj.core.api.Assertions.assertThat; - -import lombok.Data; -import lombok.Value; - -import javax.sql.DataSource; - -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; -import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; -import org.springframework.data.repository.CrudRepository; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; - -/** - * Testing special cases for id generation with {@link SimpleJdbcRepository}. 
- * - * @author Jens Schauder - * @author Greg Turnquist - */ -@ContextConfiguration -public class JdbcRepositoryIdGenerationIntegrationTests { - - @Configuration - @Import(TestConfiguration.class) - static class Config { - - @Autowired JdbcRepositoryFactory factory; - - @Bean - Class testClass() { - return JdbcRepositoryIdGenerationIntegrationTests.class; - } - } - - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); - - @Autowired NamedParameterJdbcTemplate template; - @Autowired ReadOnlyIdEntityRepository readOnlyIdrepository; - @Autowired PrimitiveIdEntityRepository primitiveIdRepository; - - @Test // DATAJDBC-98 - public void idWithoutSetterGetsSet() { - - ReadOnlyIdEntity entity = readOnlyIdrepository.save(new ReadOnlyIdEntity(null, "Entity Name")); - - assertThat(entity.getId()).isNotNull(); - - assertThat(readOnlyIdrepository.findById(entity.getId())).hasValueSatisfying(it -> { - - assertThat(it.getId()).isEqualTo(entity.getId()); - assertThat(it.getName()).isEqualTo(entity.getName()); - }); - } - - @Test // DATAJDBC-98 - public void primitiveIdGetsSet() { - - PrimitiveIdEntity entity = new PrimitiveIdEntity(0); - entity.setName("Entity Name"); - - PrimitiveIdEntity saved = primitiveIdRepository.save(entity); - - assertThat(saved.getId()).isNotEqualTo(0L); - - assertThat(primitiveIdRepository.findById(saved.getId())).hasValueSatisfying(it -> { - - assertThat(it.getId()).isEqualTo(saved.getId()); - assertThat(it.getName()).isEqualTo(saved.getName()); - }); - } - - private interface PrimitiveIdEntityRepository extends CrudRepository {} - - public interface ReadOnlyIdEntityRepository extends CrudRepository {} - - @Value - static class ReadOnlyIdEntity { - - @Id Long id; - String name; - } - - @Data - static class PrimitiveIdEntity { - - @Id private final long id; - String name; - } - - @Configuration - @ComponentScan("org.springframework.data.jdbc.testing") - 
@EnableJdbcRepositories(considerNestedRepositories = true) - static class TestConfiguration { - - @Bean - Class testClass() { - return JdbcRepositoryIdGenerationIntegrationTests.class; - } - - /** - * {@link NamingStrategy} that harmlessly uppercases the table name, demonstrating how to inject one while not - * breaking existing SQL operations. - */ - @Bean - NamingStrategy namingStrategy() { - return new DefaultNamingStrategy() { - @Override - public String getTableName(Class type) { - return type.getSimpleName().toUpperCase(); - } - }; - } - - @Bean - NamedParameterJdbcTemplate template(DataSource db) { - return new NamedParameterJdbcTemplate(db); - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIntegrationTests.java b/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIntegrationTests.java deleted file mode 100644 index 75c6edd861..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryIntegrationTests.java +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.repository; - -import static java.util.Arrays.*; -import static org.assertj.core.api.Assertions.*; - -import lombok.Data; - -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; -import org.springframework.data.jdbc.testing.TestConfiguration; -import org.springframework.data.repository.CrudRepository; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; -import org.springframework.test.jdbc.JdbcTestUtils; -import org.springframework.transaction.annotation.Transactional; - -/** - * Very simple use cases for creation and usage of JdbcRepositories. 
- * - * @author Jens Schauder - */ -@ContextConfiguration -@Transactional -public class JdbcRepositoryIntegrationTests { - - @Configuration - @Import(TestConfiguration.class) - static class Config { - - @Autowired JdbcRepositoryFactory factory; - - @Bean - Class testClass() { - return JdbcRepositoryIntegrationTests.class; - } - - @Bean - DummyEntityRepository dummyEntityRepository() { - return factory.getRepository(DummyEntityRepository.class); - } - - } - - - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); - - @Autowired NamedParameterJdbcTemplate template; - @Autowired DummyEntityRepository repository; - - @Test // DATAJDBC-95 - public void savesAnEntity() { - - DummyEntity entity = repository.save(createDummyEntity()); - - assertThat(JdbcTestUtils.countRowsInTableWhere((JdbcTemplate) template.getJdbcOperations(), "dummyentity", - "idProp = " + entity.getIdProp())).isEqualTo(1); - } - - @Test // DATAJDBC-95 - public void saveAndLoadAnEntity() { - - DummyEntity entity = repository.save(createDummyEntity()); - - assertThat(repository.findById(entity.getIdProp())).hasValueSatisfying(it -> { - - assertThat(it.getIdProp()).isEqualTo(entity.getIdProp()); - assertThat(it.getName()).isEqualTo(entity.getName()); - }); - } - - @Test // DATAJDBC-97 - public void savesManyEntities() { - - DummyEntity entity = createDummyEntity(); - DummyEntity other = createDummyEntity(); - - repository.saveAll(asList(entity, other)); - - assertThat(repository.findAll()) // - .extracting(DummyEntity::getIdProp) // - .containsExactlyInAnyOrder(entity.getIdProp(), other.getIdProp()); - } - - @Test // DATAJDBC-97 - public void existsReturnsTrueIffEntityExists() { - - DummyEntity entity = repository.save(createDummyEntity()); - - assertThat(repository.existsById(entity.getIdProp())).isTrue(); - assertThat(repository.existsById(entity.getIdProp() + 1)).isFalse(); - } - - @Test // DATAJDBC-97 - 
public void findAllFindsAllEntities() { - - DummyEntity entity = repository.save(createDummyEntity()); - DummyEntity other = repository.save(createDummyEntity()); - - Iterable all = repository.findAll(); - - assertThat(all)// - .extracting(DummyEntity::getIdProp)// - .containsExactlyInAnyOrder(entity.getIdProp(), other.getIdProp()); - } - - @Test // DATAJDBC-97 - public void findAllFindsAllSpecifiedEntities() { - - DummyEntity entity = repository.save(createDummyEntity()); - DummyEntity other = repository.save(createDummyEntity()); - - assertThat(repository.findAllById(asList(entity.getIdProp(), other.getIdProp())))// - .extracting(DummyEntity::getIdProp)// - .containsExactlyInAnyOrder(entity.getIdProp(), other.getIdProp()); - } - - @Test // DATAJDBC-97 - public void countsEntities() { - - repository.save(createDummyEntity()); - repository.save(createDummyEntity()); - repository.save(createDummyEntity()); - - assertThat(repository.count()).isEqualTo(3L); - } - - @Test // DATAJDBC-97 - public void deleteById() { - - DummyEntity one = repository.save(createDummyEntity()); - DummyEntity two = repository.save(createDummyEntity()); - DummyEntity three = repository.save(createDummyEntity()); - - repository.deleteById(two.getIdProp()); - - assertThat(repository.findAll()) // - .extracting(DummyEntity::getIdProp) // - .containsExactlyInAnyOrder(one.getIdProp(), three.getIdProp()); - } - - @Test // DATAJDBC-97 - public void deleteByEntity() { - - DummyEntity one = repository.save(createDummyEntity()); - DummyEntity two = repository.save(createDummyEntity()); - DummyEntity three = repository.save(createDummyEntity()); - - repository.delete(one); - - assertThat(repository.findAll()) // - .extracting(DummyEntity::getIdProp) // - .containsExactlyInAnyOrder(two.getIdProp(), three.getIdProp()); - } - - @Test // DATAJDBC-97 - public void deleteByList() { - - DummyEntity one = repository.save(createDummyEntity()); - DummyEntity two = repository.save(createDummyEntity()); - 
DummyEntity three = repository.save(createDummyEntity()); - - repository.deleteAll(asList(one, three)); - - assertThat(repository.findAll()) // - .extracting(DummyEntity::getIdProp) // - .containsExactlyInAnyOrder(two.getIdProp()); - } - - @Test // DATAJDBC-97 - public void deleteAll() { - - repository.save(createDummyEntity()); - repository.save(createDummyEntity()); - repository.save(createDummyEntity()); - - assertThat(repository.findAll()).isNotEmpty(); - - repository.deleteAll(); - - assertThat(repository.findAll()).isEmpty(); - } - - @Test // DATAJDBC-98 - public void update() { - - DummyEntity entity = repository.save(createDummyEntity()); - - entity.setName("something else"); - DummyEntity saved = repository.save(entity); - - assertThat(repository.findById(entity.getIdProp())).hasValueSatisfying(it -> { - assertThat(it.getName()).isEqualTo(saved.getName()); - }); - } - - @Test // DATAJDBC-98 - public void updateMany() { - - DummyEntity entity = repository.save(createDummyEntity()); - DummyEntity other = repository.save(createDummyEntity()); - - entity.setName("something else"); - other.setName("others Name"); - - repository.saveAll(asList(entity, other)); - - assertThat(repository.findAll()) // - .extracting(DummyEntity::getName) // - .containsExactlyInAnyOrder(entity.getName(), other.getName()); - } - - @Test // DATAJDBC-112 - public void findByIdReturnsEmptyWhenNoneFound() { - - // NOT saving anything, so DB is empty - - assertThat(repository.findById(-1L)).isEmpty(); - } - - private static DummyEntity createDummyEntity() { - - DummyEntity entity = new DummyEntity(); - entity.setName("Entity Name"); - return entity; - } - - interface DummyEntityRepository extends CrudRepository {} - - @Data - static class DummyEntity { - - String name; - @Id private Long idProp; - } -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryManipulateDbActionsIntegrationTests.java 
b/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryManipulateDbActionsIntegrationTests.java deleted file mode 100644 index 8cbb6136a0..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/JdbcRepositoryManipulateDbActionsIntegrationTests.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository; - -import static java.util.Arrays.asList; -import static org.assertj.core.api.Assertions.assertThat; - -import junit.framework.AssertionFailedError; -import lombok.Data; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import lombok.Setter; - -import java.util.List; -import java.util.Random; - -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationListener; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.data.annotation.Id; -import org.springframework.data.annotation.PersistenceConstructor; -import org.springframework.data.jdbc.core.conversion.DbAction; -import org.springframework.data.jdbc.mapping.event.BeforeDelete; -import org.springframework.data.jdbc.mapping.event.BeforeSave; -import 
org.springframework.data.jdbc.repository.config.EnableJdbcRepositories; -import org.springframework.data.jdbc.testing.TestConfiguration; -import org.springframework.data.repository.CrudRepository; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.rules.SpringClassRule; -import org.springframework.test.context.junit4.rules.SpringMethodRule; - -/** - * Tests that the event infrastructure of Spring Data JDBC is sufficient to manipulate the {@link DbAction}s to be - * executed against the database. - * - * @author Jens Schauder - * @author Greg Turnquist - */ -@ContextConfiguration -public class JdbcRepositoryManipulateDbActionsIntegrationTests { - - @ClassRule public static final SpringClassRule classRule = new SpringClassRule(); - @Rule public SpringMethodRule methodRule = new SpringMethodRule(); - - @Autowired DummyEntityRepository repository; - @Autowired LogRepository logRepository; - - @Test // DATAJDBC-120 - public void softDelete() { - - // given a persistent entity - DummyEntity entity = new DummyEntity(null, "Hello"); - repository.save(entity); - assertThat(entity.id).isNotNull(); - - // when I delete the entity - repository.delete(entity); - - // it is still in the repository, but marked as deleted - assertThat(repository.findById(entity.id)) // - .contains(new DummyEntity( // - entity.id, // - entity.name, // - true) // - ); - - } - - @Test // DATAJDBC-120 - public void softDeleteMany() { - - // given persistent entities - DummyEntity one = new DummyEntity(null, "One"); - DummyEntity two = new DummyEntity(null, "Two"); - repository.saveAll(asList(one, two)); - - assertThat(one.id).isNotNull(); - - // when I delete the entities - repository.deleteAll(asList(one, two)); - - // they are still in the repository, but marked as deleted - assertThat(repository.findById(one.id)) // - .contains(new DummyEntity( // - one.id, // - one.name, // - true) // - ); - - assertThat(repository.findById(two.id)) // - 
.contains(new DummyEntity( // - two.id, // - two.name, // - true) // - ); - } - - @Test // DATAJDBC-120 - public void loggingOnSave() { - - // given a new entity - DummyEntity one = new DummyEntity(null, "one"); - - repository.save(one); - assertThat(one.id).isNotNull(); - - // they are still in the repository, but marked as deleted - assertThat(logRepository.findById(Config.lastLogId)) // - .isNotEmpty() // - .map(Log::getText) // - .contains("one saved"); - } - - @Test // DATAJDBC-120 - public void loggingOnSaveMany() { - - // given a new entity - DummyEntity one = new DummyEntity(null, "one"); - DummyEntity two = new DummyEntity(null, "two"); - - repository.saveAll(asList(one, two)); - assertThat(one.id).isNotNull(); - - // they are still in the repository, but marked as deleted - assertThat(logRepository.findById(Config.lastLogId)) // - .isNotEmpty() // - .map(Log::getText) // - .contains("two saved"); - } - - @Data - private static class DummyEntity { - - final @Id Long id; - String name; - boolean deleted; - - DummyEntity(Long id, String name) { - - this.id = id; - this.name = name; - this.deleted = false; - } - - @PersistenceConstructor - DummyEntity(Long id, String name, boolean deleted) { - - this.id = id; - this.name = name; - this.deleted = deleted; - } - } - - private interface DummyEntityRepository extends CrudRepository {} - - @Getter - @Setter - @RequiredArgsConstructor - private static class Log { - - @Id final Long id; - DummyEntity entity; - String text; - } - - private interface LogRepository extends CrudRepository {} - - @Configuration - @Import(TestConfiguration.class) - @EnableJdbcRepositories(considerNestedRepositories = true) - static class Config { - - static long lastLogId; - - @Bean - Class testClass() { - return JdbcRepositoryManipulateDbActionsIntegrationTests.class; - } - - @Bean - ApplicationListener softDeleteListener() { - - return event -> { - - DummyEntity entity = (DummyEntity) 
event.getOptionalEntity().orElseThrow(AssertionFailedError::new); - entity.deleted = true; - - List actions = event.getChange().getActions(); - actions.clear(); - actions.add(DbAction.update(entity, null, null)); - }; - } - - @Bean - ApplicationListener logOnSaveListener() { - - // this would actually be easier to implement with an AfterSave listener, but we want to test AggregateChange - // manipulation. - return event -> { - - DummyEntity entity = (DummyEntity) event.getOptionalEntity().orElseThrow(AssertionFailedError::new); - lastLogId = new Random().nextLong(); - Log log = new Log(lastLogId); - log.entity = entity; - log.text = entity.name + " saved"; - - - List actions = event.getChange().getActions(); - actions.add(DbAction.insert(log, null, null)); - }; - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/SimpleJdbcRepositoryEventsUnitTests.java b/src/test/java/org/springframework/data/jdbc/repository/SimpleJdbcRepositoryEventsUnitTests.java deleted file mode 100644 index a5b8c5a772..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/SimpleJdbcRepositoryEventsUnitTests.java +++ /dev/null @@ -1,160 +0,0 @@ -package org.springframework.data.jdbc.repository; - -import static java.util.Arrays.*; -import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.*; - -import junit.framework.AssertionFailedError; -import lombok.Data; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -import org.assertj.core.groups.Tuple; -import org.junit.Before; -import org.junit.Test; -import org.mockito.stubbing.Answer; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.core.DefaultDataAccessStrategy; -import org.springframework.data.jdbc.core.SqlGeneratorSource; -import 
org.springframework.data.jdbc.mapping.event.AfterDelete; -import org.springframework.data.jdbc.mapping.event.AfterSave; -import org.springframework.data.jdbc.mapping.event.BeforeDelete; -import org.springframework.data.jdbc.mapping.event.BeforeSave; -import org.springframework.data.jdbc.mapping.event.Identifier; -import org.springframework.data.jdbc.mapping.event.JdbcEvent; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; -import org.springframework.data.repository.CrudRepository; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.jdbc.core.namedparam.SqlParameterSource; -import org.springframework.jdbc.support.KeyHolder; - -/** - * @author Jens Schauder - */ -public class SimpleJdbcRepositoryEventsUnitTests { - - FakePublisher publisher = new FakePublisher(); - - DummyEntityRepository repository; - - @Before - public void before() { - - final JdbcMappingContext context = new JdbcMappingContext(mock(NamedParameterJdbcOperations.class)); - JdbcRepositoryFactory factory = new JdbcRepositoryFactory( // - publisher, // - context, // - new DefaultDataAccessStrategy( // - new SqlGeneratorSource(context), // - createIdGeneratingOperations(), // - context // - ) // - ); - - repository = factory.getRepository(DummyEntityRepository.class); - } - - @Test // DATAJDBC-99 - public void publishesEventsOnSave() { - - DummyEntity entity = new DummyEntity(23L); - - repository.save(entity); - - assertThat(publisher.events) // - .extracting(e -> (Class) e.getClass()) // - .containsExactly( // - BeforeSave.class, // - AfterSave.class // - ); - } - - @Test // DATAJDBC-99 - public void publishesEventsOnSaveMany() { - - DummyEntity entity1 = new DummyEntity(null); - DummyEntity entity2 = new DummyEntity(23L); - - repository.saveAll(asList(entity1, entity2)); - - assertThat(publisher.events) // - .extracting(e -> (Class) 
e.getClass()) // - .containsExactly( // - BeforeSave.class, // - AfterSave.class, // - BeforeSave.class, // - AfterSave.class // - ); - } - - @Test // DATAJDBC-99 - public void publishesEventsOnDelete() { - - DummyEntity entity = new DummyEntity(23L); - - repository.delete(entity); - - assertThat(publisher.events).extracting( // - e -> (Class) e.getClass(), // - e -> e.getOptionalEntity().orElseGet(AssertionFailedError::new), // - JdbcEvent::getId // - ).containsExactly( // - Tuple.tuple(BeforeDelete.class, entity, Identifier.of(23L)), // - Tuple.tuple(AfterDelete.class, entity, Identifier.of(23L)) // - ); - } - - @Test // DATAJDBC-99 - public void publishesEventsOnDeleteById() { - - repository.deleteById(23L); - - assertThat(publisher.events) // - .extracting(e -> (Class) e.getClass()) // - .containsExactly( // - BeforeDelete.class, // - AfterDelete.class // - ); - } - - private static NamedParameterJdbcOperations createIdGeneratingOperations() { - - Answer setIdInKeyHolder = invocation -> { - - HashMap keys = new HashMap<>(); - keys.put("id", 4711L); - KeyHolder keyHolder = invocation.getArgument(2); - keyHolder.getKeyList().add(keys); - - return 1; - }; - - NamedParameterJdbcOperations operations = mock(NamedParameterJdbcOperations.class); - when(operations.update(anyString(), any(SqlParameterSource.class), any(KeyHolder.class))) - .thenAnswer(setIdInKeyHolder); - return operations; - } - - interface DummyEntityRepository extends CrudRepository {} - - @Data - static class DummyEntity { - private final @Id Long id; - } - - static class FakePublisher implements ApplicationEventPublisher { - - List events = new ArrayList<>(); - - @Override - public void publishEvent(Object o) { - events.add((JdbcEvent) o); - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesIntegrationTests.java b/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesIntegrationTests.java deleted file mode 100644 
index 91529b8da4..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/config/EnableJdbcRepositoriesIntegrationTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.config; - -import static org.junit.Assert.assertNotNull; - -import lombok.Data; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.repository.config.EnableJdbcRepositoriesIntegrationTests.TestConfiguration; -import org.springframework.data.repository.CrudRepository; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * Tests the {@link EnableJdbcRepositories} annotation. 
- * - * @author Jens Schauder - * @author Greg Turnquist - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes = TestConfiguration.class) -public class EnableJdbcRepositoriesIntegrationTests { - - @Autowired DummyRepository repository; - - @Test // DATAJDBC-100 - public void repositoryGetsPickedUp() { - - assertNotNull(repository); - - Iterable all = repository.findAll(); - - assertNotNull(all); - } - - interface DummyRepository extends CrudRepository { - - } - - @Data - static class DummyEntity { - @Id private Long id; - } - - @ComponentScan("org.springframework.data.jdbc.testing") - @EnableJdbcRepositories(considerNestedRepositories = true) - static class TestConfiguration { - - @Bean - Class testClass() { - return EnableJdbcRepositoriesIntegrationTests.class; - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/support/JdbcQueryMethodUnitTests.java b/src/test/java/org/springframework/data/jdbc/repository/support/JdbcQueryMethodUnitTests.java deleted file mode 100644 index c450740bfb..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/support/JdbcQueryMethodUnitTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.repository.support; - -import static org.assertj.core.api.Assertions.*; -import static org.mockito.Mockito.*; - -import java.lang.reflect.Method; -import java.sql.ResultSet; - -import org.junit.Test; -import org.springframework.data.jdbc.repository.query.Query; -import org.springframework.data.projection.ProjectionFactory; -import org.springframework.data.repository.core.RepositoryMetadata; -import org.springframework.jdbc.core.RowMapper; - -/** - * Unit tests for {@link JdbcQueryMethod}. - * - * @author Jens Schauder - */ -public class JdbcQueryMethodUnitTests { - - public static final String DUMMY_SELECT = "SELECT something"; - - @Test // DATAJDBC-165 - public void returnsSqlStatement() throws NoSuchMethodException { - - RepositoryMetadata metadata = mock(RepositoryMetadata.class); - when(metadata.getReturnedDomainClass(any(Method.class))).thenReturn((Class) String.class); - - JdbcQueryMethod queryMethod = new JdbcQueryMethod(JdbcQueryMethodUnitTests.class.getDeclaredMethod("queryMethod"), - metadata, mock(ProjectionFactory.class)); - - assertThat(queryMethod.getAnnotatedQuery()).isEqualTo(DUMMY_SELECT); - } - - @Test // DATAJDBC-165 - public void returnsSpecifiedRowMapperClass() throws NoSuchMethodException { - - RepositoryMetadata metadata = mock(RepositoryMetadata.class); - when(metadata.getReturnedDomainClass(any(Method.class))).thenReturn((Class) String.class); - - JdbcQueryMethod queryMethod = new JdbcQueryMethod(JdbcQueryMethodUnitTests.class.getDeclaredMethod("queryMethod"), - metadata, mock(ProjectionFactory.class)); - - assertThat(queryMethod.getRowMapperClass()).isEqualTo(CustomRowMapper.class); - } - - @Query(value = DUMMY_SELECT, rowMapperClass = CustomRowMapper.class) - private void queryMethod() {} - - private class CustomRowMapper implements RowMapper { - - @Override - public Object mapRow(ResultSet rs, int rowNum) { - return null; - } - } -} diff --git 
a/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBeanUnitTests.java b/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBeanUnitTests.java deleted file mode 100644 index cc92d592dc..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryFactoryBeanUnitTests.java +++ /dev/null @@ -1,63 +0,0 @@ -package org.springframework.data.jdbc.repository.support; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.data.annotation.Id; -import org.springframework.data.jdbc.core.DataAccessStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.repository.CrudRepository; -import org.springframework.data.repository.Repository; - -/** - * Tests the dependency injection for {@link JdbcRepositoryFactoryBean}. 
- * - * @author Jens Schauder - * @author Greg Turnquist - */ -@RunWith(MockitoJUnitRunner.class) -public class JdbcRepositoryFactoryBeanUnitTests { - - JdbcRepositoryFactoryBean factoryBean; - - @Mock ListableBeanFactory beanFactory; - @Mock Repository repository; - @Mock DataAccessStrategy dataAccessStrategy; - @Mock JdbcMappingContext mappingContext; - - @Before - public void setUp() { - - // Setup standard configuration - factoryBean = new JdbcRepositoryFactoryBean<>(DummyEntityRepository.class); - } - - @Test - public void setsUpBasicInstanceCorrectly() { - - factoryBean.setDataAccessStrategy(dataAccessStrategy); - factoryBean.setMappingContext(mappingContext); - factoryBean.afterPropertiesSet(); - - assertThat(factoryBean.getObject()).isNotNull(); - } - - @Test(expected = IllegalArgumentException.class) - public void requiresListableBeanFactory() { - - factoryBean.setBeanFactory(mock(BeanFactory.class)); - } - - private static class DummyEntity { - @Id private Long id; - } - - private interface DummyEntityRepository extends CrudRepository {} -} diff --git a/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryQueryUnitTests.java b/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryQueryUnitTests.java deleted file mode 100644 index 2d93d43240..0000000000 --- a/src/test/java/org/springframework/data/jdbc/repository/support/JdbcRepositoryQueryUnitTests.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.repository.support; - -import org.assertj.core.api.Assertions; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.repository.query.DefaultParameters; -import org.springframework.data.repository.query.Parameters; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.core.namedparam.SqlParameterSource; - -import java.sql.ResultSet; - -import static org.mockito.Mockito.*; - -/** - * Unit tests for {@link JdbcRepositoryQuery}. - * - * @author Jens Schauder - */ -public class JdbcRepositoryQueryUnitTests { - - JdbcQueryMethod queryMethod; - JdbcMappingContext context; - RowMapper defaultRowMapper; - JdbcRepositoryQuery query; - - @Before - public void setup() throws NoSuchMethodException { - - Parameters parameters = new DefaultParameters(JdbcRepositoryQueryUnitTests.class.getDeclaredMethod("dummyMethod")); - queryMethod = mock(JdbcQueryMethod.class); - when(queryMethod.getParameters()).thenReturn(parameters); - - context = mock(JdbcMappingContext.class, RETURNS_DEEP_STUBS); - defaultRowMapper = mock(RowMapper.class); - } - - @Test // DATAJDBC-165 - public void emptyQueryThrowsException() { - - when(queryMethod.getAnnotatedQuery()).thenReturn(null); - query = new JdbcRepositoryQuery(queryMethod, context, defaultRowMapper); - - Assertions.assertThatExceptionOfType(IllegalStateException.class) // - .isThrownBy(() -> query.execute(new Object[]{})); - } - - @Test // DATAJDBC-165 - public void defaultRowMapperIsUsedByDefault() { - - when(queryMethod.getAnnotatedQuery()).thenReturn("some sql statement"); - when(queryMethod.getRowMapperClass()).thenReturn((Class) RowMapper.class); - query = new JdbcRepositoryQuery(queryMethod, context, defaultRowMapper); - - query.execute(new Object[]{}); - - 
verify(context.getTemplate()).queryForObject(anyString(), any(SqlParameterSource.class), eq(defaultRowMapper)); - } - - @Test // DATAJDBC-165 - public void defaultRowMapperIsUsedForNull() { - - when(queryMethod.getAnnotatedQuery()).thenReturn("some sql statement"); - query = new JdbcRepositoryQuery(queryMethod, context, defaultRowMapper); - - query.execute(new Object[]{}); - - verify(context.getTemplate()).queryForObject(anyString(), any(SqlParameterSource.class), eq(defaultRowMapper)); - } - - @Test // DATAJDBC-165 - public void customRowMapperIsUsedWhenSpecified() { - - when(queryMethod.getAnnotatedQuery()).thenReturn("some sql statement"); - when(queryMethod.getRowMapperClass()).thenReturn((Class) CustomRowMapper.class); - query = new JdbcRepositoryQuery(queryMethod, context, defaultRowMapper); - - query.execute(new Object[]{}); - - verify(context.getTemplate()).queryForObject(anyString(), any(SqlParameterSource.class), isA(CustomRowMapper.class)); - } - - /** - * The whole purpose of this method is to easily generate a {@link DefaultParameters} instance during test setup. - */ - private void dummyMethod() { - } - - private static class CustomRowMapper implements RowMapper { - @Override - public Object mapRow(ResultSet rs, int rowNum) { - return null; - } - } -} diff --git a/src/test/java/org/springframework/data/jdbc/testing/MySqlDataSourceConfiguration.java b/src/test/java/org/springframework/data/jdbc/testing/MySqlDataSourceConfiguration.java deleted file mode 100644 index f82203fbe3..0000000000 --- a/src/test/java/org/springframework/data/jdbc/testing/MySqlDataSourceConfiguration.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.jdbc.testing; - -import java.sql.SQLException; - -import javax.annotation.PostConstruct; -import javax.script.ScriptException; -import javax.sql.DataSource; - -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; -import org.testcontainers.containers.MySQLContainer; -import org.testcontainers.jdbc.ext.ScriptUtils; - -import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; - -/** - * {@link DataSource} setup for MySQL. - * - * Starts a docker container with a MySql database and sets up a database name "test" in it. 
- * - * @author Jens Schauder - * @author Oliver Gierke - * @author Sedat Gokcen - */ -@Configuration -@Profile("mysql") -class MySqlDataSourceConfiguration extends DataSourceConfiguration { - - private static final MySQLContainer MYSQL_CONTAINER = new MySQLContainer().withConfigurationOverride(""); - - static { - MYSQL_CONTAINER.start(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.jdbc.testing.DataSourceConfiguration#createDataSource() - */ - @Override - protected DataSource createDataSource() { - - MysqlDataSource dataSource = new MysqlDataSource(); - dataSource.setUrl(MYSQL_CONTAINER.getJdbcUrl()); - dataSource.setUser(MYSQL_CONTAINER.getUsername()); - dataSource.setPassword(MYSQL_CONTAINER.getPassword()); - dataSource.setDatabaseName(MYSQL_CONTAINER.getDatabaseName()); - - return dataSource; - } - - @PostConstruct - public void initDatabase() throws SQLException, ScriptException { - ScriptUtils.executeSqlScript(createDataSource().getConnection(), null, "DROP DATABASE test;CREATE DATABASE test;"); - } -} diff --git a/src/test/java/org/springframework/data/jdbc/testing/TestConfiguration.java b/src/test/java/org/springframework/data/jdbc/testing/TestConfiguration.java deleted file mode 100644 index 50889396da..0000000000 --- a/src/test/java/org/springframework/data/jdbc/testing/TestConfiguration.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.jdbc.testing; - -import java.util.Optional; - -import javax.sql.DataSource; - -import org.apache.ibatis.session.SqlSessionFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.jdbc.core.DataAccessStrategy; -import org.springframework.data.jdbc.core.DefaultDataAccessStrategy; -import org.springframework.data.jdbc.core.DelegatingDataAccessStrategy; -import org.springframework.data.jdbc.core.SqlGeneratorSource; -import org.springframework.data.jdbc.mapping.model.ConversionCustomizer; -import org.springframework.data.jdbc.mapping.model.DefaultNamingStrategy; -import org.springframework.data.jdbc.mapping.model.JdbcMappingContext; -import org.springframework.data.jdbc.mapping.model.NamingStrategy; -import org.springframework.data.jdbc.repository.support.JdbcRepositoryFactory; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import org.springframework.transaction.PlatformTransactionManager; - -/** - * Infrastructure configuration for integration tests. 
- * - * @author Oliver Gierke - * @author Jens Schauder - */ -@Configuration -@ComponentScan // To pick up configuration classes (per activated profile) -public class TestConfiguration { - - @Autowired DataSource dataSource; - @Autowired ApplicationEventPublisher publisher; - @Autowired(required = false) SqlSessionFactory sqlSessionFactory; - - @Bean - JdbcRepositoryFactory jdbcRepositoryFactory() { - - NamedParameterJdbcTemplate jdbcTemplate = namedParameterJdbcTemplate(); - - final JdbcMappingContext context = new JdbcMappingContext(new DefaultNamingStrategy(), jdbcTemplate, __ -> {}); - - return new JdbcRepositoryFactory( // - publisher, // - context, // - new DefaultDataAccessStrategy( // - new SqlGeneratorSource(context), // - jdbcTemplate, // - context) // - ); - } - - @Bean - NamedParameterJdbcTemplate namedParameterJdbcTemplate() { - return new NamedParameterJdbcTemplate(dataSource); - } - - @Bean - PlatformTransactionManager transactionManager() { - return new DataSourceTransactionManager(dataSource); - } - - @Bean - DataAccessStrategy defaultDataAccessStrategy(JdbcMappingContext context, - @Qualifier("namedParameterJdbcTemplate") NamedParameterJdbcOperations operations) { - - DelegatingDataAccessStrategy accessStrategy = new DelegatingDataAccessStrategy(); - - accessStrategy.setDelegate(new DefaultDataAccessStrategy( // - new SqlGeneratorSource(context), // - operations, // - context, // - accessStrategy) // - ); - - return accessStrategy; - } - - @Bean - JdbcMappingContext jdbcMappingContext(NamedParameterJdbcOperations template, Optional namingStrategy, - Optional conversionCustomizer) { - - return new JdbcMappingContext( // - namingStrategy.orElse(new DefaultNamingStrategy()), // - template, // - conversionCustomizer.orElse(conversionService -> {}) // - ); - } -} diff --git a/src/test/resources/logback.xml b/src/test/resources/logback.xml deleted file mode 100644 index e5cc8699c9..0000000000 --- a/src/test/resources/logback.xml +++ /dev/null @@ -1,20 
+0,0 @@ - - - - - - %d %5p %40.40c:%4L - %m%n - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-hsql.sql deleted file mode 100644 index a20900fd99..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-hsql.sql +++ /dev/null @@ -1,5 +0,0 @@ -CREATE TABLE LEGOSET ( id BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(30)); -CREATE TABLE MANUAL ( id BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, LEGOSET BIGINT, CONTENT VARCHAR(2000)); - -ALTER TABLE MANUAL ADD FOREIGN KEY (LEGOSET) -REFERENCES LEGOSET(id); diff --git a/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-mysql.sql deleted file mode 100644 index a3a849054e..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-mysql.sql +++ /dev/null @@ -1,5 +0,0 @@ -CREATE TABLE LEGOSET ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(30)); -CREATE TABLE MANUAL ( id BIGINT AUTO_INCREMENT PRIMARY KEY, LEGOSET BIGINT, CONTENT VARCHAR(2000)); - -ALTER TABLE MANUAL ADD FOREIGN KEY (LEGOSET) -REFERENCES LEGOSET(id); diff --git a/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-postgres.sql deleted file mode 100644 index e13b292566..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.core/JdbcEntityTemplateIntegrationTests-postgres.sql +++ /dev/null @@ -1,8 +0,0 @@ -DROP TABLE MANUAL; -DROP TABLE LEGOSET; - -CREATE TABLE LEGOSET ( id SERIAL PRIMARY KEY, NAME VARCHAR(30)); 
-CREATE TABLE MANUAL ( id SERIAL PRIMARY KEY, LEGOSET BIGINT, CONTENT VARCHAR(2000)); - -ALTER TABLE MANUAL ADD FOREIGN KEY (LEGOSET) -REFERENCES LEGOSET(id); diff --git a/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-hsql.sql deleted file mode 100644 index ee4d4de3a2..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-hsql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE DummyEntity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY) \ No newline at end of file diff --git a/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mysql.sql deleted file mode 100644 index 808c99e6e5..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-mysql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE DummyEntity ( id BIGINT AUTO_INCREMENT PRIMARY KEY) \ No newline at end of file diff --git a/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-postgres.sql deleted file mode 100644 index 0a855395a1..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository.config/EnableJdbcRepositoriesIntegrationTests-postgres.sql +++ /dev/null @@ -1,2 +0,0 @@ -DROP TABLE DummyEntity -CREATE TABLE DummyEntity ( id SERIAL PRIMARY KEY) \ No newline at end of file diff --git a/src/test/resources/org.springframework.data.jdbc.repository.query/QueryAnnotationHsqlIntegrationTests-hsql.sql 
b/src/test/resources/org.springframework.data.jdbc.repository.query/QueryAnnotationHsqlIntegrationTests-hsql.sql deleted file mode 100644 index 23bf117b32..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository.query/QueryAnnotationHsqlIntegrationTests-hsql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)) diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-hsql.sql deleted file mode 100644 index 1618e2569b..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-hsql.sql +++ /dev/null @@ -1,4 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -CREATE TABLE ReadOnlyIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)) -CREATE TABLE PrimitiveIdEntity (ID BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 1) PRIMARY KEY, NAME VARCHAR(100)) diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-postgres.sql deleted file mode 100644 index 267f0b7aba..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIdGenerationIntegrationTests-postgres.sql +++ /dev/null @@ -1,5 +0,0 @@ -DROP TABLE ReadOnlyIdEntity; -DROP TABLE PrimitiveIdEntity; - -CREATE TABLE ReadOnlyIdEntity (ID SERIAL PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE PrimitiveIdEntity (ID SERIAL PRIMARY KEY, NAME VARCHAR(100)); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-hsql.sql 
b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-hsql.sql deleted file mode 100644 index d6458976a7..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-hsql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE dummyentity ( idProp BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)) diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mysql.sql deleted file mode 100644 index 89763ecdbf..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-mysql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE dummyentity (idProp BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-postgres.sql deleted file mode 100644 index 25e9c426ae..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryIntegrationTests-postgres.sql +++ /dev/null @@ -1,2 +0,0 @@ -DROP TABLE dummyentity; -CREATE TABLE dummyentity (idProp SERIAL PRIMARY KEY, NAME VARCHAR(100)); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-hsql.sql deleted file mode 100644 index 27f8287ca4..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-hsql.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME 
VARCHAR(100), DELETED CHAR(1), log BIGINT); -CREATE TABLE log ( id BIGINT, TEXT VARCHAR(100)); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-mysql.sql deleted file mode 100644 index 9b52d44dab..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-mysql.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100), DELETED CHAR(1), log BIGINT); -CREATE TABLE log ( id BIGINT, TEXT VARCHAR(100)); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-postgres.sql deleted file mode 100644 index 703cd57fb1..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryManipulateDbActionsIntegrationTests-postgres.sql +++ /dev/null @@ -1,4 +0,0 @@ -DROP TABLE dummyentity; -DROP TABLE log; -CREATE TABLE dummyentity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100), DELETED CHAR(5), log BIGINT); -CREATE TABLE log ( id BIGINT, TEXT VARCHAR(100)); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-hsql.sql deleted file mode 100644 index e60eb17bea..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-hsql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE ENTITYWITHCOLUMNSREQUIRINGCONVERSIONS ( idTimestamp DATETIME PRIMARY KEY, bool boolean, SOMEENUM VARCHAR(100), bigDecimal 
DECIMAL(1025), bigInteger DECIMAL(20), date DATETIME, localDateTime DATETIME, zonedDateTime VARCHAR(30)) diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mysql.sql deleted file mode 100644 index 91f0b575da..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-mysql.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE ENTITYWITHCOLUMNSREQUIRINGCONVERSIONS ( idTimestamp DATETIME PRIMARY KEY, bool boolean, SOMEENUM VARCHAR(100), bigDecimal DECIMAL(65), bigInteger DECIMAL(20), date DATETIME, localDateTime DATETIME, zonedDateTime VARCHAR(30)) diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-postgres.sql deleted file mode 100644 index 0384fb5521..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryPropertyConversionIntegrationTests-postgres.sql +++ /dev/null @@ -1,2 +0,0 @@ -DROP TABLE ENTITYWITHCOLUMNSREQUIRINGCONVERSIONS; -CREATE TABLE ENTITYWITHCOLUMNSREQUIRINGCONVERSIONS ( idTimestamp TIMESTAMP PRIMARY KEY, bool boolean, SOMEENUM VARCHAR(100), bigDecimal DECIMAL(65), bigInteger BIGINT, date TIMESTAMP, localDateTime TIMESTAMP, zonedDateTime VARCHAR(30)) diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-hsql.sql deleted file mode 100644 index 814e583a23..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-hsql.sql +++ /dev/null @@ 
-1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mysql.sql deleted file mode 100644 index 0e0a7e5626..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-mysql.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content VARCHAR(100), dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-postgres.sql deleted file mode 100644 index cfab77d5b0..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithCollectionsIntegrationTests-postgres.sql +++ /dev/null @@ -1,4 +0,0 @@ -DROP TABLE element; -DROP TABLE dummyentity; -CREATE TABLE dummyentity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id SERIAL PRIMARY KEY, content VARCHAR(100), dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-hsql.sql deleted file mode 100644 index 8ef56e10ff..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-hsql.sql +++ /dev/null 
@@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), DummyEntity_key BIGINT, dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mysql.sql deleted file mode 100644 index 7dad33bd2d..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-mysql.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content VARCHAR(100), DummyEntity_key BIGINT,dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-postgres.sql deleted file mode 100644 index 80fd52e371..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-postgres.sql +++ /dev/null @@ -1,4 +0,0 @@ -DROP TABLE element; -DROP TABLE dummyentity; -CREATE TABLE dummyentity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id SERIAL PRIMARY KEY, content VARCHAR(100),dummyentity_key BIGINT, dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-hsql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-hsql.sql deleted file mode 100644 index 9e813b3003..0000000000 --- 
a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-hsql.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT GENERATED BY DEFAULT AS IDENTITY ( START WITH 1 ) PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1) PRIMARY KEY, content VARCHAR(100), DummyEntity_key VARCHAR(100), dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mysql.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mysql.sql deleted file mode 100644 index f30df0af7f..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-mysql.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE TABLE dummyentity ( id BIGINT AUTO_INCREMENT PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id BIGINT AUTO_INCREMENT PRIMARY KEY, content VARCHAR(100), DummyEntity_key VARCHAR(100),dummyentity BIGINT); diff --git a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-postgres.sql b/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-postgres.sql deleted file mode 100644 index 25562c6550..0000000000 --- a/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithMapsIntegrationTests-postgres.sql +++ /dev/null @@ -1,4 +0,0 @@ -DROP TABLE element; -DROP TABLE dummyentity; -CREATE TABLE dummyentity ( id SERIAL PRIMARY KEY, NAME VARCHAR(100)); -CREATE TABLE element (id SERIAL PRIMARY KEY, content VARCHAR(100),dummyentity_key VARCHAR(100), dummyentity BIGINT);