diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..c1d8714607 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,17 @@ + + +- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc). +- [ ] **There is a ticket in the bug tracker for the project in our [issue tracker](https://github.com/spring-projects/spring-data-elasticsearch/issues)**. Add the issue number to the _Closes #issue-number_ line below +- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes. +- [ ] You submit test cases (unit or integration tests) that back your changes. +- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only). 
+ +Closes #issue-number diff --git a/.gitignore b/.gitignore index 20068080ec..449f58ea44 100644 --- a/.gitignore +++ b/.gitignore @@ -1,21 +1,36 @@ -atlassian-ide-plugin.xml - -## Ignore svn files -.svn - -## ignore any target dir -target - -##ignore only top level data dir - local node data files for unit tests -/data - -## Ignore project files created by Eclipse -.settings -.project -.classpath - -## Ignore project files created by IntelliJ IDEA -*.iml -*.ipr -*.iws -.idea +.DS_Store +*.graphml +.springBeans + +atlassian-ide-plugin.xml + +## Ignore svn files +.svn + +## ignore any target dir +target + +## Ignore project files created by Eclipse +.settings +.project +.classpath + +## Ignore project files created by IntelliJ IDEA +*.iml +*.ipr +*.iws +.idea +/.env + + +/zap.env +/localdocker.env +.localdocker-env + +build/ +node_modules +node +package-lock.json + +.mvn/.develocity +/src/test/resources/testcontainers-local.properties diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml new file mode 100644 index 0000000000..e0857eaa25 --- /dev/null +++ b/.mvn/extensions.xml @@ -0,0 +1,8 @@ + + + + io.spring.develocity.conventions + develocity-conventions-maven-extension + 0.0.22 + + diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..e27f6e8f5e --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,14 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens 
jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED +--add-opens=java.base/java.util=ALL-UNNAMED +--add-opens=java.base/java.lang.reflect=ALL-UNNAMED +--add-opens=java.base/java.text=ALL-UNNAMED +--add-opens=java.desktop/java.awt.font=ALL-UNNAMED diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java new file mode 100644 index 0000000000..64a46202ac --- /dev/null +++ b/.mvn/wrapper/MavenWrapperDownloader.java @@ -0,0 +1,115 @@ + +/* + * Copyright 2007-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import java.net.*; +import java.io.*; +import java.nio.channels.*; +import java.util.Properties; + +public class MavenWrapperDownloader { + + private static final String WRAPPER_VERSION = "0.5.6"; + /** + * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. + */ + private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" + + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; + + /** + * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to use instead of the default + * one. + */ + private static final String MAVEN_WRAPPER_PROPERTIES_PATH = ".mvn/wrapper/maven-wrapper.properties"; + + /** + * Path where the maven-wrapper.jar will be saved to. 
+ */ + private static final String MAVEN_WRAPPER_JAR_PATH = ".mvn/wrapper/maven-wrapper.jar"; + + /** + * Name of the property which should be used to override the default download url for the wrapper. + */ + private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; + + public static void main(String args[]) { + System.out.println("- Downloader started"); + File baseDirectory = new File(args[0]); + System.out.println("- Using transport directory: " + baseDirectory.getAbsolutePath()); + + // If the maven-wrapper.properties exists, read it and check if it contains a custom + // wrapperUrl parameter. + File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); + String url = DEFAULT_DOWNLOAD_URL; + if (mavenWrapperPropertyFile.exists()) { + FileInputStream mavenWrapperPropertyFileInputStream = null; + try { + mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); + Properties mavenWrapperProperties = new Properties(); + mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); + url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); + } catch (IOException e) { + System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); + } finally { + try { + if (mavenWrapperPropertyFileInputStream != null) { + mavenWrapperPropertyFileInputStream.close(); + } + } catch (IOException e) { + // Ignore ... 
+ } + } + } + System.out.println("- Downloading from: " + url); + + File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); + if (!outputFile.getParentFile().exists()) { + if (!outputFile.getParentFile().mkdirs()) { + System.out.println("- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); + } + } + System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); + try { + downloadFileFromURL(url, outputFile); + System.out.println("Done"); + System.exit(0); + } catch (Throwable e) { + System.out.println("- Error downloading"); + e.printStackTrace(); + System.exit(1); + } + } + + private static void downloadFileFromURL(String urlString, File destination) throws Exception { + if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { + String username = System.getenv("MVNW_USERNAME"); + char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication(username, password); + } + }); + } + URL website = new URL(urlString); + ReadableByteChannel rbc; + rbc = Channels.newChannel(website.openStream()); + FileOutputStream fos = new FileOutputStream(destination); + fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); + fos.close(); + rbc.close(); + } + +} diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar new file mode 100644 index 0000000000..2cc7d4a55c Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000000..e075a74d86 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,3 @@ +#Thu Nov 07 09:47:28 CET 2024 +wrapperUrl=https\://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/CI.adoc b/CI.adoc new file mode 100644 index 0000000000..56af9d15ee --- /dev/null +++ b/CI.adoc @@ -0,0 +1,43 @@ += Continuous Integration + +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2Fmain&subject=2020.0.0%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/] +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2F4.0.x&subject=Neumann%20(4.0.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/] +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2F3.2.x&subject=Moore%20(3.2.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/] + +== Running CI tasks locally + +Since this pipeline is purely Docker-based, it's easy to: + +* Debug what went wrong on your local machine. +* Test out a tweak to your `verify.sh` script before sending it out. +* Experiment against a new image before submitting your pull request. + +All of these use cases are great reasons to essentially run what the CI server does on your local machine. + +IMPORTANT: To do this you must have Docker installed on your machine. + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-elasticsearch-github adoptopenjdk/openjdk8:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-elasticsearch-github`. ++ +2. `cd spring-data-elasticsearch-github` ++ +Next, run your tests from inside the container: ++ +3. `./mvnw clean dependency:list test -Dsort` (or whatever profile you need to test out) + +Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. + +If you need to package things up, do this: + +1. 
`docker run -it -v /var/run/docker.sock:/var/run/docker.sock --mount type=bind,source="$(pwd)",target=/spring-data-elasticsearch-github adoptopenjdk/openjdk8:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-elasticsearch-github`. ++ +2. `cd spring-data-elasticsearch-github` ++ +Next, try to package everything up from inside the container: ++ +3. `./mvnw -Pci,snapshot -Dmaven.test.skip=true clean package` + +NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc new file mode 100644 index 0000000000..1cff01d255 --- /dev/null +++ b/CONTRIBUTING.adoc @@ -0,0 +1,10 @@ += Spring Data contribution guidelines + +You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here]. +**Please read these carefully!** + +Do not submit a Pull Request before having created an issue and having discussed it. This prevents you from doing work that might be rejected. + +== Running the test locally + +In order to run the tests locally with `./mvnw test` you need to have docker running because Spring Data Elasticsearch uses https://www.testcontainers.org/[Testcontainers] to start a local running Elasticsearch instance. 
diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000..1d2500ed1e --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,132 @@ +def p = [:] +node { + checkout scm + p = readProperties interpolate: true, file: 'ci/pipeline.properties' +} + +pipeline { + agent none + + triggers { + pollSCM 'H/10 * * * *' + upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS) + } + + options { + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '14')) + } + + stages { + stage("test: baseline (main)") { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh "PROFILE=none JENKINS_USER_NAME=${p['jenkins.user.name']} ci/verify.sh" + sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh" + } + } + } + } + } + + stage("Test other configurations") { + when { + beforeAgent(true) + allOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + parallel { + stage("test: baseline (next)") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.next.image']).inside(p['docker.java.inside.docker']) { + sh "PROFILE=none 
JENKINS_USER_NAME=${p['jenkins.user.name']} ci/verify.sh" + sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh" + } + } + } + } + } + } + } + + stage('Release to artifactory') { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 20, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Pci,artifactory " + + "-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " + + "-Dartifactory.server=${p['artifactory.url']} " + + "-Dartifactory.username=${ARTIFACTORY_USR} " + + "-Dartifactory.password=${ARTIFACTORY_PSW} " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + + "-Dartifactory.build-name=spring-data-elasticsearch " + + "-Dartifactory.build-number=spring-data-elasticsearch-${BRANCH_NAME}-build-${BUILD_NUMBER} " + + "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch " + + "-Dmaven.test.skip=true clean deploy -U -B" + } + } + } + } + } + } + + post { + changed { + script { + emailext( + subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", + mimeType: 'text/html', + recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']], + body: "${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}") + } + } + } +} diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..ff77379631 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 
@@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README.adoc b/README.adoc new file mode 100644 index 0000000000..0242089d82 --- /dev/null +++ b/README.adoc @@ -0,0 +1,179 @@ += Spring Data for Elasticsearch image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring%20Data%20Elasticsearch"] + +The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. + +The Spring Data Elasticsearch project provides integration with the https://www.elastic.co/[Elasticsearch] search engine. +Key functional areas of Spring Data Elasticsearch are a POJO centric model for interacting with Elasticsearch Documents and easily writing a Repository style data access layer. + +This project is led and maintained by the community. + +== Features + +* Spring configuration support using Java based `@Configuration` classes or an XML namespace for an ES client instances. +* `ElasticsearchOperations` class and implementations that increases productivity performing common ES operations. +Includes integrated object mapping between documents and POJOs. +* Feature Rich Object Mapping integrated with Spring’s Conversion Service +* Annotation based mapping metadata +* Automatic implementation of `Repository` interfaces including support for custom search methods. 
+* CDI support for repositories + +== Code of Conduct + +This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. +By participating, you are expected to uphold this code of conduct. +Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. + +== Getting Started + +Here is a quick teaser of an application using Spring Data Repositories in Java: + +[source,java] +---- +public interface PersonRepository extends CrudRepository { + + List findByLastname(String lastname); + + List findByFirstnameLike(String firstname); +} + +@Service +public class MyService { + + private final PersonRepository repository; + + public MyService(PersonRepository repository) { + this.repository = repository; + } + + public void doWork() { + + repository.deleteAll(); + + Person person = new Person(); + person.setFirstname("Oliver"); + person.setLastname("Gierke"); + repository.save(person); + + List lastNameResults = repository.findByLastname("Gierke"); + List firstNameResults = repository.findByFirstnameLike("Oli"); + } +} +---- + +=== Using the RestClient + +Please check the https://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/#elasticsearch.clients.configuration[official documentation]. + +=== Maven configuration + +Add the Maven dependency: + +[source,xml] +---- + + org.springframework.data + spring-data-elasticsearch + ${version} + +---- + +**Compatibility Matrix** + +The compatibility between Spring Data Elasticsearch, Elasticsearch client drivers and Spring Boot versions can be found in the https://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/#preface.versions[reference documentation]. 
+ +To use the Release candidate versions of the upcoming major version, use our Maven milestone repository and declare the appropriate dependency version: + +[source,xml] +---- + + org.springframework.data + spring-data-elasticsearch + ${version}.RCx + + + + spring-snapshot + Spring Snapshot Repository + https://repo.spring.io/milestone + +---- + +If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version: + +[source,xml] +---- + + org.springframework.data + spring-data-elasticsearch + ${version}-SNAPSHOT + + + + spring-snapshot + Spring Snapshot Repository + https://repo.spring.io/snapshot + +---- + +== Getting Help + +Having trouble with Spring Data? +We’d love to help! + +* Check the +https://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/[reference documentation], and https://docs.spring.io/spring-data/elasticsearch/docs/current/api/[Javadocs]. +* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. +If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. +* Ask a question or chat with the community on https://app.gitter.im/#/room/#spring-projects_spring-data:gitter.im[Gitter]. +* Report bugs with Spring Data for Elasticsearch at https://github.com/spring-projects/spring-data-elasticsearch/issues[https://github.com/spring-projects/spring-data-elasticsearch/issues]. + +== Reporting Issues + +Spring Data uses GitHub as issue tracking system to record bugs and feature requests. +If you want to raise an issue, please follow the recommendations below: + +* Before you log a bug, please search the +https://github.com/spring-projects/spring-data-elasticsearch/issues[issue tracker] to see if someone has already reported the problem. 
+* If the issue doesn't already exist, https://github.com/spring-projects/spring-data-elasticsearch/issues/new[create a new issue]. +* Please provide as much information as possible with the issue report, we like to know the version of Spring Data Elasticsearch that you are using and JVM version. +* If you need to paste code, or include a stack trace use Markdown +++```+++ escapes before and after your text. +* If possible try to create a test-case or project that replicates the issue. +Attach a link to your code or a compressed file containing your code. + +== Building from Source + +You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper]. + +You need JDK 17 or above to build the _main_ branch. +For the branches up to and including release 4.4, JDK 8 is required. + +[source,bash] +---- + $ ./mvnw clean install +---- + +If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above]. + +_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before submitting your first pull request._ + +IMPORTANT: When contributing, please make sure an issue exists in https://github.com/spring-projects/spring-data-elasticsearch/issues[issue tracker] and comment on this issue with how you want to address it. +By this we not only know that someone is working on an issue, we can also align architectural questions and possible solutions before work is invested . We so can prevent that much work is put into Pull Requests that have little or no chances of being merged. + +=== Building reference documentation + +Building the documentation builds also the project without running tests. 
+ +[source,bash] +---- + $ ./mvnw clean install -Pantora +---- + +The generated documentation is available from `target/site/index.html`. + +== Examples + +For examples on using the Spring Data for Elasticsearch, see the https://github.com/spring-projects/spring-data-examples/tree/main/elasticsearch/example[spring-data-examples] project. + +== License + +Spring Data for Elasticsearch Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license]. diff --git a/README.md b/README.md deleted file mode 100644 index 6dd0f7b7bf..0000000000 --- a/README.md +++ /dev/null @@ -1,247 +0,0 @@ -Spring Data Elasticsearch -========================= - -Spring Data implementation for ElasticSearch - -Spring Data makes it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services as well as provide improved support for relational database technologies. - -The Spring Data Elasticsearch project provides integration with the [elasticsearch](http://www.elasticsearch.org/) search engine. - -Guide ------------- - -* [Reference Documentation](http://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/) -* [PDF Documentation](http://docs.spring.io/spring-data/elasticsearch/docs/current/reference/pdf/spring-data-elasticsearch-reference.pdf) -* [API Documentation](http://docs.spring.io/spring-data/elasticsearch/docs/current/api/) -* [Spring Data Project](http://projects.spring.io/spring-data) -* [Sample Test Application](https://github.com/BioMedCentralLtd/spring-data-elasticsearch-sample-application) -* [Spring Data Elasticsearch Google Group](https://groups.google.com/d/forum/spring-data-elasticsearch-devs) -* For more detailed questions, use the [forum](http://forum.springsource.org/forumdisplay.php?f=80). 
- - -Quick Start ------------ -Wiki page for [Getting Started](https://github.com/spring-projects/spring-data-elasticsearch/wiki/How-to-start-with-spring-data-elasticsearch) - -### Maven configuration - -Add the Maven dependency: - -```xml - - org.springframework.data - spring-data-elasticsearch - 1.2.0.RELEASE - -``` - -If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare -the appropriate dependency version. - -```xml - - org.springframework.data - spring-data-elasticsearch - 1.3.0.BUILD-SNAPSHOT - - - - spring-libs-snapshot - Spring Snapshot Repository - http://repo.spring.io/libs-snapshot - -``` - -###Note: -Spring data elaticsearch until 1.0.0.RELEASE version is on elasticsearch 1.1.x library, which uses java 1.6 or later version. - -From spring data elastic search 1.1.0.RELEASE (current) on, the version of elasticsearch 1.3.x is supporting java 1.7 or later. - -### ElasticsearchRepository -A default implementation of ElasticsearchRepository, aligning to the generic Repository Interfaces, is provided. Spring can do the Repository implementation for you depending on method names in the interface definition. 
- -The ElasticsearchCrudRepository extends PagingAndSortingRepository - -```java - public interface ElasticsearchCrudRepository extends ElasticsearchRepository, PagingAndSortingRepository { - } -``` - -Extending ElasticsearchRepository for custom methods - -```java - public interface BookRepository extends Repository { - - List findByNameAndPrice(String name, Integer price); - - List findByNameOrPrice(String name, Integer price); - - Page findByName(String name,Pageable page); - - Page findByNameNot(String name,Pageable page); - - Page findByPriceBetween(int price,Pageable page); - - Page findByNameLike(String name,Pageable page); - - @Query("{\"bool\" : {\"must\" : {\"term\" : {\"message\" : \"?0\"}}}}") - Page findByMessage(String message, Pageable pageable); - } -``` -Indexing a single document with Repository - -```java - @Autowired - private SampleElasticsearchRepository repository; - - String documentId = "123456"; - SampleEntity sampleEntity = new SampleEntity(); - sampleEntity.setId(documentId); - sampleEntity.setMessage("some message"); - - repository.save(sampleEntity); -``` - -Indexing multiple Document(bulk index) using Repository - -```java - @Autowired - private SampleElasticsearchRepository repository; - - String documentId = "123456"; - SampleEntity sampleEntity1 = new SampleEntity(); - sampleEntity1.setId(documentId); - sampleEntity1.setMessage("some message"); - - String documentId2 = "123457" - SampleEntity sampleEntity2 = new SampleEntity(); - sampleEntity2.setId(documentId2); - sampleEntity2.setMessage("test message"); - - List sampleEntities = Arrays.asList(sampleEntity1, sampleEntity2); - - //bulk index - repository.save(sampleEntities); -``` - - -### ElasticsearchTemplate - -ElasticsearchTemplate is the central support class for elasticsearch operations. 
- -Indexing a single document using Elasticsearch Template - -```java - String documentId = "123456"; - SampleEntity sampleEntity = new SampleEntity(); - sampleEntity.setId(documentId); - sampleEntity.setMessage("some message"); - IndexQuery indexQuery = new IndexQueryBuilder().withId(sampleEntity.getId()).withObject(sampleEntity).build(); - elasticsearchTemplate.index(indexQuery); -``` - -Indexing multiple Document(bulk index) using Elasticsearch Template - -```java - @Autowired - private ElasticsearchTemplate elasticsearchTemplate; - - List indexQueries = new ArrayList(); - //first document - String documentId = "123456"; - SampleEntity sampleEntity1 = new SampleEntity(); - sampleEntity1.setId(documentId); - sampleEntity1.setMessage("some message"); - - IndexQuery indexQuery1 = new IndexQueryBuilder().withId(sampleEntity1.getId()).withObject(sampleEntity1).build(); - indexQueries.add(indexQuery1); - - //second document - String documentId2 = "123457"; - SampleEntity sampleEntity2 = new SampleEntity(); - sampleEntity2.setId(documentId2); - sampleEntity2.setMessage("some message"); - - IndexQuery indexQuery2 = new IndexQueryBuilder().withId(sampleEntity2.getId()).withObject(sampleEntity2).build() - indexQueries.add(indexQuery2); - - //bulk index - elasticsearchTemplate.bulkIndex(indexQueries); -``` - -Searching entities using Elasticsearch Template - -```java - @Autowired - private ElasticsearchTemplate elasticsearchTemplate; - - SearchQuery searchQuery = new NativeSearchQueryBuilder() - .withQuery(queryString(documentId).field("id")) - .build(); - Page sampleEntities = elasticsearchTemplate.queryForPage(searchQuery,SampleEntity.class); -``` - -### XML Namespace - -You can set up repository scanning via xml configuration, which will happily create your repositories. 
- -Using Node Client - -```xml - - - - - - - - - - -``` - -Using Transport Client - -```xml - - - - - - - - - - - - -``` - -## Help Pages - -* [Geo distance and location search](https://github.com/spring-projects/spring-data-elasticsearch/wiki/Geo-indexing-and-request) -* [Custom object mapper](https://github.com/spring-projects/spring-data-elasticsearch/wiki/Custom-ObjectMapper) - -## Contributing to Spring Data - -Here are some ways for you to get involved in the community: - -* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.springsource.org/forumdisplay.php?f=80) by responding to questions and joining the debate. -* Create [JIRA](https://jira.springframework.org/browse/DATAES) tickets for bugs and new features and comment and vote on the ones that you are interested in. -* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing. -* Watch for upcoming articles on Spring by [subscribing](http://www.springsource.org/node/feed) to springframework.org - -Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests. 
- - -Code formatting for [Eclipse and Intellij](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) - -[More information about contributing to Spring Data](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md) diff --git a/SECURITY.adoc b/SECURITY.adoc new file mode 100644 index 0000000000..2694f228b5 --- /dev/null +++ b/SECURITY.adoc @@ -0,0 +1,9 @@ +# Security Policy + +## Supported Versions + +Please see the https://spring.io/projects/spring-data-elasticsearch[Spring Data Elasticsearch] project page for supported versions. + +## Reporting a Vulnerability + +Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly. diff --git a/TESTING.adoc b/TESTING.adoc new file mode 100644 index 0000000000..f30c7efe34 --- /dev/null +++ b/TESTING.adoc @@ -0,0 +1,20 @@ += Testing + +== Unit tests + +Unit tests in the project are run with + +---- +./mvnw test +---- + +== Integration tests + +Integration tests are executed when +---- +./mvnw verify +---- +is run. There must be _docker_ running, as the integration tests use docker to start an Elasticsearch server. + +Integration tests are tests that have the Junit5 Tag `@Tag("integration-test")` on the test class. Normally this should not be set explicitly, but the annotation `@SpringIntegrationTest` should be used. This not only marks the test as integration test, but integrates an automatic setup of an Elasticsearch Testcontainer and integrate this with Spring, so +that the required Beans can be automatically injected. 
Check _src/test/java/org/springframework/data/elasticsearch/JUnit5SampleRestClientBasedTests.java_ as a reference setup diff --git a/ci/clean.sh b/ci/clean.sh new file mode 100755 index 0000000000..ca174330ee --- /dev/null +++ b/ci/clean.sh @@ -0,0 +1,8 @@ +#!/bin/bash -x + +set -euo pipefail + +export JENKINS_USER=${JENKINS_USER_NAME} + +MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \ + ./mvnw -s settings.xml clean -Dscan=false -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root diff --git a/ci/pipeline.properties b/ci/pipeline.properties new file mode 100644 index 0000000000..cde4a8e881 --- /dev/null +++ b/ci/pipeline.properties @@ -0,0 +1,31 @@ +# Java versions +java.main.tag=24.0.1_9-jdk-noble +java.next.tag=24.0.1_9-jdk-noble + +# Docker container images - standard +docker.java.main.image=library/eclipse-temurin:${java.main.tag} +docker.java.next.image=library/eclipse-temurin:${java.next.tag} + +# Supported versions of MongoDB +docker.mongodb.6.0.version=6.0.23 +docker.mongodb.7.0.version=7.0.20 +docker.mongodb.8.0.version=8.0.9 + +# Supported versions of Redis +docker.redis.6.version=6.2.13 +docker.redis.7.version=7.2.4 + +# Docker environment settings +docker.java.inside.basic=-v $HOME:/tmp/jenkins-home +docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home + +# Credentials +docker.registry= +docker.credentials=hub.docker.com-springbuildmaster +docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com +docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token +artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c +artifactory.url=https://repo.spring.io +artifactory.repository.snapshot=libs-snapshot-local +develocity.access-key=gradle_enterprise_secret_access_key +jenkins.user.name=spring-builds+jenkins diff --git a/ci/verify.sh b/ci/verify.sh 
new file mode 100755 index 0000000000..46afc80280 --- /dev/null +++ b/ci/verify.sh @@ -0,0 +1,10 @@ +#!/bin/bash -x + +set -euo pipefail + +mkdir -p /tmp/jenkins-home/.m2/spring-data-elasticsearch +export JENKINS_USER=${JENKINS_USER_NAME} + +MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \ + ./mvnw -s settings.xml \ + -P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root diff --git a/mvnw b/mvnw new file mode 100755 index 0000000000..9091adf188 --- /dev/null +++ b/mvnw @@ -0,0 +1,310 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. 
to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. 
+ + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project transport directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 
+ fi + if [ -n "$MVNW_REPOURL" ]; then + jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + else + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + fi + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + if $cygwin; then + wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` + fi + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + wget "$jarUrl" -O "$wrapperJarPath" + else + wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" + fi + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + curl -o "$wrapperJarPath" "$jarUrl" -f + else + curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f + fi + + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + # For Cygwin, switch paths to Windows format before running javac + if $cygwin; then + javaClass=`cygpath --path --windows "$javaClass"` + fi + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." 
+ fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +# Provide a "standardized" way to retrieve the CLI args that will +# work with both Windows and non-Windows executions. 
+MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" +export MAVEN_CMD_LINE_ARGS + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100644 index 0000000000..86115719e5 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,182 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. 
+@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. 
+goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + +FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. 
+if exist %WRAPPER_JAR% ( + if "%MVNW_VERBOSE%" == "true" ( + echo Found %WRAPPER_JAR% + ) +) else ( + if not "%MVNW_REPOURL%" == "" ( + SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + ) + if "%MVNW_VERBOSE%" == "true" ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... + echo Downloading from: %DOWNLOAD_URL% + ) + + powershell -Command "&{"^ + "$webclient = new-object System.Net.WebClient;"^ + "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ + "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ + "}"^ + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ + "}" + if "%MVNW_VERBOSE%" == "true" ( + echo Finished downloading %WRAPPER_JAR% + ) +) +@REM End of extension + +@REM Provide a "standardized" way to retrieve the CLI args that will +@REM work with both Windows and non-Windows executions. 
+set MAVEN_CMD_LINE_ARGS=%* + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/package.json b/package.json new file mode 100644 index 0000000000..4689506b3f --- /dev/null +++ b/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "antora": "3.2.0-alpha.6", + "@antora/atlas-extension": "1.0.0-alpha.2", + "@antora/collector-extension": "1.0.0-alpha.7", + "@asciidoctor/tabs": "1.0.0-beta.6", + "@springio/antora-extensions": "1.13.0", + "@springio/asciidoctor-extensions": "1.0.0-alpha.11" + } +} diff --git a/pom.xml b/pom.xml index 41cd7de8bf..4396ce5a62 100644 --- a/pom.xml +++ b/pom.xml @@ -1,182 +1,488 @@ - - 4.0.0 - - org.springframework.data - spring-data-elasticsearch - 1.3.0.BUILD-SNAPSHOT - - - org.springframework.data.build - spring-data-parent - 1.7.0.BUILD-SNAPSHOT - ../spring-data-build/parent/pom.xml - - - Spring Data Elasticsearch - Spring Data Implementation for Elasticsearch - https://github.com/spring-projects/spring-data-elasticsearch - - - - DATAES - - 3.2.1 - 2.6 - 1.5.2 - 1.11.0.BUILD-SNAPSHOT - - - - - - - - org.springframework - spring-context - - - commons-logging - commons-logging - - - - - - org.springframework - spring-tx - - - - - org.springframework.data - spring-data-commons - ${springdata.commons} - - - - - 
commons-lang - commons-lang - ${commonslang} - - - commons-collections - commons-collections - ${commonscollections} - - - - - joda-time - joda-time - ${jodatime} - - - - - org.elasticsearch - elasticsearch - ${elasticsearch} - - - - - com.fasterxml.jackson.core - jackson-core - ${jackson} - - - com.fasterxml.jackson.core - jackson-databind - ${jackson} - - - - - javax.enterprise - cdi-api - ${cdi} - provided - true - - - - - org.springframework - spring-test - ${spring} - test - - - org.apache.openwebbeans.test - cditest-owb - ${webbeans} - test - - - javax.servlet - servlet-api - 3.0-alpha-1 - test - - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - - - org.codehaus.mojo - wagon-maven-plugin - - - org.asciidoctor - asciidoctor-maven-plugin - - - - - - - biomedcentral - BioMed Central Development Team - +0 - - - - - - spring-libs-snapshot - https://repo.spring.io/libs-snapshot - - - - - - spring-plugins-release - https://repo.spring.io/plugins-release - - - - - https://github.com/spring-projects/spring-data-elasticsearch - scm:git:git://github.com/spring-projects/spring-data-elasticsearch.git - scm:git:ssh://git@github.com/spring-projects/spring-data-elasticsearch.git - - - - - Bamboo - http://build.springsource.org/browse/SPRINGDATAES - - - - JIRA - https://jira.springsource.org/browse/DATAES - + + + 4.0.0 + + org.springframework.data + spring-data-elasticsearch + 6.0.0-SNAPSHOT + + + org.springframework.data.build + spring-data-parent + 4.0.0-SNAPSHOT + + + Spring Data Elasticsearch + Spring Data Implementation for Elasticsearch + https://github.com/spring-projects/spring-data-elasticsearch + + + 4.0.0-SNAPSHOT + + + 9.0.1 + + 0.19.0 + 2.23.1 + 1.5.3 + 1.20.0 + 3.9.1 + + spring.data.elasticsearch + + + test + integration-test + + + + + biomedcentral + BioMed Central Development Team + +0 + + + cstrobl + Christoph Strobl + cstrobl at pivotal.io + Pivotal + https://www.pivotal.io + + Developer + + +1 + + + mpaluch + Mark Paluch + mpaluch at 
pivotal.io + Pivotal + https://www.pivotal.io + + Developer + + +1 + + + + + https://github.com/spring-projects/spring-data-elasticsearch + scm:git:git://github.com/spring-projects/spring-data-elasticsearch.git + scm:git:ssh://git@github.com/spring-projects/spring-data-elasticsearch.git + + + + + Bamboo + https://build.spring.io/browse/SPRINGDATAES + + + + GitHub + https://github.com/spring-projects/spring-data-elasticsearch/issues + + + + + + + org.springframework + spring-context + + + + org.springframework + spring-tx + + + + + org.springframework.data + spring-data-commons + ${springdata.commons} + + + + + org.springframework + spring-webflux + true + + + + io.projectreactor + reactor-test + test + + + + co.elastic.clients + elasticsearch-java + ${elasticsearch-java} + + + commons-logging + commons-logging + + + + + + org.elasticsearch.client + elasticsearch-rest-client + ${elasticsearch-java} + + + commons-logging + commons-logging + + + + + + com.querydsl + querydsl-core + ${querydsl} + true + + + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + + + + javax.interceptor + javax.interceptor-api + 1.2.2 + test + + + + jakarta.enterprise + jakarta.enterprise.cdi-api + provided + true + + + + jakarta.annotation + jakarta.annotation-api + ${jakarta-annotation-api} + test + + + + org.apache.openwebbeans + openwebbeans-se + ${webbeans} + test + + + + + org.jetbrains.kotlin + kotlin-stdlib + true + + + + org.jetbrains.kotlin + kotlin-reflect + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-core + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-reactor + true + + + + + org.springframework + spring-test + test + + + ch.qos.logback + logback-classic + + + + + + org.jetbrains.kotlinx + kotlinx-coroutines-test + test + true + + + + org.slf4j + log4j-over-slf4j + ${slf4j} + test + + + org.apache.logging.log4j + log4j-core + ${log4j} + test + + + org.apache.logging.log4j + log4j-to-slf4j + ${log4j} + 
test + + + + org.skyscreamer + jsonassert + ${jsonassert} + test + + + + org.wiremock + wiremock + ${wiremock} + test + + + + commons-logging + commons-logging + + + org.ow2.asm + asm + + + + + + io.specto + hoverfly-java-junit5 + ${hoverfly} + test + + + + + org.apache.xbean + xbean-asm5-shaded + 4.5 + test + + + + javax.servlet + javax.servlet-api + 3.1.0 + test + + + + org.mockito + mockito-junit-jupiter + ${mockito} + test + + + + org.testcontainers + elasticsearch + ${testcontainers} + test + + + + + commons-codec + commons-codec + 1.15 + test + + + + com.tngtech.archunit + archunit-junit5 + ${archunit} + test + + + + + + + + src/main/resources + true + + **/versions.properties + + + + src/main/resources + false + + **/versions.properties + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + true + false + + **/*Tests.java + **/*Test.java + + + false + + + + + + default-test + ${mvn.unit-test.goal} + + test + + + integration-test + + + + + integration-test-elasticsearch + ${mvn.integration-test-elasticsearch.goal} + + test + + + integration-test + + elasticsearch + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.apache.logging.log4j + log4j-core + ${log4j} + + + + + + + + + + ci + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + + + + + + + + + **/* + + .git/**/*,target/**/*,**/target/**/*,.idea/**/*,**/spring.schemas,**/*.svg,mvnw,mvnw.cmd,**/*.policy + + ./ + + + + + + + + antora-process-resources + + + + src/main/antora/resources/antora-resources + true + + + + + + + antora + + + + org.antora + antora-maven-plugin + + + + + + + + + spring-snapshot + https://repo.spring.io/snapshot + + true + + + false + + + + spring-milestone + https://repo.spring.io/milestone + + diff --git a/settings.xml b/settings.xml new file mode 100644 index 0000000000..b3227cc110 --- /dev/null +++ b/settings.xml @@ -0,0 +1,29 @@ + + + + + spring-plugins-release + 
${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-snapshot + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-milestone + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + + \ No newline at end of file diff --git a/src/main/antora/antora-playbook.yml b/src/main/antora/antora-playbook.yml new file mode 100644 index 0000000000..1a4f73c1e6 --- /dev/null +++ b/src/main/antora/antora-playbook.yml @@ -0,0 +1,40 @@ +# PACKAGES antora@3.2.0-alpha.2 @antora/atlas-extension:1.0.0-alpha.1 @antora/collector-extension@1.0.0-alpha.3 @springio/antora-extensions@1.1.0-alpha.2 @asciidoctor/tabs@1.0.0-alpha.12 @opendevise/antora-release-line-extension@1.0.0-alpha.2 +# +# The purpose of this Antora playbook is to build the docs in the current branch. +antora: + extensions: + - require: '@springio/antora-extensions' + root_component_name: 'data-elasticsearch' +site: + title: Spring Data Elasticsearch + url: https://docs.spring.io/spring-data-elasticsearch/reference/ +content: + sources: + - url: ./../../.. 
+ branches: HEAD + start_path: src/main/antora + worktrees: true + - url: https://github.com/spring-projects/spring-data-commons + # Refname matching: + # https://docs.antora.org/antora/latest/playbook/content-refname-matching/ + branches: [ main, 3.4.x, 3.3.x ] + start_path: src/main/antora +asciidoc: + attributes: + hide-uri-scheme: '@' + tabs-sync-option: '@' + extensions: + - '@asciidoctor/tabs' + - '@springio/asciidoctor-extensions' + - '@springio/asciidoctor-extensions/javadoc-extension' + sourcemap: true +urls: + latest_version_segment: '' +runtime: + log: + failure_level: warn + format: pretty +ui: + bundle: + url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.16/ui-bundle.zip + snapshot: true diff --git a/src/main/antora/antora.yml b/src/main/antora/antora.yml new file mode 100644 index 0000000000..2348fca613 --- /dev/null +++ b/src/main/antora/antora.yml @@ -0,0 +1,17 @@ +name: data-elasticsearch +version: true +title: Spring Data Elasticsearch +nav: + - modules/ROOT/nav.adoc +ext: + collector: + - run: + command: ./mvnw validate process-resources -am -Pantora-process-resources + local: true + scan: + dir: target/classes/ + - run: + command: ./mvnw package -Pdistribute + local: true + scan: + dir: target/antora diff --git a/src/main/antora/modules/ROOT/nav.adoc b/src/main/antora/modules/ROOT/nav.adoc new file mode 100644 index 0000000000..fa1ee8110d --- /dev/null +++ b/src/main/antora/modules/ROOT/nav.adoc @@ -0,0 +1,47 @@ +* xref:index.adoc[Overview] +** xref:commons/upgrade.adoc[] +** xref:migration-guides.adoc[] +*** xref:migration-guides/migration-guide-3.2-4.0.adoc[] +*** xref:migration-guides/migration-guide-4.0-4.1.adoc[] +*** xref:migration-guides/migration-guide-4.1-4.2.adoc[] +*** xref:migration-guides/migration-guide-4.2-4.3.adoc[] +*** xref:migration-guides/migration-guide-4.3-4.4.adoc[] +*** xref:migration-guides/migration-guide-4.4-5.0.adoc[] +*** xref:migration-guides/migration-guide-5.0-5.1.adoc[] +*** 
xref:migration-guides/migration-guide-5.1-5.2.adoc[] +*** xref:migration-guides/migration-guide-5.2-5.3.adoc[] +*** xref:migration-guides/migration-guide-5.3-5.4.adoc[] +*** xref:migration-guides/migration-guide-5.4-5.5.adoc[] +*** xref:migration-guides/migration-guide-5.5-6.0.adoc[] + + +* xref:elasticsearch.adoc[] +** xref:elasticsearch/clients.adoc[] +** xref:elasticsearch/object-mapping.adoc[] +** xref:elasticsearch/template.adoc[] +** xref:elasticsearch/reactive-template.adoc[] +** xref:elasticsearch/entity-callbacks.adoc[] +** xref:elasticsearch/auditing.adoc[] +** xref:elasticsearch/join-types.adoc[] +** xref:elasticsearch/routing.adoc[] +** xref:elasticsearch/misc.adoc[] +** xref:elasticsearch/scripted-and-runtime-fields.adoc[] + +* xref:repositories.adoc[] +** xref:repositories/core-concepts.adoc[] +** xref:repositories/definition.adoc[] +** xref:elasticsearch/repositories/elasticsearch-repositories.adoc[] +** xref:elasticsearch/repositories/reactive-elasticsearch-repositories.adoc[] +** xref:repositories/create-instances.adoc[] +** xref:repositories/query-methods-details.adoc[] +** xref:elasticsearch/repositories/elasticsearch-repository-queries.adoc[] +** xref:repositories/projections.adoc[] +** xref:repositories/custom-implementations.adoc[] +** xref:repositories/core-domain-events.adoc[] +** xref:repositories/null-handling.adoc[] +** xref:elasticsearch/repositories/cdi-integration.adoc[] +** xref:repositories/query-keywords-reference.adoc[] +** xref:repositories/query-return-types-reference.adoc[] + +* xref:attachment$api/java/index.html[Javadoc,role=link-external,window=_blank] +* https://github.com/spring-projects/spring-data-commons/wiki[Wiki,role=link-external,window=_blank] diff --git a/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc new file mode 100644 index 0000000000..51a9189aa0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc @@ -0,0 +1 @@ 
+include::{commons}@data-commons::page$upgrade.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch.adoc new file mode 100644 index 0000000000..fe0bddbf20 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch.adoc @@ -0,0 +1,16 @@ +[[elasticsearch.core]] += Elasticsearch Support +:page-section-summary-toc: 1 + +Spring Data support for Elasticsearch contains a wide range of features: + +* Spring configuration support for various xref:elasticsearch/clients.adoc[Elasticsearch clients]. +* The xref:elasticsearch/template.adoc[`ElasticsearchTemplate` and `ReactiveElasticsearchTemplate`] helper classes that provide object mapping between ES index operations and POJOs. +* xref:elasticsearch/template.adoc#exception-translation[Exception translation] into Spring's portable {springDocsUrl}data-access.html#dao-exceptions[Data Access Exception Hierarchy]. +* Feature rich xref:elasticsearch/object-mapping.adoc[object mapping] integrated with _Spring's_ {springDocsUrl}core.html#core-convert[Conversion Service]. +* xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations[Annotation-based mapping] metadata that is extensible to support other metadata formats. +* Java-based xref:elasticsearch/template.adoc#cassandra.template.query[query, criteria, and update DSLs]. +* Automatic implementation of xref:repositories.adoc[imperative and reactive `Repository` interfaces] including support for xref:repositories/custom-implementations.adoc[custom query methods]. + +For most data-oriented tasks, you can use the `[Reactive]ElasticsearchTemplate` or the `Repository` support, both of which use the rich object-mapping functionality. +Spring Data Elasticsearch uses consistent naming conventions on objects in various APIs to those found in the DataStax Java Driver so that they are familiar and so that you can map your existing knowledge onto the Spring APIs. 
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/auditing.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/auditing.adoc new file mode 100644 index 0000000000..f9633dec4f --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/auditing.adoc @@ -0,0 +1,85 @@ +[[elasticsearch.auditing]] += Elasticsearch Auditing + +[[elasticsearch.auditing.preparing]] +== Preparing entities + +In order for the auditing code to be able to decide whether an entity instance is new, the entity must implement the `Persistable` interface which is defined as follows: + +[source,java] +---- +package org.springframework.data.domain; + +import org.jspecify.annotations.Nullable; + +public interface Persistable { + @Nullable + ID getId(); + + boolean isNew(); +} +---- + +As the existence of an Id is not a sufficient criterion to determine if an entity is new in Elasticsearch, additional information is necessary. One way is to use the creation-relevant auditing fields for this decision: + +A `Person` entity might look as follows - omitting getter and setter methods for brevity: + +[source,java] +---- +@Document(indexName = "person") +public class Person implements Persistable { + @Id private Long id; + private String lastName; + private String firstName; + @CreatedDate + @Field(type = FieldType.Date, format = DateFormat.basic_date_time) + private Instant createdDate; + @CreatedBy + private String createdBy; + @Field(type = FieldType.Date, format = DateFormat.basic_date_time) + @LastModifiedDate + private Instant lastModifiedDate; + @LastModifiedBy + private String lastModifiedBy; + + public Long getId() { // <.> + return id; + } + + @Override + public boolean isNew() { + return id == null || (createdDate == null && createdBy == null); // <.> + } +} +---- +<.> the getter is the required implementation from the interface +<.> an object is new if it either has no `id` or none of the fields containing creation attributes are set.
+ +[[elasticsearch.auditing.activating]] +== Activating auditing + +After the entities have been set up and providing the `AuditorAware` - or `ReactiveAuditorAware` - the Auditing must be activated by setting the `@EnableElasticsearchAuditing` on a configuration class: + +[source,java] +---- +@Configuration +@EnableElasticsearchRepositories +@EnableElasticsearchAuditing +class MyConfiguration { + // configuration code +} +---- + +When using the reactive stack this must be: +[source,java] +---- +@Configuration +@EnableReactiveElasticsearchRepositories +@EnableReactiveElasticsearchAuditing +class MyConfiguration { + // configuration code +} +---- + +If your code contains more than one `AuditorAware` bean for different types, you must provide the name of the bean to use as an argument to the `auditorAwareRef` parameter of the + `@EnableElasticsearchAuditing` annotation. diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc new file mode 100644 index 0000000000..0cf7d5ea3c --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc @@ -0,0 +1,234 @@ +[[elasticsearch.clients]] += Elasticsearch Clients + +This chapter illustrates configuration and usage of supported Elasticsearch client implementations. + +Spring Data Elasticsearch operates upon an Elasticsearch client (provided by Elasticsearch client libraries) that is connected to a single Elasticsearch node or a cluster. +Although the Elasticsearch Client can be used directly to work with the cluster, applications using Spring Data Elasticsearch normally use the higher level abstractions of xref:elasticsearch/template.adoc[Elasticsearch Operations] and xref:elasticsearch/repositories/elasticsearch-repositories.adoc[Elasticsearch Repositories]. 
+ +[[elasticsearch.clients.restclient]] +== Imperative Rest Client + +To use the imperative (non-reactive) client, a configuration bean must be configured like this: + +==== +[source,java] +---- +import org.springframework.data.elasticsearch.client.elc.ElasticsearchConfiguration; + +@Configuration +public class MyClientConfig extends ElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() <.> + .connectedTo("localhost:9200") + .build(); + } +} +---- + +<.> for a detailed description of the builder methods see xref:elasticsearch/clients.adoc#elasticsearch.clients.configuration[Client Configuration] +==== + +The javadoc:org.springframework.data.elasticsearch.client.elc.ElasticsearchConfiguration[]] class allows further configuration by overriding for example the `jsonpMapper()` or `transportOptions()` methods. + + +The following beans can then be injected in other Spring components: + +==== +[source,java] +---- +import org.springframework.beans.factory.annotation.Autowired;@Autowired +ElasticsearchOperations operations; <.> + +@Autowired +ElasticsearchClient elasticsearchClient; <.> + +@Autowired +RestClient restClient; <.> + +@Autowired +JsonpMapper jsonpMapper; <.> +---- + +<.> an implementation of javadoc:org.springframework.data.elasticsearch.core.ElasticsearchOperations[] +<.> the `co.elastic.clients.elasticsearch.ElasticsearchClient` that is used. +<.> the low level `RestClient` from the Elasticsearch libraries +<.> the `JsonpMapper` user by the Elasticsearch `Transport` +==== + +Basically one should just use the javadoc:org.springframework.data.elasticsearch.core.ElasticsearchOperations[] to interact with the Elasticsearch cluster. +When using repositories, this instance is used under the hood as well. 
+ +[[elasticsearch.clients.reactiverestclient]] +== Reactive Rest Client + +When working with the reactive stack, the configuration must be derived from a different class: + +==== +[source,java] +---- +import org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchConfiguration; + +@Configuration +public class MyClientConfig extends ReactiveElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() <.> + .connectedTo("localhost:9200") + .build(); + } +} +---- + +<.> for a detailed description of the builder methods see xref:elasticsearch/clients.adoc#elasticsearch.clients.configuration[Client Configuration] +==== + +The javadoc:org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchConfiguration[] class allows further configuration by overriding for example the `jsonpMapper()` or `transportOptions()` methods. + +The following beans can then be injected in other Spring components: + +==== +[source,java] +---- +@Autowired +ReactiveElasticsearchOperations operations; <.> + +@Autowired +ReactiveElasticsearchClient elasticsearchClient; <.> + +@Autowired +RestClient restClient; <.> + +@Autowired +JsonpMapper jsonpMapper; <.> +---- + +the following can be injected: + +<.> an implementation of javadoc:org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations[] +<.> the `org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchClient` that is used. +This is a reactive implementation based on the Elasticsearch client implementation. +<.> the low level `RestClient` from the Elasticsearch libraries +<.> the `JsonpMapper` user by the Elasticsearch `Transport` +==== + +Basically one should just use the javadoc:org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations[] to interact with the Elasticsearch cluster. +When using repositories, this instance is used under the hood as well. 
+ +[[elasticsearch.clients.configuration]] +== Client Configuration + +Client behaviour can be changed via the javadoc:org.springframework.data.elasticsearch.client.ClientConfiguration[] that allows to set options for SSL, connect and socket timeouts, headers and other parameters. + +.Client Configuration +==== +[source,java] +---- +import org.springframework.data.elasticsearch.client.ClientConfiguration; +import org.springframework.data.elasticsearch.support.HttpHeaders; + +import static org.springframework.data.elasticsearch.client.elc.ElasticsearchClients.*; + +HttpHeaders httpHeaders = new HttpHeaders(); +httpHeaders.add("some-header", "on every request") <.> + +ClientConfiguration clientConfiguration = ClientConfiguration.builder() + .connectedTo("localhost:9200", "localhost:9291") <.> + .usingSsl() <.> + .withProxy("localhost:8888") <.> + .withPathPrefix("ela") <.> + .withConnectTimeout(Duration.ofSeconds(5)) <.> + .withSocketTimeout(Duration.ofSeconds(3)) <.> + .withDefaultHeaders(defaultHeaders) <.> + .withBasicAuth(username, password) <.> + .withHeaders(() -> { <.> + HttpHeaders headers = new HttpHeaders(); + headers.add("currentTime", LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME)); + return headers; + }) + .withClientConfigurer( <.> + ElasticsearchHttpClientConfigurationCallback.from(clientBuilder -> { + // ... + return clientBuilder; + })) + . // ... other options + .build(); + +---- + +<.> Define default headers, if they need to be customized +<.> Use the builder to provide cluster addresses, set default `HttpHeaders` or enable SSL. +<.> Optionally enable SSL.There exist overloads of this function that can take a `SSLContext` or as an alternative the fingerprint of the certificate as it is output by Elasticsearch 8 on startup. +<.> Optionally set a proxy. +<.> Optionally set a path prefix, mostly used when different clusters a behind some reverse proxy. +<.> Set the connection timeout. +<.> Set the socket timeout. 
+<.> Optionally set headers. +<.> Add basic authentication. +<.> A `Supplier` function can be specified which is called every time before a request is sent to Elasticsearch - here, as an example, the current time is written in a header. +<.> a function to configure the created client (see xref:elasticsearch/clients.adoc#elasticsearch.clients.configuration.callbacks[Client configuration callbacks]), can be added multiple times. +==== + +IMPORTANT: Adding a Header supplier as shown in above example allows to inject headers that may change over the time, like authentication JWT tokens. +If this is used in the reactive setup, the supplier function *must not* block! + +[[elasticsearch.clients.configuration.callbacks]] +=== Client configuration callbacks + +The javadoc:org.springframework.data.elasticsearch.client.ClientConfiguration[] class offers the most common parameters to configure the client. +In the case this is not enough, the user can add callback functions by using the `withClientConfigurer(ClientConfigurationCallback)` method. + +The following callbacks are provided: + +[[elasticsearch.clients.configuration.callbacks.rest]] +==== Configuration of the low level Elasticsearch `RestClient`: + +This callback provides a `org.elasticsearch.client.RestClientBuilder` that can be used to configure the Elasticsearch +`RestClient`: +==== +[source,java] +---- +ClientConfiguration.builder() + .connectedTo("localhost:9200", "localhost:9291") + .withClientConfigurer(ElasticsearchClients.ElasticsearchRestClientConfigurationCallback.from(restClientBuilder -> { + // configure the Elasticsearch RestClient + return restClientBuilder; + })) + .build(); +---- +==== + +[[elasticsearch.clients.configurationcallbacks.httpasync]] +==== Configuration of the HttpAsyncClient used by the low level Elasticsearch `RestClient`: + +This callback provides a `org.apache.http.impl.nio.client.HttpAsyncClientBuilder` to configure the HttpCLient that is +used by the `RestClient`. 
+ +==== +[source,java] +---- +ClientConfiguration.builder() + .connectedTo("localhost:9200", "localhost:9291") + .withClientConfigurer(ElasticsearchClients.ElasticsearchHttpClientConfigurationCallback.from(httpAsyncClientBuilder -> { + // configure the HttpAsyncClient + return httpAsyncClientBuilder; + })) + .build(); +---- +==== + +[[elasticsearch.clients.logging]] +== Client Logging + +To see what is actually sent to and received from the server `Request` / `Response` logging on the transport level needs to be turned on as outlined in the snippet below. +This can be enabled in the Elasticsearch client by setting the level of the `tracer` package to "trace" (see +https://www.elastic.co/guide/en/elasticsearch/client/java-api-client/current/java-rest-low-usage-logging.html) + +.Enable transport layer logging +[source,xml] +---- + +---- diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc new file mode 100644 index 0000000000..f1e07dd195 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc @@ -0,0 +1,121 @@ +[[new-features]] += What's new + +[[new-features.6-0-0]] +== New in Spring Data Elasticsearch 6.0 + +* Upgrade to Spring 7 +* Switch to jspecify nullability annotations +* Upgrade to Elasticsearch 9.0.1 + + +[[new-features.5-5-0]] +== New in Spring Data Elasticsearch 5.5 + +* Upgrade to Elasticsearch 8.18.1. +* Add support for the `@SearchTemplateQuery` annotation on repository methods. +* Scripted field properties of type collection can be populated from scripts returning arrays. + +[[new-features.5-4-0]] +== New in Spring Data Elasticsearch 5.4 + +* Upgrade to Elasticsearch 8.15.3. +* Allow to customize the mapped type name for `@InnerField` and `@Field` annotations. +* Support for Elasticsearch SQL. +* Add support for retrieving request executionDuration.
+ +[[new-features.5-3-0]] +== New in Spring Data Elasticsearch 5.3 + +* Upgrade to Elasticsearch 8.13.2. +* Add support for highlight queries in highlighting. +* Add shard statistics to the `SearchHit` class. +* Add support for multi search template API. +* Add support for SpEL in @Query. +* Add support for field aliases in the index mapping. +* Add support for has_child and has_parent queries. + +[[new-features.5-2-0]] +== New in Spring Data Elasticsearch 5.2 + +* Upgrade to Elasticsearch 8.11.1 +* The `JsonpMapper` for Elasticsearch is now configurable and provided as bean. +* Improved AOT runtime hints for Elasticsearch client library classes. +* Add Kotlin extensions and repository coroutine support. +* Introducing `VersionConflictException` class thrown in case that Elasticsearch reports a 409 error with a version conflict. +* Enable MultiField annotation on property getter +* Support nested sort option +* Improved scripted and runtime field support +* Improved refresh policy support + +[[new-features.5-1-0]] +== New in Spring Data Elasticsearch 5.1 + +* Upgrade to Elasticsearch 8.7.1 +* Allow specification of the TLS certificate when connecting to an Elasticsearch 8 cluster + +[[new-features.5-0-0]] +== New in Spring Data Elasticsearch 5.0 + +* Upgrade to Java 17 baseline +* Upgrade to Spring Framework 6 +* Upgrade to Elasticsearch 8.5.0 +* Use the new Elasticsearch client library + +[[new-features.4-4-0]] +== New in Spring Data Elasticsearch 4.4 + +* Introduction of new imperative and reactive clients using the classes from the new Elasticsearch Java client +* Upgrade to Elasticsearch 7.17.3. + +[[new-features.4-3-0]] +== New in Spring Data Elasticsearch 4.3 + +* Upgrade to Elasticsearch 7.15.2. +* Allow runtime_fields to be defined in the index mapping. +* Add native support for range field types by using a range object. +* Add repository search for nullable or empty properties. +* Enable custom converters for single fields.
+* Supply a custom `Sort.Order` providing Elasticsearch specific parameters. + +[[new-features.4-2-0]] +== New in Spring Data Elasticsearch 4.2 + +* Upgrade to Elasticsearch 7.10.0. +* Support for custom routing values + +[[new-features.4-1-0]] +== New in Spring Data Elasticsearch 4.1 + +* Uses Spring 5.3. +* Upgrade to Elasticsearch 7.9.3. +* Improved API for alias management. +* Introduction of `ReactiveIndexOperations` for index management. +* Index templates support. +* Support for Geo-shape data with GeoJson. + +[[new-features.4-0-0]] +== New in Spring Data Elasticsearch 4.0 + +* Uses Spring 5.2. +* Upgrade to Elasticsearch 7.6.2. +* Deprecation of `TransportClient` usage. +* Implements most of the mapping-types available for the index mappings. +* Removal of the Jackson `ObjectMapper`, now using the xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[MappingElasticsearchConverter] +* Cleanup of the API in the `*Operations` interfaces, grouping and renaming methods so that they match the Elasticsearch API, deprecating the old methods, aligning with other Spring Data modules. +* Introduction of `SearchHit` class to represent a found document together with the relevant result metadata for this document (i.e. _sortValues_). +* Introduction of the `SearchHits` class to represent a whole search result together with the metadata for the complete search result (i.e. _max_score_). +* Introduction of `SearchPage` class to represent a paged result containing a `SearchHits` instance. +* Introduction of the `GeoDistanceOrder` class to be able to create sorting by geographical distance +* Implementation of Auditing Support +* Implementation of lifecycle entity callbacks + +[[new-features.3-2-0]] +== New in Spring Data Elasticsearch 3.2 + +* Secured Elasticsearch cluster support with Basic Authentication and SSL transport. +* Upgrade to Elasticsearch 6.8.1. 
+* Reactive programming support with xref:elasticsearch/repositories/reactive-elasticsearch-repositories.adoc[Reactive Elasticsearch Repositories] and xref:. +* Introduction of the xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[ElasticsearchEntityMapper] as an alternative to the Jackson `ObjectMapper`. +* Field name customization in `@Field`. +* Support for Delete by Query. diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/entity-callbacks.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/entity-callbacks.adoc new file mode 100644 index 0000000000..cbc08eee39 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/entity-callbacks.adoc @@ -0,0 +1,42 @@ +include::{commons}@data-commons::page$entity-callbacks.adoc[] + +[[elasticsearch.entity-callbacks]] +== Store specific EntityCallbacks + +Spring Data Elasticsearch uses the `EntityCallback` API internally for its auditing support and reacts on the following callbacks: + +.Supported Entity Callbacks +[%header,cols="4"] +|=== +| Callback +| Method +| Description +| Order + +| Reactive/BeforeConvertCallback +| `onBeforeConvert(T entity, IndexCoordinates index)` +| Invoked before a domain object is converted to `org.springframework.data.elasticsearch.core.document.Document`. +Can return the `entity` or a modified entity which then will be converted. +| `Ordered.LOWEST_PRECEDENCE` + +| Reactive/AfterLoadCallback +| `onAfterLoad(Document document, Class type, IndexCoordinates indexCoordinates)` +| Invoked after the result from Elasticsearch has been read into a `org.springframework.data.elasticsearch.core.document.Document`. +| `Ordered.LOWEST_PRECEDENCE` + +| Reactive/AfterConvertCallback +| `onAfterConvert(T entity, Document document, IndexCoordinates indexCoordinates)` +| Invoked after a domain object is converted from `org.springframework.data.elasticsearch.core.document.Document` on reading result data from Elasticsearch. 
+| `Ordered.LOWEST_PRECEDENCE` + +| Reactive/AuditingEntityCallback +| `onBeforeConvert(Object entity, IndexCoordinates index)` +| Marks an auditable entity _created_ or _modified_ +| 100 + +| Reactive/AfterSaveCallback +| `T onAfterSave(T entity, IndexCoordinates index)` +| Invoked after a domain object is saved. +| `Ordered.LOWEST_PRECEDENCE` + +|=== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/join-types.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/join-types.adoc new file mode 100644 index 0000000000..a1bc3df192 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/join-types.adoc @@ -0,0 +1,239 @@ +[[elasticsearch.jointype]] += Join-Type implementation + +Spring Data Elasticsearch supports the https://www.elastic.co/guide/en/elasticsearch/reference/current/parent-join.html[Join data type] for creating the corresponding index mappings and for storing the relevant information. + +[[elasticsearch.jointype.setting-up]] +== Setting up the data + +For an entity to be used in a parent child join relationship, it must have a property of type `JoinField` which must be annotated. 
+Let's assume a `Statement` entity where a statement may be a _question_, an _answer_, a _comment_ or a _vote_ (a _Builder_ is also shown in this example, it's not necessary, but later used in the sample code): + +==== +[source,java] +---- +@Document(indexName = "statements") +@Routing("routing") <.> +public class Statement { + @Id + private String id; + + @Field(type = FieldType.Text) + private String text; + + @Field(type = FieldType.Keyword) + private String routing; + + @JoinTypeRelations( + relations = + { + @JoinTypeRelation(parent = "question", children = {"answer", "comment"}), <.> + @JoinTypeRelation(parent = "answer", children = "vote") <.> + } + ) + private JoinField relation; <.> + + private Statement() { + } + + public static StatementBuilder builder() { + return new StatementBuilder(); + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getRouting() { + return routing; + } + + public void setRouting(String routing) { + this.routing = routing; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + + public JoinField getRelation() { + return relation; + } + + public void setRelation(JoinField relation) { + this.relation = relation; + } + + public static final class StatementBuilder { + private String id; + private String text; + private String routing; + private JoinField relation; + + private StatementBuilder() { + } + + public StatementBuilder withId(String id) { + this.id = id; + return this; + } + + public StatementBuilder withRouting(String routing) { + this.routing = routing; + return this; + } + + public StatementBuilder withText(String text) { + this.text = text; + return this; + } + + public StatementBuilder withRelation(JoinField relation) { + this.relation = relation; + return this; + } + + public Statement build() { + Statement statement = new Statement(); + statement.setId(id); + statement.setRouting(routing); 
+ statement.setText(text); + statement.setRelation(relation); + return statement; + } + } +} +---- +<.> for routing related info see xref:elasticsearch/routing.adoc[Routing values] +<.> a question can have answers and comments +<.> an answer can have votes +<.> the `JoinField` property is used to combine the name (_question_, _answer_, _comment_ or _vote_) of the relation with the parent id. +The generic type must be the same as the `@Id` annotated property. +==== + +Spring Data Elasticsearch will build the following mapping for this class: + +==== +[source,json] +---- +{ + "statements": { + "mappings": { + "properties": { + "_class": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "routing": { + "type": "keyword" + }, + "relation": { + "type": "join", + "eager_global_ordinals": true, + "relations": { + "question": [ + "answer", + "comment" + ], + "answer": "vote" + } + }, + "text": { + "type": "text" + } + } + } + } +} +---- +==== + +[[elasticsearch.jointype.storing]] +== Storing data + +Given a repository for this class the following code inserts a question, two answers, a comment and a vote: + +==== +[source,java] +---- +void init() { + repository.deleteAll(); + + Statement savedWeather = repository.save( + Statement.builder() + .withText("How is the weather?") + .withRelation(new JoinField<>("question")) <1> + .build()); + + Statement sunnyAnswer = repository.save( + Statement.builder() + .withText("sunny") + .withRelation(new JoinField<>("answer", savedWeather.getId())) <2> + .build()); + + repository.save( + Statement.builder() + .withText("rainy") + .withRelation(new JoinField<>("answer", savedWeather.getId())) <3> + .build()); + + repository.save( + Statement.builder() + .withText("I don't like the rain") + .withRelation(new JoinField<>("comment", savedWeather.getId())) <4> + .build()); + + repository.save( + Statement.builder() + .withText("+1 for the sun") + .withRouting(savedWeather.getId()) + 
.withRelation(new JoinField<>("vote", sunnyAnswer.getId())) <5>
+            .build());
+}
+----
+<1> create a question statement
+<2> the first answer to the question
+<3> the second answer
+<4> a comment to the question
+<5> a vote for the first answer, this needs to have the routing set to the weather document, see xref:elasticsearch/routing.adoc[Routing values].
+====
+
+[[elasticsearch.jointype.retrieving]]
+== Retrieving data
+
+Currently native queries must be used to query the data, so there is no support from standard repository methods. xref:repositories/custom-implementations.adoc[] can be used instead.
+
+The following code shows, as an example, how to retrieve all entries that have a _vote_ (which must be _answers_, because only answers can have a vote) using an `ElasticsearchOperations` instance:
+
+====
+[source,java]
+----
+SearchHits hasVotes() {
+
+	Query query = NativeQuery.builder()
+		.withQuery(co.elastic.clients.elasticsearch._types.query_dsl.Query.of(qb -> qb
+			.hasChild(hc -> hc
+				.type("vote")
+				.queryName("vote")
+				.query(matchAllQueryAsQuery())
+				.scoreMode(ChildScoreMode.None)
+			)))
+		.build();
+
+	return operations.search(query, Statement.class);
+}
+----
+====
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/misc.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/misc.adoc
new file mode 100644
index 0000000000..7f3ac8f0ff
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/misc.adoc
@@ -0,0 +1,453 @@
+[[elasticsearch.misc]]
+= Miscellaneous Elasticsearch Operation Support
+
+This chapter covers additional support for Elasticsearch operations that cannot be directly accessed via the repository interface.
+It is recommended to add those operations as a custom implementation as described in xref:repositories/custom-implementations.adoc[].
+
+[[elasticsearc.misc.index.settings]]
+== Index settings
+
+When creating Elasticsearch indices with Spring Data Elasticsearch, different index settings can be defined by using the `@Setting` annotation.
+The following arguments are available:
+
+* `useServerConfiguration` does not send any settings parameters, so the Elasticsearch server configuration determines them.
+* `settingPath` refers to a JSON file defining the settings that must be resolvable in the classpath
+* `shards` the number of shards to use, defaults to _1_
+* `replicas` the number of replicas, defaults to _1_
+* `refreshInterval`, defaults to _"1s"_
+* `indexStoreType`, defaults to _"fs"_
+
+It is as well possible to define https://www.elastic.co/guide/en/elasticsearch/reference/7.11/index-modules-index-sorting.html[index sorting] (check the linked Elasticsearch documentation for the possible field types and values):
+
+====
+[source,java]
+----
+@Document(indexName = "entities")
+@Setting(
+	sortFields = { "secondField", "firstField" }, <.>
+	sortModes = { Setting.SortMode.max, Setting.SortMode.min }, <.>
+	sortOrders = { Setting.SortOrder.desc, Setting.SortOrder.asc },
+	sortMissingValues = { Setting.SortMissing._last, Setting.SortMissing._first })
+class Entity {
+	@Nullable
+	@Id private String id;
+
+	@Nullable
+	@Field(name = "first_field", type = FieldType.Keyword)
+	private String firstField;
+
+	@Nullable @Field(name = "second_field", type = FieldType.Keyword)
+	private String secondField;
+
+	// getter and setter...
+} +---- + +<.> when defining sort fields, use the name of the Java property (_firstField_), not the name that might be defined for Elasticsearch (_first_field_) +<.> `sortModes`, `sortOrders` and `sortMissingValues` are optional, but if they are set, the number of entries must match the number of `sortFields` elements +==== + +[[elasticsearch.misc.mappings]] +== Index Mapping + +When Spring Data Elasticsearch creates the index mapping with the `IndexOperations.createMapping()` methods, it uses the annotations described in xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations[Mapping Annotation Overview], especially the `@Field` annotation. +In addition to that it is possible to add the `@Mapping` annotation to a class. +This annotation has the following properties: + +* `mappingPath` a classpath resource in JSON format; if this is not empty it is used as the mapping, no other mapping processing is done. +* `enabled` when set to false, this flag is written to the mapping and no further processing is done. +* `dateDetection` and `numericDetection` set the corresponding properties in the mapping when not set to `DEFAULT`. +* `dynamicDateFormats` when this String array is not empty, it defines the date formats used for automatic date detection. +* `runtimeFieldsPath` a classpath resource in JSON format containing the definition of runtime fields which is written to the index mappings, for example: + +==== +[source,json] +---- +{ + "day_of_week": { + "type": "keyword", + "script": { + "source": "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))" + } + } +} +---- +==== + +[[elasticsearch.misc.filter]] +== Filter Builder + +Filter Builder improves query speed. 
+ +==== +[source,java] +---- +private ElasticsearchOperations operations; + +IndexCoordinates index = IndexCoordinates.of("sample-index"); + +Query query = NativeQuery.builder() + .withQuery(q -> q + .matchAll(ma -> ma)) + .withFilter( q -> q + .bool(b -> b + .must(m -> m + .term(t -> t + .field("id") + .value(documentId)) + ))) + .build(); + +SearchHits sampleEntities = operations.search(query, SampleEntity.class, index); +---- +==== + +[[elasticsearch.scroll]] +== Using Scroll For Big Result Set + +Elasticsearch has a scroll API for getting big result set in chunks. +This is internally used by Spring Data Elasticsearch to provide the implementations of the ` SearchHitsIterator SearchOperations.searchForStream(Query query, Class clazz, IndexCoordinates index)` method. + +==== +[source,java] +---- +IndexCoordinates index = IndexCoordinates.of("sample-index"); + +Query searchQuery = NativeQuery.builder() + .withQuery(q -> q + .matchAll(ma -> ma)) + .withFields("message") + .withPageable(PageRequest.of(0, 10)) + .build(); + +SearchHitsIterator stream = elasticsearchOperations.searchForStream(searchQuery, SampleEntity.class, +index); + +List sampleEntities = new ArrayList<>(); +while (stream.hasNext()) { + sampleEntities.add(stream.next()); +} + +stream.close(); +---- +==== + +There are no methods in the `SearchOperations` API to access the scroll id, if it should be necessary to access this, the following methods of the `AbstractElasticsearchTemplate` can be used (this is the base implementation for the different `ElasticsearchOperations` implementations): + +==== +[source,java] +---- + +@Autowired ElasticsearchOperations operations; + +AbstractElasticsearchTemplate template = (AbstractElasticsearchTemplate)operations; + +IndexCoordinates index = IndexCoordinates.of("sample-index"); + +Query query = NativeQuery.builder() + .withQuery(q -> q + .matchAll(ma -> ma)) + .withFields("message") + .withPageable(PageRequest.of(0, 10)) + .build(); + +SearchScrollHits scroll = 
template.searchScrollStart(1000, query, SampleEntity.class, index);
+
+String scrollId = scroll.getScrollId();
+List sampleEntities = new ArrayList<>();
+while (scroll.hasSearchHits()) {
+	sampleEntities.addAll(scroll.getSearchHits());
+	scrollId = scroll.getScrollId();
+	scroll = template.searchScrollContinue(scrollId, 1000, SampleEntity.class);
+}
+template.searchScrollClear(scrollId);
+----
+====
+
+To use the Scroll API with repository methods, the return type must be defined as `Stream` in the Elasticsearch Repository.
+The implementation of the method will then use the scroll methods from the ElasticsearchTemplate.
+
+====
+[source,java]
+----
+interface SampleEntityRepository extends Repository {
+
+    Stream findBy();
+
+}
+----
+====
+
+[[elasticsearch.misc.sorts]]
+== Sort options
+
+In addition to the default sort options described in xref:repositories/query-methods-details.adoc#repositories.paging-and-sorting[Paging and Sorting], Spring Data Elasticsearch provides the class `org.springframework.data.elasticsearch.core.query.Order` which derives from `org.springframework.data.domain.Sort.Order`.
+It offers additional parameters that can be sent to Elasticsearch when specifying the sorting of the result (see https://www.elastic.co/guide/en/elasticsearch/reference/7.15/sort-search-results.html).
+
+There is also the `org.springframework.data.elasticsearch.core.query.GeoDistanceOrder` class which can be used to have the result of a search operation ordered by geographical distance.
+
+If the class to be retrieved has a `GeoPoint` property named _location_, the following `Sort` would sort the results by distance to the given point:
+
+====
+[source,java]
+----
+Sort.by(new GeoDistanceOrder("location", new GeoPoint(48.137154, 11.5761247)))
+----
+====
+
+[[elasticsearch.misc.runtime-fields]]
+== Runtime Fields
+
+From version 7.12 on, Elasticsearch has added the feature of runtime fields (https://www.elastic.co/guide/en/elasticsearch/reference/7.12/runtime.html).
+Spring Data Elasticsearch supports this in two ways: + +[[elasticsearch.misc.runtime-fields.index-mappings]] +=== Runtime field definitions in the index mappings + +The first way to define runtime fields is by adding the definitions to the index mappings (see https://www.elastic.co/guide/en/elasticsearch/reference/7.12/runtime-mapping-fields.html). +To use this approach in Spring Data Elasticsearch the user must provide a JSON file that contains the corresponding definition, for example: + +.runtime-fields.json +==== +[source,json] +---- +{ + "day_of_week": { + "type": "keyword", + "script": { + "source": "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))" + } + } +} +---- +==== + +The path to this JSON file, which must be present on the classpath, must then be set in the `@Mapping` annotation of the entity: + +==== +[source,java] +---- +@Document(indexName = "runtime-fields") +@Mapping(runtimeFieldsPath = "/runtime-fields.json") +public class RuntimeFieldEntity { + // properties, getter, setter,... +} + +---- +==== + +[[elasticsearch.misc.runtime-fields.query]] +=== Runtime fields definitions set on a Query + +The second way to define runtime fields is by adding the definitions to a search query (see https://www.elastic.co/guide/en/elasticsearch/reference/7.12/runtime-search-request.html). 
+The following code example shows how to do this with Spring Data Elasticsearch:
+
+The entity used is a simple object that has a `price` property:
+
+====
+[source,java]
+----
+@Document(indexName = "some_index_name")
+public class SomethingToBuy {
+
+	private @Id @Nullable String id;
+	@Nullable @Field(type = FieldType.Text) private String description;
+	@Nullable @Field(type = FieldType.Double) private Double price;
+
+	// getter and setter
+}
+
+----
+====
+
+The following query uses a runtime field that calculates a `priceWithTax` value by adding 19% to the price and uses this value in the search query to find all entities where `priceWithTax` is greater than or equal to a given value:
+
+====
+[source,java]
+----
+RuntimeField runtimeField = new RuntimeField("priceWithTax", "double", "emit(doc['price'].value * 1.19)");
+Query query = new CriteriaQuery(new Criteria("priceWithTax").greaterThanEqual(16.5));
+query.addRuntimeField(runtimeField);
+
+SearchHits searchHits = operations.search(query, SomethingToBuy.class);
+----
+====
+
+This works with every implementation of the `Query` interface.
+
+[[elasticsearch.misc.point-in-time]]
+== Point In Time (PIT) API
+
+`ElasticsearchOperations` supports the point in time API of Elasticsearch (see https://www.elastic.co/guide/en/elasticsearch/reference/8.3/point-in-time-api.html).
+The following code snippet shows how to use this feature with a fictional `Person` class: + +==== +[source,java] +---- +ElasticsearchOperations operations; // autowired +Duration tenSeconds = Duration.ofSeconds(10); + +String pit = operations.openPointInTime(IndexCoordinates.of("person"), tenSeconds); <.> + +// create query for the pit +Query query1 = new CriteriaQueryBuilder(Criteria.where("lastName").is("Smith")) + .withPointInTime(new Query.PointInTime(pit, tenSeconds)) <.> + .build(); +SearchHits searchHits1 = operations.search(query1, Person.class); +// do something with the data + +// create 2nd query for the pit, use the id returned in the previous result +Query query2 = new CriteriaQueryBuilder(Criteria.where("lastName").is("Miller")) + .withPointInTime( + new Query.PointInTime(searchHits1.getPointInTimeId(), tenSeconds)) <.> + .build(); +SearchHits searchHits2 = operations.search(query2, Person.class); +// do something with the data + +operations.closePointInTime(searchHits2.getPointInTimeId()); <.> + +---- + +<.> create a point in time for an index (can be multiple names) and a keep-alive duration and retrieve its id +<.> pass that id into the query to search together with the next keep-alive value +<.> for the next query, use the id returned from the previous search +<.> when done, close the point in time using the last returned id +==== + +[[elasticsearch.misc.searchtemplates]] +== Search Template support + +Use of the search template API is supported. +To use this, it first is necessary to create a stored script. +The `ElasticsearchOperations` interface extends `ScriptOperations` which provides the necessary functions. +The example used here assumes that we have `Person` entity with a property named `firstName`. 
+A search template script can be saved like this: + +==== +[source,java] +---- +import org.springframework.data.elasticsearch.core.ElasticsearchOperations; +import org.springframework.data.elasticsearch.core.script.Script; + +operations.putScript( <.> + Script.builder() + .withId("person-firstname") <.> + .withLanguage("mustache") <.> + .withSource(""" <.> + { + "query": { + "bool": { + "must": [ + { + "match": { + "firstName": "{{firstName}}" <.> + } + } + ] + } + }, + "from": "{{from}}", <.> + "size": "{{size}}" <.> + } + """) + .build() +); +---- + +<.> Use the `putScript()` method to store a search template script +<.> The name / id of the script +<.> Scripts that are used in search templates must be in the _mustache_ language. +<.> The script source +<.> The search parameter in the script +<.> Paging request offset +<.> Paging request size +==== + +To use a search template in a search query, Spring Data Elasticsearch provides the `SearchTemplateQuery`, an implementation of the `org.springframework.data.elasticsearch.core.query.Query` interface. + +NOTE: Although `SearchTemplateQuery` is an implementation of the `Query` interface, not all of the functionality provided by the base class is available for a `SearchTemplateQuery` like setting a `Pageable` or a `Sort`. Values for this functionality must be added to the stored script like shown in the following example for paging parameters. If these values are set on the `Query` object, they will be ignored. + +In the following code, we will add a call using a search template query to a custom repository implementation (see +xref:repositories/custom-implementations.adoc[]) as an example how this can be integrated into a repository call. 
+ +We first define the custom repository fragment interface: + +==== +[source,java] +---- +interface PersonCustomRepository { + SearchPage findByFirstNameWithSearchTemplate(String firstName, Pageable pageable); +} +---- +==== + +The implementation of this repository fragment looks like this: + +==== +[source,java] +---- +public class PersonCustomRepositoryImpl implements PersonCustomRepository { + + private final ElasticsearchOperations operations; + + public PersonCustomRepositoryImpl(ElasticsearchOperations operations) { + this.operations = operations; + } + + @Override + public SearchPage findByFirstNameWithSearchTemplate(String firstName, Pageable pageable) { + + var query = SearchTemplateQuery.builder() <.> + .withId("person-firstname") <.> + .withParams( + Map.of( <.> + "firstName", firstName, + "from", pageable.getOffset(), + "size", pageable.getPageSize() + ) + ) + .build(); + + SearchHits searchHits = operations.search(query, Person.class); <.> + + return SearchHitSupport.searchPageFor(searchHits, pageable); + } +} +---- + +<.> Create a `SearchTemplateQuery` +<.> Provide the id of the search template +<.> The parameters are passed in a `Map` +<.> Do the search in the same way as with the other query types. +==== + +[[elasticsearch.misc.nested-sort]] +== Nested sort +Spring Data Elasticsearch supports sorting within nested objects (https://www.elastic.co/guide/en/elasticsearch/reference/8.9/sort-search-results.html#nested-sorting) + +The following example, taken from the `org.springframework.data.elasticsearch.core.query.sort.NestedSortIntegrationTests` class, shows how to define the nested sort. 
+ +==== +[source,java] +---- +var filter = StringQuery.builder(""" + { "term": {"movies.actors.sex": "m"} } + """).build(); +var order = new org.springframework.data.elasticsearch.core.query.Order(Sort.Direction.DESC, + "movies.actors.yearOfBirth") + .withNested( + Nested.builder("movies") + .withNested( + Nested.builder("movies.actors") + .withFilter(filter) + .build()) + .build()); + +var query = Query.findAll().addSort(Sort.by(order)); + +---- +==== + +About the filter query: It is not possible to use a `CriteriaQuery` here, as this query would be converted into a Elasticsearch nested query which does not work in the filter context. So only `StringQuery` or `NativeQuery` can be used here. When using one of these, like the term query above, the Elasticsearch field names must be used, so take care, when these are redefined with the `@Field(name="...")` definition. + +For the definition of the order path and the nested paths, the Java entity property names should be used. diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/object-mapping.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/object-mapping.adoc new file mode 100644 index 0000000000..6ca12728c0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/object-mapping.adoc @@ -0,0 +1,478 @@ +[[elasticsearch.mapping]] += Elasticsearch Object Mapping + +Spring Data Elasticsearch Object Mapping is the process that maps a Java object - the domain entity - into the JSON representation that is stored in Elasticsearch and back. +The class that is internally used for this mapping is the +`MappingElasticsearchConverter`. + +[[elasticsearch.mapping.meta-model]] +== Meta Model Object Mapping + +The Metamodel based approach uses domain type information for reading/writing from/to Elasticsearch. +This allows to register `Converter` instances for specific domain type mapping. 
+ +[[elasticsearch.mapping.meta-model.annotations]] +=== Mapping Annotation Overview + +The `MappingElasticsearchConverter` uses metadata to drive the mapping of objects to documents. +The metadata is taken from the entity's properties which can be annotated. + +The following annotations are available: + +* `@Document`: Applied at the class level to indicate this class is a candidate for mapping to the database. +The most important attributes are (check the API documentation for the complete list of attributes): +** `indexName`: the name of the index to store this entity in. +This can contain a SpEL template expression like `"log-#{T(java.time.LocalDate).now().toString()}"` +** `createIndex`: flag whether to create an index on repository bootstrapping. +Default value is _true_. +See xref:elasticsearch/repositories/elasticsearch-repositories.adoc#elasticsearch.repositories.autocreation[Automatic creation of indices with the corresponding mapping] + + +* `@Id`: Applied at the field level to mark the field used for identity purpose. +* `@Transient`, `@ReadOnlyProperty`, `@WriteOnlyProperty`: see the following section xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations.read-write[Controlling which properties are written to and read from Elasticsearch] for detailed information. +* `@PersistenceConstructor`: Marks a given constructor - even a package protected one - to use when instantiating the object from the database. +Constructor arguments are mapped by name to the key values in the retrieved Document. 
+* `@Field`: Applied at the field level and defines properties of the field, most of the attributes map to the respective https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html[Elasticsearch Mapping] definitions (the following list is not complete, check the annotation Javadoc for a complete reference): +** `name`: The name of the field as it will be represented in the Elasticsearch document, if not set, the Java field name is used. +** `type`: The field type, can be one of _Text, Keyword, Long, Integer, Short, Byte, Double, Float, Half_Float, Scaled_Float, Date, Date_Nanos, Boolean, Binary, Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range, Ip_Range, Object, Nested, Ip, TokenCount, Percolator, Flattened, Search_As_You_Type_. +See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html[Elasticsearch Mapping Types]. +If the field type is not specified, it defaults to `FieldType.Auto`. +This means, that no mapping entry is written for the property and that Elasticsearch will add a mapping entry dynamically when the first data for this property is stored (check the Elasticsearch documentation for dynamic mapping rules). +** `format`: One or more built-in date formats, see the next section xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations.date-formats[Date format mapping]. +** `pattern`: One or more custom date formats, see the next section xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations.date-formats[Date format mapping]. +** `store`: Flag whether the original field value should be store in Elasticsearch, default value is _false_. +** `analyzer`, `searchAnalyzer`, `normalizer` for specifying custom analyzers and normalizer. +* `@GeoPoint`: Marks a field as _geo_point_ datatype. +Can be omitted if the field is an instance of the `GeoPoint` class. +* `@ValueConverter` defines a class to be used to convert the given property. 
+In difference to a registered Spring `Converter` this only converts the annotated property and not every property of the given type. + +The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. + +[[elasticsearch.mapping.meta-model.annotations.read-write]] +==== Controlling which properties are written to and read from Elasticsearch + +This section details the annotations that define if the value of a property is written to or read from Elasticsearch. + +`@Transient`: A property annotated with this annotation will not be written to the mapping, it's value will not be sent to Elasticsearch and when documents are returned from Elasticsearch, this property will not be set in the resulting entity. + +`@ReadOnlyProperty`: A property with this annotation will not have its value written to Elasticsearch, but when returning data, the property will be filled with the value returned in the document from Elasticsearch. +One use case for this are runtime fields defined in the index mapping. + +`@WriteOnlyProperty`: A property with this annotation will have its value stored in Elasticsearch but will not be set with any value when reading document. +This can be used for example for synthesized fields which should go into the Elasticsearch index but are not used elsewhere. + +[[elasticsearch.mapping.meta-model.annotations.date-formats]] +==== Date format mapping + +Properties that derive from `TemporalAccessor` or are of type `java.util.Date` must either have a `@Field` annotation of type `FieldType.Date` or a custom converter must be registered for this type. +This paragraph describes the use of +`FieldType.Date`. 
+ +There are two attributes of the `@Field` annotation that define which date format information is written to the mapping (also see https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html#built-in-date-formats[Elasticsearch Built In Formats] and https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html#custom-date-formats[Elasticsearch Custom Date Formats]) + +The `format` attribute is used to define at least one of the predefined formats. +If it is not defined, then a default value of __date_optional_time_ and _epoch_millis_ is used. + +The `pattern` attribute can be used to add additional custom format strings. +If you want to use only custom date formats, you must set the `format` property to empty `{}`. + +The following table shows the different attributes and the mapping created from their values: + +[cols=2*,options=header] +|=== +| annotation +| format string in Elasticsearch mapping + +| @Field(type=FieldType.Date) +| "date_optional_time\|\|epoch_millis", + +| @Field(type=FieldType.Date, format=DateFormat.basic_date) +| "basic_date" + +| @Field(type=FieldType.Date, format={DateFormat.basic_date, DateFormat.basic_time}) +| "basic_date\|\|basic_time" + +| @Field(type=FieldType.Date, pattern="dd.MM.uuuu") +| "date_optional_time\|\|epoch_millis\|\|dd.MM.uuuu", + +| @Field(type=FieldType.Date, format={}, pattern="dd.MM.uuuu") +| "dd.MM.uuuu" + +|=== + +NOTE: If you are using a custom date format, you need to use _uuuu_ for the year instead of _yyyy_. +This is due to a https://www.elastic.co/guide/en/elasticsearch/reference/current/migrate-to-java-time.html#java-time-migration-incompatible-date-formats[change in Elasticsearch 7]. + +Check the code of the `org.springframework.data.elasticsearch.annotations.DateFormat` enum for a complete list of predefined values and their patterns. 
+ +[[elasticsearch.mapping.meta-model.annotations.range]] +==== Range types + +When a field is annotated with a type of one of _Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range,_ or _Ip_Range_ the field must be an instance of a class that will be mapped to an Elasticsearch range, for example: + +==== +[source,java] +---- +class SomePersonData { + + @Field(type = FieldType.Integer_Range) + private ValidAge validAge; + + // getter and setter +} + +class ValidAge { + @Field(name="gte") + private Integer from; + + @Field(name="lte") + private Integer to; + + // getter and setter +} +---- +==== + +As an alternative Spring Data Elasticsearch provides a `Range` class so that the previous example can be written as: + +==== +[source,java] +---- +class SomePersonData { + + @Field(type = FieldType.Integer_Range) + private Range validAge; + + // getter and setter +} +---- +==== + +Supported classes for the type `` are `Integer`, `Long`, `Float`, `Double`, `Date` and classes that implement the +`TemporalAccessor` interface. + +[[elasticsearch.mapping.meta-model.annotations.mapped-names]] +==== Mapped field names + +Without further configuration, Spring Data Elasticsearch will use the property name of an object as field name in Elasticsearch. +This can be changed for individual field by using the `@Field` annotation on that property. + +It is also possible to define a `FieldNamingStrategy` in the configuration of the client (xref:elasticsearch/clients.adoc[Elasticsearch Clients]). +If for example a `SnakeCaseFieldNamingStrategy` is configured, the property _sampleProperty_ of the object would be mapped to _sample_property_ in Elasticsearch. +A `FieldNamingStrategy` applies to all entities; it can be overwritten by setting a specific name with `@Field` on a property. + +[[elasticsearch.mapping.meta-model.annotations.non-field-backed-properties]] +==== Non-field-backed properties + +Normally the properties used in an entity are fields of the entity class. 
+There might be cases, when a property value is calculated in the entity and should be stored in Elasticsearch. +In this case, the getter method (`getProperty()`) can be annotated with the `@Field` annotation, in addition to that the method must be annotated with `@AccessType(AccessType.Type +.PROPERTY)`. +The third annotation that is needed in such a case is `@WriteOnlyProperty`, as such a value is only written to Elasticsearch. +A full example: + +==== +[source,java] +---- +@Field(type = Keyword) +@WriteOnlyProperty +@AccessType(AccessType.Type.PROPERTY) +public String getProperty() { + return "some value that is calculated here"; +} +---- +==== + +[[elasticsearch.mapping.meta-model.annotations.misc]] +==== Other property annotations + +[[indexedindexname]] +===== @IndexedIndexName + +This annotation can be set on a String property of an entity. +This property will not be written to the mapping, it will not be stored in Elasticsearch and its value will not be read from an Elasticsearch document. +After an entity is persisted, for example with a call to `ElasticsearchOperations.save(T entity)`, the entity returned from that call will contain the name of the index that an entity was saved to in that property. +This is useful when the index name is dynamically set by a bean, or when writing to a write alias. + +Putting some value into such a property does not set the index into which an entity is stored! + +[[elasticsearch.mapping.meta-model.rules]] +=== Mapping Rules + +[[elasticsearch.mapping.meta-model.rules.typehints]] +==== Type Hints + +Mapping uses _type hints_ embedded in the document sent to the server to allow generic type mapping. +Those type hints are represented as `_class` attributes within the document and are written for each aggregate root. 
+ +.Type Hints +==== +[source,java] +---- +public class Person { <1> + @Id String id; + String firstname; + String lastname; +} +---- + +[source,json] +---- +{ + "_class" : "com.example.Person", <1> + "id" : "cb7bef", + "firstname" : "Sarah", + "lastname" : "Connor" +} +---- + +<1> By default the domain types class name is used for the type hint. +==== + +Type hints can be configured to hold custom information. +Use the `@TypeAlias` annotation to do so. + +NOTE: Make sure to add types with `@TypeAlias` to the initial entity set (`AbstractElasticsearchConfiguration#getInitialEntitySet`) to already have entity information available when first reading data from the store. + +.Type Hints with Alias +==== +[source,java] +---- +@TypeAlias("human") <1> +public class Person { + + @Id String id; + // ... +} +---- + +[source,json] +---- +{ + "_class" : "human", <1> + "id" : ... +} +---- + +<1> The configured alias is used when writing the entity. +==== + +NOTE: Type hints will not be written for nested Objects unless the properties type is `Object`, an interface or the actual value type does not match the properties declaration. + +[[disabling-type-hints]] +===== Disabling Type Hints + +It may be necessary to disable writing of type hints when the index that should be used already exists without having the type hints defined in its mapping and with the mapping mode set to strict. +In this case, writing the type hint will produce an error, as the field cannot be added automatically. + +Type hints can be disabled for the whole application by overriding the method `writeTypeHints()` in a configuration class derived from `AbstractElasticsearchConfiguration` (see xref:elasticsearch/clients.adoc[Elasticsearch Clients]). + +As an alternative they can be disabled for a single index with the `@Document` annotation: + +==== +[source,java] +---- +@Document(indexName = "index", writeTypeHint = WriteTypeHint.FALSE) +---- +==== + +WARNING: We strongly advise against disabling Type Hints. 
+Only do this if you are forced to. +Disabling type hints can lead to documents not being retrieved correctly from Elasticsearch in case of polymorphic data or document retrieval may fail completely. + +[[elasticsearch.mapping.meta-model.rules.geospatial]] +==== Geospatial Types + +Geospatial types like `Point` & `GeoPoint` are converted into _lat/lon_ pairs. + +.Geospatial types +==== +[source,java] +---- +public class Address { + String city, street; + Point location; +} +---- + +[source,json] +---- +{ + "city" : "Los Angeles", + "street" : "2800 East Observatory Road", + "location" : { "lat" : 34.118347, "lon" : -118.3026284 } +} +---- +==== + +[[elasticsearch.mapping.meta-model.rules.geojson]] +==== GeoJson Types + +Spring Data Elasticsearch supports the GeoJson types by providing an interface `GeoJson` and implementations for the different geometries. +They are mapped to Elasticsearch documents according to the GeoJson specification. +The corresponding properties of the entity are specified in the index mappings as `geo_shape` when the index mappings is written. 
(check the https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-shape.html[Elasticsearch documentation] as well)
+
+.GeoJson types
+====
+[source,java]
+----
+public class Address {
+
+ String city, street;
+ GeoJsonPoint location;
+}
+----
+
+[source,json]
+----
+{
+ "city": "Los Angeles",
+ "street": "2800 East Observatory Road",
+ "location": {
+ "type": "Point",
+ "coordinates": [-118.3026284, 34.118347]
+ }
+}
+----
+====
+
+The following GeoJson types are implemented:
+
+* `GeoJsonPoint`
+* `GeoJsonMultiPoint`
+* `GeoJsonLineString`
+* `GeoJsonMultiLineString`
+* `GeoJsonPolygon`
+* `GeoJsonMultiPolygon`
+* `GeoJsonGeometryCollection`
+
+[[elasticsearch.mapping.meta-model.rules.collections]]
+==== Collections
+
+For values inside Collections apply the same mapping rules as for aggregate roots when it comes to _type hints_ and xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.conversions[Custom Conversions].
+
+.Collections
+====
+[source,java]
+----
+public class Person {
+
+ // ...
+
+ List friends;
+
+}
+----
+
+[source,json]
+----
+{
+ // ...
+
+ "friends" : [ { "firstname" : "Kyle", "lastname" : "Reese" } ]
+}
+----
+====
+
+[[elasticsearch.mapping.meta-model.rules.maps]]
+==== Maps
+
+For values inside Maps apply the same mapping rules as for aggregate roots when it comes to _type hints_ and xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.conversions[Custom Conversions].
+However, the Map key needs to be a String to be processed by Elasticsearch.
+
+.Maps
+====
+[source,java]
+----
+public class Person {
+
+ // ...
+
+ Map knownLocations;
+
+}
+----
+
+[source,json]
+----
+{
+ // ... 
+ + "knownLocations" : { + "arrivedAt" : { + "city" : "Los Angeles", + "street" : "2800 East Observatory Road", + "location" : { "lat" : 34.118347, "lon" : -118.3026284 } + } + } +} +---- +==== + +[[elasticsearch.mapping.meta-model.conversions]] +=== Custom Conversions + +Looking at the `Configuration` from the xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[previous section] `ElasticsearchCustomConversions` allows registering specific rules for mapping domain and simple types. + +.Meta Model Object Mapping Configuration +==== +[source,java] +---- +@Configuration +public class Config extends ElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() // + .connectedTo("localhost:9200") // + .build(); + } + + @Bean + @Override + public ElasticsearchCustomConversions elasticsearchCustomConversions() { + return new ElasticsearchCustomConversions( + Arrays.asList(new AddressToMap(), new MapToAddress())); <1> + } + + @WritingConverter <2> + static class AddressToMap implements Converter> { + + @Override + public Map convert(Address source) { + + LinkedHashMap target = new LinkedHashMap<>(); + target.put("ciudad", source.getCity()); + // ... + + return target; + } + } + + @ReadingConverter <3> + static class MapToAddress implements Converter, Address> { + + @Override + public Address convert(Map source) { + + // ... + return address; + } + } +} +---- + +[source,json] +---- +{ + "ciudad" : "Los Angeles", + "calle" : "2800 East Observatory Road", + "localidad" : { "lat" : 34.118347, "lon" : -118.3026284 } +} +---- + +<1> Add `Converter` implementations. +<2> Set up the `Converter` used for writing `DomainType` to Elasticsearch. +<3> Set up the `Converter` used for reading `DomainType` from search result. 
+==== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/reactive-template.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/reactive-template.adoc new file mode 100644 index 0000000000..8be04ff584 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/reactive-template.adoc @@ -0,0 +1,65 @@ +[[elasticsearch.reactive.operations]] += Reactive Elasticsearch Operations + +`ReactiveElasticsearchOperations` is the gateway to executing high level commands against an Elasticsearch cluster using the `ReactiveElasticsearchClient`. + +The `ReactiveElasticsearchTemplate` is the default implementation of `ReactiveElasticsearchOperations`. + +To get started the `ReactiveElasticsearchOperations` needs to know about the actual client to work with. +Please see xref:elasticsearch/clients.adoc#elasticsearch.clients.reactiverestclient[Reactive Rest Client] for details on the client and how to configure it. + +[[elasticsearch.reactive.operations.usage]] +== Reactive Operations Usage + +`ReactiveElasticsearchOperations` lets you save, find and delete your domain objects and map those objects to documents stored in Elasticsearch. + +Consider the following: + +.Use the ReactiveElasticsearchOperations +==== +[source,java] +---- +@Document(indexName = "marvel") +public class Person { + + private @Id String id; + private String name; + private int age; + // Getter/Setter omitted... +} +---- + +[source,java] +---- + +ReactiveElasticsearchOperations operations; + +// ... + +operations.save(new Person("Bruce Banner", 42)) <.> + .doOnNext(System.out::println) + .flatMap(person -> operations.get(person.id, Person.class)) <.> + .doOnNext(System.out::println) + .flatMap(person -> operations.delete(person)) <.> + .doOnNext(System.out::println) + .flatMap(id -> operations.count(Person.class)) <.> + .doOnNext(System.out::println) + .subscribe(); <.> +---- + +The above outputs the following sequence on the console. 
+ +[source,text] +---- +> Person(id=QjWCWWcBXiLAnp77ksfR, name=Bruce Banner, age=42) +> Person(id=QjWCWWcBXiLAnp77ksfR, name=Bruce Banner, age=42) +> QjWCWWcBXiLAnp77ksfR +> 0 +---- + +<.> Insert a new `Person` document into the _marvel_ index . The `id` is generated on server side and set into the instance returned. +<.> Lookup the `Person` with matching `id` in the _marvel_ index. +<.> Delete the `Person` with matching `id`, extracted from the given instance, in the _marvel_ index. +<.> Count the total number of documents in the _marvel_ index. +<.> Don't forget to _subscribe()_. +==== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/cdi-integration.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/cdi-integration.adoc new file mode 100644 index 0000000000..003c394dda --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/cdi-integration.adoc @@ -0,0 +1,35 @@ +[[elasticsearch.cdi]] += CDI Integration + +The Spring Data Elasticsearch repositories can also be set up using CDI functionality. + +.Spring Data Elasticsearch repositories using CDI +==== +[source,java] +---- +class ElasticsearchTemplateProducer { + + @Produces + @ApplicationScoped + public ElasticsearchOperations createElasticsearchTemplate() { + // ... <1> + } +} + +class ProductService { + + private ProductRepository repository; <2> + public Page findAvailableBookByName(String name, Pageable pageable) { + return repository.findByAvailableTrueAndNameStartingWith(name, pageable); + } + @Inject + public void setRepository(ProductRepository repository) { + this.repository = repository; + } +} +---- + +<1> Create a component by using the same calls as are used in the xref:elasticsearch/template.adoc[Elasticsearch Operations] chapter. +<2> Let the CDI framework inject the Repository into your class. 
+ +==== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc new file mode 100644 index 0000000000..1d08868c10 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc @@ -0,0 +1,197 @@ +[[elasticsearch.repositories]] += Elasticsearch Repositories + +This chapter includes details of the Elasticsearch repository implementation. + +.The sample `Book` entity +==== +[source,java] +---- +@Document(indexName="books") +class Book { + @Id + private String id; + + @Field(type = FieldType.Text) + private String name; + + @Field(type = FieldType.Text) + private String summary; + + @Field(type = FieldType.Integer) + private Integer price; + + // getter/setter ... +} +---- +==== + +[[elasticsearch.repositories.autocreation]] +== Automatic creation of indices with the corresponding mapping + +The `@Document` annotation has an argument `createIndex`. +If this argument is set to true - which is the default value - Spring Data Elasticsearch will during bootstrapping the repository support on application startup check if the index defined by the `@Document` annotation exists. + +If it does not exist, the index will be created and the mappings derived from the entity's annotations (see xref:elasticsearch/object-mapping.adoc[Elasticsearch Object Mapping]) will be written to the newly created index. +Details of the index that will be created can be set by using the `@Setting` annotation, refer to xref:elasticsearch/misc.adoc#elasticsearc.misc.index.settings[Index settings] for further information. 
+ + + +[[elasticsearch.repositories.annotations]] +== Annotations for repository methods + +[[elasticsearch.repositories.annotations.highlight]] +=== @Highlight + +The `@Highlight` annotation on a repository method defines for which fields of the returned entity highlighting should be included.To search for some text in a `Book` 's name or summary and have the found data highlighted, the following repository method can be used: + +==== +[source,java] +---- +interface BookRepository extends Repository { + + @Highlight(fields = { + @HighlightField(name = "name"), + @HighlightField(name = "summary") + }) + SearchHits findByNameOrSummary(String text, String summary); +} +---- +==== + +It is possible to define multiple fields to be highlighted like above, and both the `@Highlight` and the `@HighlightField` annotation can further be customized with a `@HighlightParameters` annotation. Check the Javadocs for the possible configuration options. + +In the search results the highlight data can be retrieved from the `SearchHit` class. + +[[elasticsearch.repositories.annotations.sourcefilters]] +=== @SourceFilters + +Sometimes the user does not need to have all the properties of an entity returned from a search but only a subset. +Elasticsearch provides source filtering to reduce the amount of data that is transferred across the network to the +application. + +When working with `Query` implementations and the `ElasticsearchOperations` this is easily possible by setting a +source filter on the query. + +When using repository methods there is the `@SourceFilters` annotation: + +==== +[source,java] +---- +interface BookRepository extends Repository { + + @SourceFilters(includes = "name") + SearchHits findByName(String text); +} +---- +==== + +In this example, all the properties of the returned `Book` objects would be `null` except the name. 
+ +[[elasticsearch.annotation]] +== Annotation based configuration + +The Spring Data Elasticsearch repositories support can be activated using an annotation through JavaConfig. + +.Spring Data Elasticsearch repositories using JavaConfig +==== +[source,java] +---- +@Configuration +@EnableElasticsearchRepositories( <1> + basePackages = "org.springframework.data.elasticsearch.repositories" + ) +static class Config { + + @Bean + public ElasticsearchOperations elasticsearchTemplate() { <2> + // ... + } +} + +class ProductService { + + private ProductRepository repository; <3> + + public ProductService(ProductRepository repository) { + this.repository = repository; + } + + public Page findAvailableBookByName(String name, Pageable pageable) { + return repository.findByAvailableTrueAndNameStartingWith(name, pageable); + } +} +---- + +<1> The `EnableElasticsearchRepositories` annotation activates the Repository support. +If no base package is configured, it will use the one of the configuration class it is put on. +<2> Provide a Bean named `elasticsearchTemplate` of type `ElasticsearchOperations` by using one of the configurations shown in the xref:elasticsearch/template.adoc[Elasticsearch Operations] chapter. +<3> Let Spring inject the Repository bean into your class. +==== + + +[[elasticsearch.namespace]] +== Spring Namespace + +The Spring Data Elasticsearch module contains a custom namespace allowing definition of repository beans as well as elements for instantiating a `ElasticsearchServer` . + +Using the `repositories` element looks up Spring Data repositories as described in xref:repositories/create-instances.adoc[]. + +.Setting up Elasticsearch repositories using Namespace +==== +[source,xml] +---- + + + + + + +---- +==== + +Using the `Transport Client` or `Rest Client` element registers an instance of `Elasticsearch Server` in the context. 
+ +.Transport Client using Namespace +==== +[source,xml] +---- + + + + + + +---- +==== + +.Rest Client using Namespace +==== +[source,xml] +---- + + + + + + +---- +==== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repository-queries.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repository-queries.adoc new file mode 100644 index 0000000000..b22e17522d --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repository-queries.adoc @@ -0,0 +1,592 @@ +[[elasticsearch.query-methods]] += Query methods + +[[elasticsearch.query-methods.finders]] +== Query lookup strategies + +The Elasticsearch module supports all basic query building feature as string queries, native search queries, criteria based queries or have it being derived from the method name. + +[[elasticsearch.query-methods.finders.declared]] +=== Declared queries + +Deriving the query from the method name is not always sufficient and/or may result in unreadable method names. +In this case one might make use of the `@Query` annotation (see xref:elasticsearch/repositories/elasticsearch-repository-queries.adoc#elasticsearch.query-methods.at-query[Using the @Query Annotation] ). + +Another possibility is the use of a search-template, (see xref:elasticsearch/repositories/elasticsearch-repository-queries.adoc#elasticsearch.query-methods.at-searchtemplate-query[Using the @SearchTemplateQuery Annotation] ). + +[[elasticsearch.query-methods.criterions]] +== Query creation + +Generally the query creation mechanism for Elasticsearch works as described in xref:repositories/query-methods-details.adoc[]. 
+Here's a short example of what a Elasticsearch query method translates into: + +.Query creation from method names +==== +[source,java] +---- +interface BookRepository extends Repository { + List findByNameAndPrice(String name, Integer price); +} +---- +==== + +The method name above will be translated into the following Elasticsearch json query + +[source] +---- +{ + "query": { + "bool" : { + "must" : [ + { "query_string" : { "query" : "?", "fields" : [ "name" ] } }, + { "query_string" : { "query" : "?", "fields" : [ "price" ] } } + ] + } + } +} +---- + +A list of supported keywords for Elasticsearch is shown below. + +[cols="1,2,3",options="header"] +.Supported keywords inside method names +|=== +| Keyword +| Sample +| Elasticsearch Query String + +| `And` +| `findByNameAndPrice` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "?", "fields" : [ "name" ] } }, +{ "query_string" : { "query" : "?", "fields" : [ "price" ] } } +] +} +}}` + +| `Or` +| `findByNameOrPrice` +| `{ "query" : { +"bool" : { +"should" : [ +{ "query_string" : { "query" : "?", "fields" : [ "name" ] } }, +{ "query_string" : { "query" : "?", "fields" : [ "price" ] } } +] +} +}}` + +| `Is` +| `findByName` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "?", "fields" : [ "name" ] } } +] +} +}}` + +| `Not` +| `findByNameNot` +| `{ "query" : { +"bool" : { +"must_not" : [ +{ "query_string" : { "query" : "?", "fields" : [ "name" ] } } +] +} +}}` + +| `Between` +| `findByPriceBetween` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : ?, "to" : ?, "include_lower" : true, "include_upper" : true } } } +] +} +}}` + +| `LessThan` +| `findByPriceLessThan` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : null, "to" : ?, "include_lower" : true, "include_upper" : false } } } +] +} +}}` + +| `LessThanEqual` +| `findByPriceLessThanEqual` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : 
null, "to" : ?, "include_lower" : true, "include_upper" : true } } } +] +} +}}` + +| `GreaterThan` +| `findByPriceGreaterThan` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : ?, "to" : null, "include_lower" : false, "include_upper" : true } } } +] +} +}}` + + +| `GreaterThanEqual` +| `findByPriceGreaterThanEqual` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : ?, "to" : null, "include_lower" : true, "include_upper" : true } } } +] +} +}}` + +| `Before` +| `findByPriceBefore` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : null, "to" : ?, "include_lower" : true, "include_upper" : true } } } +] +} +}}` + +| `After` +| `findByPriceAfter` +| `{ "query" : { +"bool" : { +"must" : [ +{"range" : {"price" : {"from" : ?, "to" : null, "include_lower" : true, "include_upper" : true } } } +] +} +}}` + +| `Like` +| `findByNameLike` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "?*", "fields" : [ "name" ] }, "analyze_wildcard": true } +] +} +}}` + +| `StartingWith` +| `findByNameStartingWith` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "?*", "fields" : [ "name" ] }, "analyze_wildcard": true } +] +} +}}` + +| `EndingWith` +| `findByNameEndingWith` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "*?", "fields" : [ "name" ] }, "analyze_wildcard": true } +] +} +}}` + +| `Contains/Containing` +| `findByNameContaining` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "\*?*", "fields" : [ "name" ] }, "analyze_wildcard": true } +] +} +}}` + +| `In` (when annotated as FieldType.Keyword) +| `findByNameIn(Collectionnames)` +| `{ "query" : { +"bool" : { +"must" : [ +{"bool" : {"must" : [ +{"terms" : {"name" : ["?","?"]}} +] +} +} +] +} +}}` + + +| `In` +| `findByNameIn(Collectionnames)` +| `{ "query": {"bool": {"must": [{"query_string":{"query": "\"?\" \"?\"", "fields": ["name"]}}]}}}` + +| `NotIn` (when 
annotated as FieldType.Keyword) +| `findByNameNotIn(Collectionnames)` +| `{ "query" : { +"bool" : { +"must" : [ +{"bool" : {"must_not" : [ +{"terms" : {"name" : ["?","?"]}} +] +} +} +] +} +}}` + +| `NotIn` +| `findByNameNotIn(Collectionnames)` +| `{"query": {"bool": {"must": [{"query_string": {"query": "NOT(\"?\" \"?\")", "fields": ["name"]}}]}}}` + +| `True` +| `findByAvailableTrue` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "true", "fields" : [ "available" ] } } +] +} +}}` + +| `False` +| `findByAvailableFalse` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "false", "fields" : [ "available" ] } } +] +} +}}` + +| `OrderBy` +| `findByAvailableTrueOrderByNameDesc` +| `{ "query" : { +"bool" : { +"must" : [ +{ "query_string" : { "query" : "true", "fields" : [ "available" ] } } +] +} +}, "sort":[{"name":{"order":"desc"}}] +}` + +| `Exists` +| `findByNameExists` +| `{"query":{"bool":{"must":[{"exists":{"field":"name"}}]}}}` + +| `IsNull` +| `findByNameIsNull` +| `{"query":{"bool":{"must_not":[{"exists":{"field":"name"}}]}}}` + +| `IsNotNull` +| `findByNameIsNotNull` +| `{"query":{"bool":{"must":[{"exists":{"field":"name"}}]}}}` + +| `IsEmpty` +| `findByNameIsEmpty` +| `{"query":{"bool":{"must":[{"bool":{"must":[{"exists":{"field":"name"}}],"must_not":[{"wildcard":{"name":{"wildcard":"*"}}}]}}]}}}` + +| `IsNotEmpty` +| `findByNameIsNotEmpty` +| `{"query":{"bool":{"must":[{"wildcard":{"name":{"wildcard":"*"}}}]}}}` + +|=== + +NOTE: Methods names to build Geo-shape queries taking `GeoJson` parameters are not supported. +Use `ElasticsearchOperations` with `CriteriaQuery` in a custom repository implementation if you need to have such a function in a repository. 
+ +[[elasticsearch.query-methods.return-types]] +== Method return types + +Repository methods can be defined to have the following return types for returning multiple Elements: + +* `List` +* `Stream` +* `SearchHits` +* `List>` +* `Stream>` +* `SearchPage` + +[[elasticsearch.query-methods.at-query]] +== Using the @Query Annotation + +.Declare query on the method using the `@Query` annotation. +==== +The arguments passed to the method can be inserted into placeholders in the query string. +The placeholders are of the form `?0`, `?1`, `?2` etc. for the first, second, third parameter and so on. + +[source,java] +---- +interface BookRepository extends ElasticsearchRepository { + @Query("{\"match\": {\"name\": {\"query\": \"?0\"}}}") + Page findByName(String name,Pageable pageable); +} +---- + +The String that is set as the annotation argument must be a valid Elasticsearch JSON query. +It will be sent to Easticsearch as value of the query element; if for example the function is called with the parameter _John_, it would produce the following query body: + +[source,json] +---- +{ + "query": { + "match": { + "name": { + "query": "John" + } + } + } +} +---- +==== + +.`@Query` annotation on a method taking a Collection argument +==== +A repository method such as + +[source,java] +---- +@Query("{\"ids\": {\"values\": ?0 }}") +List getByIds(Collection ids); +---- + +would make an https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-ids-query.html[IDs query] to return all the matching documents. +So calling the method with a `List` of `["id1", "id2", "id3"]` would produce the query body + +[source,json] +---- +{ + "query": { + "ids": { + "values": ["id1", "id2", "id3"] + } + } +} +---- +==== + +[[elasticsearch.query-methods.at-query.spel]] +=== Using SpEL Expressions + +.Declare query on the method using the `@Query` annotation with SpEL expression. 
+==== +https://docs.spring.io/spring-framework/reference/core/expressions.html[SpEL expression] is also supported when defining query in `@Query`. + +[source,java] +---- +interface BookRepository extends ElasticsearchRepository { + @Query(""" + { + "bool":{ + "must":[ + { + "term":{ + "name": "#{#name}" + } + } + ] + } + } + """) + Page findByName(String name, Pageable pageable); +} +---- + +If for example the function is called with the parameter _John_, it would produce the following query body: + +[source,json] +---- +{ + "bool":{ + "must":[ + { + "term":{ + "name": "John" + } + } + ] + } +} +---- +==== + +.accessing parameter property. +==== +Supposing that we have the following class as query parameter type: + +[source,java] +---- +public record QueryParameter(String value) { +} +---- + +It's easy to access the parameter by `#` symbol, then reference the property `value` with a simple `.`: + +[source,java] +---- +interface BookRepository extends ElasticsearchRepository { + @Query(""" + { + "bool":{ + "must":[ + { + "term":{ + "name": "#{#parameter.value}" + } + } + ] + } + } + """) + Page findByName(QueryParameter parameter, Pageable pageable); +} +---- + +We can pass `new QueryParameter("John")` as the parameter now, and it will produce the same query string as above. +==== + +.accessing bean property. +==== +https://docs.spring.io/spring-framework/reference/core/expressions/language-ref/bean-references.html[Bean property] is also supported to access. +Given that there is a bean named `queryParameter` of type `QueryParameter`, we can access the bean with symbol `@` rather than `#`, and there is no need to declare a parameter of type `QueryParameter` in the query method: + +[source,java] +---- +interface BookRepository extends ElasticsearchRepository { + @Query(""" + { + "bool":{ + "must":[ + { + "term":{ + "name": "#{@queryParameter.value}" + } + } + ] + } + } + """) + Page findByName(Pageable pageable); +} +---- +==== + +.SpEL and `Collection` param. 
+==== +`Collection` parameter is also supported and is as easy to use as normal `String`, such as the following `terms` query: + +[source,java] +---- +interface BookRepository extends ElasticsearchRepository { + @Query(""" + { + "bool":{ + "must":[ + { + "terms":{ + "name": #{#names} + } + } + ] + } + } + """) + Page findByName(Collection names, Pageable pageable); +} +---- + +NOTE: collection values should not be quoted when declaring the elasticsearch json query. + +A collection of `names` like `List.of("name1", "name2")` will produce the following terms query: + +[source,json] +---- +{ + "bool":{ + "must":[ + { + "terms":{ + "name": ["name1", "name2"] + } + } + ] + } +} +---- +==== + +.access property in the `Collection` param. +==== +https://docs.spring.io/spring-framework/reference/core/expressions/language-ref/collection-projection.html[SpEL Collection Projection] is convenient to use when values in the `Collection` parameter is not plain `String`: + +[source,java] +---- +interface BookRepository extends ElasticsearchRepository { + @Query(""" + { + "bool":{ + "must":[ + { + "terms":{ + "name": #{#parameters.![value]} + } + } + ] + } + } + """) + Page findByName(Collection parameters, Pageable pageable); +} +---- + +This will extract all the `value` property values as a new `Collection` from `QueryParameter` collection, thus takes the same effect as above. 
+====
+
+.alter parameter name by using `@Param`
+====
+When accessing the parameter by SpEL, it's also useful to alter the parameter name to another one by `@Param` annotation in Spring Data:
+
+[source,java]
+----
+interface BookRepository extends ElasticsearchRepository {
+ @Query("""
+ {
+ "bool":{
+ "must":[
+ {
+ "terms":{
+ "name": #{#another.![value]}
+ }
+ }
+ ]
+ }
+ }
+ """)
+ Page findByName(@Param("another") Collection parameters, Pageable pageable);
+}
+----
+
+====
+
+[[elasticsearch.query-methods.at-searchtemplate-query]]
+== Using the @SearchTemplateQuery Annotation
+
+When using Elasticsearch search templates (see xref:elasticsearch/misc.adoc#elasticsearch.misc.searchtemplates[Search Template support]) it is possible to specify that a repository method should use a template by adding the `@SearchTemplateQuery` annotation to that method.
+
+Let's assume that there is a search template stored with the name "book-by-title" and this template needs a parameter named "title", then a repository method using that search template can be defined like this:
+
+[source,java]
+----
+interface BookRepository extends ElasticsearchRepository {
+ @SearchTemplateQuery(id = "book-by-title")
+ SearchHits findByTitle(String title);
+}
+----
+
+The parameters of the repository method are sent to the search template as key/value pairs where the key is the parameter name and the value is taken from the actual value when the method is invoked. 
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/reactive-elasticsearch-repositories.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/reactive-elasticsearch-repositories.adoc new file mode 100644 index 0000000000..beb39df0e6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/reactive-elasticsearch-repositories.adoc @@ -0,0 +1,128 @@ +[[elasticsearch.reactive.repositories]] += Reactive Elasticsearch Repositories + +Reactive Elasticsearch repository support builds on the core repository support explained in xref:repositories.adoc[] utilizing operations provided via xref:elasticsearch/reactive-template.adoc[] executed by a xref:elasticsearch/clients.adoc#elasticsearch.clients.reactiverestclient[Reactive REST Client]. + +Spring Data Elasticsearch reactive repository support uses https://projectreactor.io/[Project Reactor] as its reactive composition library of choice. + +There are 3 main interfaces to be used: + +* `ReactiveRepository` +* `ReactiveCrudRepository` +* `ReactiveSortingRepository` + +[[elasticsearch.reactive.repositories.usage]] +== Usage + +To access domain objects stored in a Elasticsearch using a `Repository`, just create an interface for it. +Before you can actually go on and do that you will need an entity. + +.Sample `Person` entity +==== +[source,java] +---- +public class Person { + + @Id + private String id; + private String firstname; + private String lastname; + private Address address; + + // … getters and setters omitted +} +---- +==== + +NOTE: Please note that the `id` property needs to be of type `String`. 
+
+.Basic repository interface to persist Person entities
+====
+[source]
+----
+interface ReactivePersonRepository extends ReactiveSortingRepository {
+
+ Flux findByFirstname(String firstname); <1>
+
+ Flux findByFirstname(Publisher firstname); <2>
+
+ Flux findByFirstnameOrderByLastname(String firstname); <3>
+
+ Flux findByFirstname(String firstname, Sort sort); <4>
+
+ Flux findByFirstname(String firstname, Pageable page); <5>
+
+ Mono findByFirstnameAndLastname(String firstname, String lastname); <6>
+
+ Mono findFirstByLastname(String lastname); <7>
+
+ @Query("{ \"bool\" : { \"must\" : { \"term\" : { \"lastname\" : \"?0\" } } } }")
+ Flux findByLastname(String lastname); <8>
+
+ Mono countByFirstname(String firstname); <9>
+
+ Mono existsByFirstname(String firstname); <10>
+
+ Mono deleteByFirstname(String firstname); <11>
+}
+----
+<1> The method shows a query for all people with the given `firstname`.
+<2> Finder method awaiting input from `Publisher` to bind parameter value for `firstname`.
+<3> Finder method ordering matching documents by `lastname`.
+<4> Finder method ordering matching documents by the expression defined via the `Sort` parameter.
+<5> Use `Pageable` to pass offset and sorting parameters to the database.
+<6> Finder method concatenating criteria using `And` / `Or` keywords.
+<7> Find the first matching entity.
+<8> The method shows a query for all people with the given `lastname` looked up by running the annotated `@Query` with given
+parameters.
+<9> Count all entities with matching `firstname`.
+<10> Check if at least one entity with matching `firstname` exists.
+<11> Delete all entities with matching `firstname`.
+====
+
+[[elasticsearch.reactive.repositories.configuration]]
+== Configuration
+
+For Java configuration, use the `@EnableReactiveElasticsearchRepositories` annotation. If no base package is configured,
+the infrastructure scans the package of the annotated configuration class. 
+
+The following listing shows how to use Java configuration for a repository:
+
+.Java configuration for repositories
+====
+[source,java]
+----
+@Configuration
+@EnableReactiveElasticsearchRepositories
+public class Config extends AbstractReactiveElasticsearchConfiguration {
+
+ @Override
+ public ReactiveElasticsearchClient reactiveElasticsearchClient() {
+ return ReactiveRestClients.create(ClientConfiguration.localhost());
+ }
+}
+----
+====
+
+Because the repository from the previous example extends `ReactiveSortingRepository`, all CRUD operations are available
+as well as methods for sorted access to the entities. Working with the repository instance is a matter of dependency
+injecting it into a client, as the following example shows:
+
+.Sorted access to Person entities
+====
+[source,java]
+----
+public class PersonRepositoryTests {
+
+ @Autowired ReactivePersonRepository repository;
+
+ @Test
+ public void sortsElementsCorrectly() {
+
+ Flux persons = repository.findAll(Sort.by(new Order(ASC, "lastname")));
+
+ // ...
+ }
+}
+----
+==== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/routing.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/routing.adoc new file mode 100644 index 0000000000..6b2b977a0d --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/routing.adoc @@ -0,0 +1,106 @@ +[[elasticsearch.routing]] += Routing values +
+When Elasticsearch stores a document in an index that has multiple shards, it determines the shard to use based on the _id_ of the document.
+Sometimes it is necessary to predefine that multiple documents should be indexed on the same shard (join-types, faster search for related data).
+For this Elasticsearch offers the possibility to define a routing, which is the value that should be used to calculate the shard from instead of the _id_. 
+ +Spring Data Elasticsearch supports routing definitions on storing and retrieving data in the following ways: + +[[elasticsearch.routing.join-types]] +== Routing on join-types + +When using join-types (see xref:elasticsearch/join-types.adoc[Join-Type implementation]), Spring Data Elasticsearch will automatically use the `parent` property of the entity's `JoinField` property as the value for the routing. + +This is correct for all the use-cases where the parent-child relationship has just one level. +If it is deeper, like a child-parent-grandparent relationship - like in the above example from _vote_ -> _answer_ -> _question_ - then the routing needs to be explicitly specified by using the techniques described in the next section (the _vote_ needs the _question.id_ as routing value). + +[[elasticsearch.routing.custom]] +== Custom routing values + +To define a custom routing for an entity, Spring Data Elasticsearch provides a `@Routing` annotation (reusing the `Statement` class from above): + +==== +[source,java] +---- +@Document(indexName = "statements") +@Routing("routing") <.> +public class Statement { + @Id + private String id; + + @Field(type = FieldType.Text) + private String text; + + @JoinTypeRelations( + relations = + { + @JoinTypeRelation(parent = "question", children = {"answer", "comment"}), + @JoinTypeRelation(parent = "answer", children = "vote") + } + ) + private JoinField relation; + + @Nullable + @Field(type = FieldType.Keyword) + private String routing; <.> + + // getter/setter... +} +---- +<.> This defines _"routing"_ as routing specification +<.> a property with the name _routing_ +==== + +If the `routing` specification of the annotation is a plain string and not a SpEL expression, it is interpreted as the name of a property of the entity, in the example it's the _routing_ property. +The value of this property will then be used as the routing value for all requests that use the entity.
+ +It is also possible to use a SpEL expression in the `@Document` annotation like this: + +==== +[source,java] +---- +@Document(indexName = "statements") +@Routing("@myBean.getRouting(#entity)") +public class Statement { + // all the needed stuff +} +---- +==== + +In this case the user needs to provide a bean with the name _myBean_ that has a method `String getRouting(Object)`. To reference the entity _"#entity"_ must be used in the SpEL expression, and the return value must be `null` or the routing value as a String. + +If plain property names and SpEL expressions are not enough to customize the routing definitions, it is possible to provide an implementation of the `RoutingResolver` interface. This can then be set on the `ElasticsearchOperations` instance: + +==== +[source,java] +---- +RoutingResolver resolver = ...; + +ElasticsearchOperations customOperations = operations.withRouting(resolver); + +---- +==== + +The `withRouting()` functions return a copy of the original `ElasticsearchOperations` instance with the customized routing set. + + +When a routing has been defined on an entity when it is stored in Elasticsearch, the same value must be provided when doing a _get_ or _delete_ operation. For methods that do not use an entity - like `get(ID)` or `delete(ID)` - the `ElasticsearchOperations.withRouting(RoutingResolver)` method can be used like this: + +==== +[source,java] +---- +String id = "someId"; +String routing = "theRoutingValue"; + +// get an entity +Statement s = operations + .withRouting(RoutingResolver.just(routing)) <.> + .get(id, Statement.class); + +// delete an entity +operations.withRouting(RoutingResolver.just(routing)).delete(id); + +---- +<.> `RoutingResolver.just(s)` returns a resolver that will just return the given String.
+==== diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/scripted-and-runtime-fields.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/scripted-and-runtime-fields.adoc new file mode 100644 index 0000000000..64d4a0c003 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/scripted-and-runtime-fields.adoc @@ -0,0 +1,228 @@ +[[elasticsearch.misc.scripted-and-runtime-fields]] += Scripted and runtime fields + +Spring Data Elasticsearch supports scripted fields and runtime fields. +Please refer to the Elasticsearch documentation about scripting (https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html) and runtime fields (https://www.elastic.co/guide/en/elasticsearch/reference/8.9/runtime.html) for detailed information about this. +In the context of Spring Data Elasticsearch you can use + +* scripted fields that are used to return fields that are calculated on the result documents and added to the returned document. +* runtime fields that are calculated on the stored documents and can be used in a query and/or be returned in the search result. + +The following code snippets will show what you can do (these show imperative code, but the reactive implementation works similarly). + +[[the-person-entity]] +== The person entity + +The entity that is used in these examples is a `Person` entity. +This entity has a `birthDate` and an `age` property. +Whereas the birthdate is fixed, the age depends on the time when a query is issued and needs to be calculated dynamically.
+ +==== +[source,java] +---- +import org.jspecify.annotations.Nullable; +import org.springframework.data.annotation.Id; +import org.springframework.data.elasticsearch.annotations.DateFormat; +import org.springframework.data.elasticsearch.annotations.Document; +import org.springframework.data.elasticsearch.annotations.Field; +import org.springframework.data.elasticsearch.annotations.ScriptedField; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; + +import static org.springframework.data.elasticsearch.annotations.FieldType.*; + +import java.lang.Integer; + +@Document(indexName = "persons") +public record Person( + @Id + @Nullable + String id, + @Field(type = Text) + String lastName, + @Field(type = Text) + String firstName, + @Field(type = Keyword) + String gender, + @Field(type = Date, format = DateFormat.basic_date) + LocalDate birthDate, + @Nullable + @ScriptedField Integer age <.> +) { + public Person(String id, String lastName, String firstName, String gender, String birthDate) { + this(id, <.> + lastName, + firstName, + gender, + LocalDate.parse(birthDate, DateTimeFormatter.ISO_LOCAL_DATE), + null); + } +} + +---- + +<.> the `age` property will be calculated and filled in search results. +<.> a convenience constructor to set up the test data. +==== + +Note that the `age` property is annotated with `@ScriptedField`. +This inhibits the writing of a corresponding entry in the index mapping and marks the property as a target to put a calculated field from a search response.
+ +[[the-repository-interface]] +== The repository interface + +The repository used in this example: + +==== +[source,java] +---- +public interface PersonRepository extends ElasticsearchRepository { + + SearchHits findAllBy(ScriptedField scriptedField); + + SearchHits findByGenderAndAgeLessThanEqual(String gender, Integer age, RuntimeField runtimeField); +} + +---- +==== + +[[the-service-class]] +== The service class + +The service class has a repository injected and an `ElasticsearchOperations` instance to show several ways of populating and using the `age` property. +We show the code split up in different pieces to put the explanations in between. + +==== +[source,java] +---- +import org.springframework.data.elasticsearch.core.ElasticsearchOperations; +import org.springframework.data.elasticsearch.core.SearchHits; +import org.springframework.data.elasticsearch.core.query.Criteria; +import org.springframework.data.elasticsearch.core.query.CriteriaQuery; +import org.springframework.data.elasticsearch.core.query.FetchSourceFilter; +import org.springframework.data.elasticsearch.core.query.RuntimeField; +import org.springframework.data.elasticsearch.core.query.ScriptData; +import org.springframework.data.elasticsearch.core.query.ScriptType; +import org.springframework.data.elasticsearch.core.query.ScriptedField; +import org.springframework.data.elasticsearch.core.query.StringQuery; +import org.springframework.stereotype.Service; + +import java.util.List; + +@Service +public class PersonService { + private final ElasticsearchOperations operations; + private final PersonRepository repository; + + public PersonService(ElasticsearchOperations operations, PersonRepository repository) { + this.operations = operations; + this.repository = repository; + } + + public void save() { <.> + List persons = List.of( + new Person("1", "Smith", "Mary", "f", "1987-05-03"), + new Person("2", "Smith", "Joshua", "m", "1982-11-17"), + new Person("3", "Smith", "Joanna", "f", "2018-03-27"), + new
Person("4", "Smith", "Alex", "m", "2020-08-01"), + new Person("5", "McNeill", "Fiona", "f", "1989-04-07"), + new Person("6", "McNeill", "Michael", "m", "1984-10-20"), + new Person("7", "McNeill", "Geraldine", "f", "2020-03-02"), + new Person("8", "McNeill", "Patrick", "m", "2022-07-04")); + + repository.saveAll(persons); + } +---- + +<.> a utility method to store some data in Elasticsearch. +==== + +[[scripted-fields]] +=== Scripted fields + +The next piece shows how to use a scripted field to calculate and return the age of the persons. +Scripted fields can only add something to the returned data, the age cannot be used in the query (see runtime fields for that). + +==== +[source,java] +---- + public SearchHits findAllWithAge() { + + var scriptedField = ScriptedField.of("age", <.> + ScriptData.of(b -> b + .withType(ScriptType.INLINE) + .withScript(""" + Instant currentDate = Instant.ofEpochMilli(new Date().getTime()); + Instant startDate = doc['birthDate'].value.toInstant(); + return (ChronoUnit.DAYS.between(startDate, currentDate) / 365); + """))); + + // version 1: use a direct query + var query = new StringQuery(""" + { "match_all": {} } + """); + query.addScriptedField(scriptedField); <.> + query.addSourceFilter(FetchSourceFilter.of(b -> b.withIncludes("*"))); <.> + + var result1 = operations.search(query, Person.class); <.> + + // version 2: use the repository + var result2 = repository.findAllBy(scriptedField); <.> + + return result1; + } +---- + +<.> define the `ScriptedField` that calculates the age of a person. +<.> when using a `Query`, add the scripted field to the query. +<.> when adding a scripted field to a `Query`, an additional source filter is needed to also retrieve the _normal_ fields from the document source. +<.> get the data where the `Person` entities now have the values set in their `age` property. +<.> when using the repository, all that needs to be done is adding the scripted field as method parameter.
+==== + +[[runtime-fields]] +=== Runtime fields + +When using runtime fields, the calculated value can be used in the query itself. +In the following code this is used to run a query for a given gender and maximum age of persons: + +==== +[source,java] +---- + public SearchHits findWithGenderAndMaxAge(String gender, Integer maxAge) { + + var runtimeField = new RuntimeField("age", "long", """ <.> + Instant currentDate = Instant.ofEpochMilli(new Date().getTime()); + Instant startDate = doc['birthDate'].value.toInstant(); + emit (ChronoUnit.DAYS.between(startDate, currentDate) / 365); + """); + + // variant 1 : use a direct query + var query = CriteriaQuery.builder(Criteria + .where("gender").is(gender) + .and("age").lessThanEqual(maxAge)) + .withRuntimeFields(List.of(runtimeField)) <.> + .withFields("age") <.> + .withSourceFilter(FetchSourceFilter.of(b -> b.withIncludes("*"))) <.> + .build(); + + var result1 = operations.search(query, Person.class); <.> + + // variant 2: use the repository <.> + var result2 = repository.findByGenderAndAgeLessThanEqual(gender, maxAge, runtimeField); + + return result1; + } +} +---- + +<.> define the runtime field that calculates the age of a person. // see https://asciidoctor.org/docs/user-manual/#builtin-attributes for builtin attributes. +<.> when using `Query`, add the runtime field. +<.> when adding a scripted field to a `Query`, an additional field parameter is needed to have the calculated value returned. +<.> when adding a scripted field to a `Query`, an additional source filter is needed to also retrieve the _normal_ fields from the document source. +<.> get the data filtered with the query and where the returned entites have the age property set. +<.> when using the repository, all that needs to be done is adding the runtime field as method parameter. 
+==== + +In addition to define a runtime fields on a query, they can also be defined in the index by setting the `runtimeFieldsPath` property of the `@Mapping` annotation to point to a JSON file that contains the runtime field definitions. diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/template.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/template.adoc new file mode 100644 index 0000000000..d5cbec5d09 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/template.adoc @@ -0,0 +1,241 @@ +[[elasticsearch.operations]] += Elasticsearch Operations + +Spring Data Elasticsearch uses several interfaces to define the operations that can be called against an Elasticsearch index (for a description of the reactive interfaces see xref:elasticsearch/reactive-template.adoc[]). + +* javadoc:org.springframework.data.elasticsearch.core.IndexOperations[] defines actions on index level like creating or deleting an index. +* javadoc:org.springframework.data.elasticsearch.core.DocumentOperations[] defines actions to store, update and retrieve entities based on their id. +* javadoc:org.springframework.data.elasticsearch.core.SearchOperations[] define the actions to search for multiple entities using queries +* javadoc:org.springframework.data.elasticsearch.core.ElasticsearchOperations[] combines the `DocumentOperations` and `SearchOperations` interfaces. + +These interfaces correspond to the structuring of the https://www.elastic.co/guide/en/elasticsearch/reference/current/rest-apis.html[Elasticsearch API]. + +The default implementations of the interfaces offer: + +* index management functionality. +* Read/Write mapping support for domain types. +* A rich query and criteria api. +* Resource management and Exception translation. + +[NOTE] +==== +.Index management and automatic creation of indices and mappings. 
+The `IndexOperations` interface and the provided implementation which can be obtained from an `ElasticsearchOperations` instance - for example with a call to `operations.indexOps(clazz)`- give the user the ability to create indices, put mappings or store template and alias information in the Elasticsearch cluster. +Details of the index that will be created can be set by using the `@Setting` annotation, refer to xref:elasticsearch/misc.adoc#elasticsearc.misc.index.settings[Index settings] for further information. + +**None of these operations are done automatically** by the implementations of `IndexOperations` or `ElasticsearchOperations`. +It is the user's responsibility to call the methods. + +There is support for automatic creation of indices and writing the mappings when using Spring Data Elasticsearch repositories, see xref:elasticsearch/repositories/elasticsearch-repositories.adoc#elasticsearch.repositories.autocreation[Automatic creation of indices with the corresponding mapping] + +==== + +[[elasticsearch.operations.usage]] +== Usage examples + +The example shows how to use an injected `ElasticsearchOperations` instance in a Spring REST controller. +The example assumes that `Person` is a class that is annotated with `@Document`, `@Id` etc (see xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations[Mapping Annotation Overview]). 
+ +.ElasticsearchOperations usage +==== +[source,java] +---- +@RestController +@RequestMapping("/") +public class TestController { + + private ElasticsearchOperations elasticsearchOperations; + + public TestController(ElasticsearchOperations elasticsearchOperations) { <.> + this.elasticsearchOperations = elasticsearchOperations; + } + + @PostMapping("/person") + public String save(@RequestBody Person person) { <.> + Person savedEntity = elasticsearchOperations.save(person); + return savedEntity.getId(); + } + + @GetMapping("/person/{id}") + public Person findById(@PathVariable("id") Long id) { <.> + Person person = elasticsearchOperations.get(id.toString(), Person.class); + return person; + } +} + +---- + +<.> Let Spring inject the provided `ElasticsearchOperations` bean in the constructor. +<.> Store some entity in the Elasticsearch cluster. +The id is read from the returned entity, as it might have been null in the `person` object and been created by Elasticsearch. +<.> Retrieve the entity with a get by id. +==== + +To see the full possibilities of `ElasticsearchOperations` please refer to the API documentation. + + +[[elasticsearch.operations.searchresulttypes]] +== Search Result Types + +When a document is retrieved with the methods of the `DocumentOperations` interface, just the found entity will be returned. +When searching with the methods of the `SearchOperations` interface, additional information is available for each entity, for example the _score_ or the _sortValues_ of the found entity. + +In order to return this information, each entity is wrapped in a `SearchHit` object that contains this entity-specific additional information. +These `SearchHit` objects themselves are returned within a `SearchHits` object which additionally contains informations about the whole search like the _maxScore_ or requested aggregations or the execution duration it took to complete the request. 
+The following classes and interfaces are now available: + +.SearchHit +Contains the following information: + +* Id +* Score +* Sort Values +* Highlight fields +* Inner hits (this is an embedded `SearchHits` object containing eventually returned inner hits) +* The retrieved entity of type + +.SearchHits +Contains the following information: + +* Number of total hits +* Total hits relation +* Maximum score +* A list of `SearchHit` objects +* Returned aggregations +* Returned suggest results + +.SearchPage +Defines a Spring Data `Page` that contains a `SearchHits` element and can be used for paging access using repository methods. + +.SearchScrollHits +Returned by the low level scroll API functions in `ElasticsearchRestTemplate`, it enriches a `SearchHits` with the Elasticsearch scroll id. + +.SearchHitsIterator +An Iterator returned by the streaming functions of the `SearchOperations` interface. + +.ReactiveSearchHits +`ReactiveSearchOperations` has methods returning a `Mono>`, this contains the same information as a `SearchHits` object, but will provide the contained `SearchHit` objects as a `Flux>` and not as a list. + +[[elasticsearch.operations.queries]] +== Queries + +Almost all of the methods defined in the `SearchOperations` and `ReactiveSearchOperations` interface take a `Query` parameter that defines the query to execute for searching. `Query` is an interface and Spring Data Elasticsearch provides three implementations: `CriteriaQuery`, `StringQuery` and `NativeQuery`. + +[[elasticsearch.operations.criteriaquery]] +=== CriteriaQuery + +`CriteriaQuery` based queries allow the creation of queries to search for data without knowing the syntax or basics of Elasticsearch queries. +They allow the user to build queries by simply chaining and combining `Criteria` objects that specify the criteria the searched documents must fulfill. 
+ +NOTE: when talking about AND or OR when combining criteria keep in mind, that in Elasticsearch AND are converted to a **must** condition and OR to a **should** + +`Criteria` and their usage are best explained by example (let's assume we have a `Book` entity with a `price` property): + +.Get books with a given price +==== +[source,java] +---- +Criteria criteria = new Criteria("price").is(42.0); +Query query = new CriteriaQuery(criteria); +---- +==== + +Conditions for the same field can be chained; they will be combined with a logical AND: + +.Get books with a price in a given range +==== +[source,java] +---- +Criteria criteria = new Criteria("price").greaterThan(34.0).lessThan(42.0); +Query query = new CriteriaQuery(criteria); +---- +==== + +When chaining `Criteria`, by default an AND logic is used: + +.Get all persons with first name _James_ and last name _Miller_: +==== +[source,java] +---- +Criteria criteria = new Criteria("lastname").is("Miller") <1> + .and("firstname").is("James"); <2> +Query query = new CriteriaQuery(criteria); +---- + +<1> the first `Criteria` +<2> the and() creates a new `Criteria` and chains it to the first one. +==== + +If you want to create nested queries, you need to use subqueries for this. +Let's assume we want to find all persons with a last name of _Miller_ and a first name of either _Jack_ or _John_: + +.Nested subqueries +==== +[source,java] +---- +Criteria miller = new Criteria("lastName").is("Miller") <.> + .subCriteria( <.> + new Criteria().or("firstName").is("John") <.> + .or("firstName").is("Jack") <.> + ); +Query query = new CriteriaQuery(miller); +---- + +<.> create a first `Criteria` for the last name +<.> this is combined with AND to a subCriteria +<.> This sub Criteria is an OR combination for the first name _John_ +<.> and the first name Jack +==== + +Please refer to the API documentation of the `Criteria` class for a complete overview of the different available operations.
+ +[[elasticsearch.operations.stringquery]] +=== StringQuery + +This class takes an Elasticsearch query as JSON String. +The following code shows a query that searches for persons having the first name "Jack": + +==== +[source,java] +---- + +Query query = new StringQuery("{ \"match\": { \"firstname\": { \"query\": \"Jack\" } } } "); +SearchHits searchHits = operations.search(query, Person.class); + +---- +==== + +Using `StringQuery` may be appropriate if you already have an Elasticsearch query to use. + +[[elasticsearch.operations.nativequery]] +=== NativeQuery + +`NativeQuery` is the class to use when you have a complex query, or a query that cannot be expressed by using the `Criteria` API, for example when building queries and using aggregates. +It allows to use all the different `co.elastic.clients.elasticsearch._types.query_dsl.Query` implementations from the Elasticsearch library therefore named "native". + +The following code shows how to search for persons with a given `firstName` and for the found documents have a terms aggregation that counts the number of occurrences of the `lastName` for these persons: + +==== +[source,java] +---- +Query query = NativeQuery.builder() + .withAggregation("lastNames", Aggregation.of(a -> a + .terms(ta -> ta.field("lastName").size(10)))) + .withQuery(q -> q + .match(m -> m + .field("firstName") + .query(firstName) + ) + ) + .withPageable(pageable) + .build(); + +SearchHits searchHits = operations.search(query, Person.class); +---- +==== + +[[elasticsearch.operations.searchtemplatequery]] +=== SearchTemplateQuery + +This is a special implementation of the `Query` interface to be used in combination with a stored search template. +See xref:elasticsearch/misc.adoc#elasticsearch.misc.searchtemplates[Search Template support] for further information. 
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc new file mode 100644 index 0000000000..8fb6d72617 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc @@ -0,0 +1,27 @@ +[[preface.versions]] += Versions + +The following table shows the Elasticsearch and Spring versions that are used by Spring Data release trains and the version of Spring Data Elasticsearch included in that. + +[cols="^,^,^,^",options="header"] +|=== +| Spring Data Release Train | Spring Data Elasticsearch | Elasticsearch | Spring Framework +| 2025.1 (in development) | 6.0.x | 9.0.1 | 7.0.x +| 2025.0 | 5.5.x | 8.18.1 | 6.2.x +| 2024.1 | 5.4.x | 8.15.5 | 6.1.x +| 2024.0 | 5.3.xfootnote:oom[Out of maintenance] | 8.13.4 | 6.1.x +| 2023.1 (Vaughan) | 5.2.xfootnote:oom[] | 8.11.1 | 6.1.x +| 2023.0 (Ullmann) | 5.1.xfootnote:oom[] | 8.7.1 | 6.0.x +| 2022.0 (Turing) | 5.0.xfootnote:oom[] | 8.5.3 | 6.0.x +| 2021.2 (Raj) | 4.4.xfootnote:oom[] | 7.17.3 | 5.3.x +| 2021.1 (Q) | 4.3.xfootnote:oom[] | 7.15.2 | 5.3.x +| 2021.0 (Pascal) | 4.2.xfootnote:oom[] | 7.12.0 | 5.3.x +| 2020.0 (Ockham) | 4.1.xfootnote:oom[] | 7.9.3 | 5.3.2 +| Neumann | 4.0.xfootnote:oom[] | 7.6.2 | 5.2.12 +| Moore | 3.2.xfootnote:oom[] |6.8.12 | 5.2.12 +| Lovelace | 3.1.xfootnote:oom[] | 6.2.2 | 5.1.19 +| Kay | 3.0.xfootnote:oom[] | 5.5.0 | 5.0.13 +| Ingalls | 2.1.xfootnote:oom[] | 2.4.0 | 4.3.25 +|=== + +Support for upcoming versions of Elasticsearch is being tracked and general compatibility should be given assuming the usage of the xref:elasticsearch/template.adoc[ElasticsearchOperations interface]. 
diff --git a/src/main/antora/modules/ROOT/pages/index.adoc b/src/main/antora/modules/ROOT/pages/index.adoc new file mode 100644 index 0000000000..f6c613729c --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/index.adoc @@ -0,0 +1,22 @@ +[[spring-data-elasticsearch-reference-documentation]] += Spring Data Elasticsearch +:revnumber: {version} +:revdate: {localdate} +:feature-scroll: true + +_Spring Data Elasticsearch provides repository support for the Elasticsearch database. +It eases development of applications with a consistent programming model that need to access Elasticsearch data sources._ + +[horizontal] +xref:elasticsearch/versions.adoc[Versions] :: Version Compatibility Matrix +xref:elasticsearch/clients.adoc[Clients] :: Elasticsearch Client Configuration +xref:elasticsearch.adoc[Elasticsearch] :: Elasticsearch support +xref:repositories.adoc[Repositories] :: Elasticsearch Repositories +xref:migration-guides.adoc[Migration] :: Migration Guides +https://github.com/spring-projects/spring-data-commons/wiki[Wiki] :: What's New, Upgrade Notes, Supported Versions, additional cross-version information. + +BioMed Central Development Team; Oliver Drotbohm; Greg Turnquist; Christoph Strobl; Peter-Josef Meisch + +(C) 2008-{copyright-year} VMware, Inc. + +Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. diff --git a/src/main/antora/modules/ROOT/pages/migration-guides.adoc b/src/main/antora/modules/ROOT/pages/migration-guides.adoc new file mode 100644 index 0000000000..45749c3ff3 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides.adoc @@ -0,0 +1,8 @@ +[[elasticsearch.migration]] += Migration Guides +:page-section-summary-toc: 1 + +This section contains version-specific migration guides explaining how to upgrade across versions. 
+ + + diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-3.2-4.0.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-3.2-4.0.adoc new file mode 100644 index 0000000000..3a3e93b5fe --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-3.2-4.0.adoc @@ -0,0 +1,144 @@ +[[elasticsearch-migration-guide-3.2-4.0]] += Upgrading from 3.2.x to 4.0.x + +This section describes breaking changes from version 3.2.x to 4.0.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-3.2-4.0.jackson-removal]] +== Removal of the used Jackson Mapper + +One of the changes in version 4.0.x is that Spring Data Elasticsearch does not use the Jackson Mapper anymore to map an entity to the JSON representation needed for Elasticsearch (see xref:elasticsearch/object-mapping.adoc[Elasticsearch Object Mapping]). +In version 3.2.x the Jackson Mapper was the default that was used. +It was possible to switch to the meta-model based converter (named `ElasticsearchEntityMapper`) by explicitly configuring it (xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[Meta Model Object Mapping]). + +In version 4.0.x the meta-model based converter is the only one that is available and does not need to be configured explicitly. +If you had a custom configuration to enable the meta-model converter by providing a bean like this: + +[source,java] +---- +@Bean +@Override +public EntityMapper entityMapper() { + + ElasticsearchEntityMapper entityMapper = new ElasticsearchEntityMapper( + elasticsearchMappingContext(), new DefaultConversionService() + ); + entityMapper.setConversions(elasticsearchCustomConversions()); + + return entityMapper; +} +---- + +You now have to remove this bean, the `ElasticsearchEntityMapper` interface has been removed. 
+ +.Entity configuration +Some users had custom Jackson annotations on the entity class, for example in order to define a custom name for the mapped document in Elasticsearch or to configure date conversions. +These are not taken into account anymore. +The needed functionality is now provided with Spring Data Elasticsearch's `@Field` annotation. +Please see xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations[Mapping Annotation Overview] for detailed information. + +[[elasticsearch-migration-guide-3.2-4.0.implicit-index-name]] +== Removal of implicit index name from query objects + +In 3.2.x the different query classes like `IndexQuery` or `SearchQuery` had properties that were taking the index name or index names that they were operating upon. If these were not set, the passed in entity was inspected to retrieve the index name that was set in the `@Document` annotation. + +In 4.0.x the index name(s) must now be provided in an additional parameter of type `IndexCoordinates`. By separating this, it now is possible to use one query object against different indices.
+ +So for example the following code: + +[source,java] +---- +IndexQuery indexQuery = new IndexQueryBuilder() + .withId(person.getId().toString()) + .withObject(person) + .build(); + +String documentId = elasticsearchOperations.index(indexQuery); +---- + +must be changed to: + +[source,java] +---- +IndexCoordinates indexCoordinates = elasticsearchOperations.getIndexCoordinatesFor(person.getClass()); + +IndexQuery indexQuery = new IndexQueryBuilder() + .withId(person.getId().toString()) + .withObject(person) + .build(); + +String documentId = elasticsearchOperations.index(indexQuery, indexCoordinates); +---- + +To make it easier to work with entities and use the index name that is contained in the entitie's `@Document` annotation, new methods have been added like `DocumentOperations.save(T entity)`; + +[[elasticsearch-migration-guide-3.2-4.0.new-operations]] +== The new Operations interfaces + +In version 3.2 there was the `ElasticsearchOperations` interface that defined all the methods for the `ElasticsearchTemplate` class. In version 4 the functions have been split into different interfaces, aligning these interfaces with the Elasticsearch API: + +* `DocumentOperations` are the functions related documents like saving, or deleting +* `SearchOperations` contains the functions to search in Elasticsearch +* `IndexOperations` define the functions to operate on indexes, like index creation or mappings creation. + +`ElasticsearchOperations` now extends `DocumentOperations` and `SearchOperations` and has methods get access to an `IndexOperations` instance. + +NOTE: All the functions from the `ElasticsearchOperations` interface in version 3.2 that are now moved to the `IndexOperations` interface are still available, they are marked as deprecated and have default implementations that delegate to the new implementation: + +[source,java] +---- +/** + * Create an index for given indexName. 
+ * + * @param indexName the name of the index + * @return {@literal true} if the index was created + * @deprecated since 4.0, use {@link IndexOperations#create()} + */ +@Deprecated +default boolean createIndex(String indexName) { + return indexOps(IndexCoordinates.of(indexName)).create(); +} +---- + +[[elasticsearch-migration-guide-3.2-4.0.deprecations]] +== Deprecations + +[[elasticsearch-migration-guide-3.2-4.0.deprecations.methods-classes]] +=== Methods and classes + +Many functions and classes have been deprecated. These functions still work, but the Javadocs show with what they should be replaced. + +.Example from ElasticsearchOperations +[source,java] +---- +/* + * Retrieves an object from an index. + * + * @param query the query defining the id of the object to get + * @param clazz the type of the object to be returned + * @return the found object + * @deprecated since 4.0, use {@link #get(String, Class, IndexCoordinates)} + */ +@Deprecated +@Nullable + T queryForObject(GetQuery query, Class clazz); +---- + +[[elasticsearch-migration-guide-3.2-4.0.deprecations.elasticsearch]] +=== Elasticsearch deprecations + +Since version 7 the Elasticsearch `TransportClient` is deprecated, it will be removed with Elasticsearch version 8. Spring Data Elasticsearch deprecates the `ElasticsearchTemplate` class which uses the `TransportClient` in version 4.0. + +Mapping types were removed from Elasticsearch 7, they still exist as deprecated values in the Spring Data `@Document` annotation and the `IndexCoordinates` class but they are not used anymore internally. + +[[elasticsearch-migration-guide-3.2-4.0.removal]] +== Removals + +* As already described, the `ElasticsearchEntityMapper` interface has been removed. + +* The `SearchQuery` interface has been merged into it's base interface `Query`, so it's occurrences can just be replaced with `Query`. 
+ +* The method `org.springframework.data.elasticsearch.core.ElasticsearchOperations.query(SearchQuery query, ResultsExtractor resultsExtractor);` and the `org.springframework.data.elasticsearch.core.ResultsExtractor` interface have been removed. +These could be used to parse the result from Elasticsearch for cases in which the response mapping done with the Jackson based mapper was not enough. +Since version 4.0, there are the new xref:elasticsearch/template.adoc#elasticsearch.operations.searchresulttypes[Search Result Types] to return the information from an Elasticsearch response, so there is no need to expose this low level functionality. + +* The low level methods `startScroll`, `continueScroll` and `clearScroll` have been removed from the `ElasticsearchOperations` interface. +For low level scroll API access, there now are `searchScrollStart`, `searchScrollContinue` and `searchScrollClear` methods on the `ElasticsearchRestTemplate` class. diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.0-4.1.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.0-4.1.adoc new file mode 100644 index 0000000000..6fc1407bfc --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.0-4.1.adoc @@ -0,0 +1,48 @@ +[[elasticsearch-migration-guide-4.0-4.1]] += Upgrading from 4.0.x to 4.1.x + +This section describes breaking changes from version 4.0.x to 4.1.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-4.0-4.1.deprecations]] +== Deprecations + +.Definition of the id property +It is possible to define a property of an entity as the id property by naming it either `id` or `document`. +This behaviour is now deprecated and will produce a warning. +Please use the `@Id` annotation to mark a property as being the id property. 
+ +.Index mappings +In the `ReactiveElasticsearchClient.Indices` interface the `updateMapping` methods are deprecated in favour of the `putMapping` methods. +They do the same, but `putMapping` is consistent with the naming in the Elasticsearch API: + +.Alias handling +In the `IndexOperations` interface the methods `addAlias(AliasQuery)`, `removeAlias(AliasQuery)` and `queryForAlias()` have been deprecated. +The new methods `alias(AliasAction)`, `getAliases(String...)` and `getAliasesForIndex(String...)` offer more functionality and a cleaner API. + +.Parent-ID +Usage of a parent-id has been removed from Elasticsearch since version 6. We now deprecate the corresponding fields and methods. + +[[elasticsearch-migration-guide-4.0-4.1.removal]] +== Removals + +.Type mappings +The _type mappings_ parameters of the `@Document` annotation and the `IndexCoordinates` object were removed. +They had been deprecated in Spring Data Elasticsearch 4.0 and their values weren't used anymore. + +[[elasticsearch-migration-guide-4.0-4.1.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-4.0-4.1.breaking-changes.returntypes-1]] +=== Return types of ReactiveElasticsearchClient.Indices methods + +The methods in the `ReactiveElasticsearchClient.Indices` were not used up to now. +With the introduction of the `ReactiveIndexOperations` it became necessary to change some of the return types: + +* the `createIndex` variants now return a `Mono` instead of a `Mono` to signal successful index creation. +* the `updateMapping` variants now return a `Mono` instead of a `Mono` to signal successful mappings storage. + +[[elasticsearch-migration-guide-4.0-4.1.breaking-changes.returntypes-2]] +=== Return types of DocumentOperations.bulkIndex methods + +These methods were returning a `List` containing the ids of the new indexed records. 
+ +Now they return a `List`; these objects contain the id and information about optimistic locking (seq_no and primary_term) diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.1-4.2.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.1-4.2.adoc new file mode 100644 index 0000000000..2d00e2ef06 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.1-4.2.adoc @@ -0,0 +1,74 @@ +[[elasticsearch-migration-guide-4.1-4.2]] += Upgrading from 4.1.x to 4.2.x + +This section describes breaking changes from version 4.1.x to 4.2.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-4.1-4.2.deprecations]] +== Deprecations + +[[elasticsearch-migration-guide-4.1-4.2.deprecations.document]] +=== @Document parameters + +The parameters of the `@Document` annotation that are relevant for the index settings (`useServerConfiguration`, `shards`, `replicas`, `refreshInterval` and `indexStoreType`) have been moved to the `@Setting` annotation. Use in `@Document` is still possible but deprecated. + +[[elasticsearch-migration-guide-4.1-4.2.removal]] +== Removals + +The `@Score` annotation that was used to set the score return value in an entity was deprecated in version 4.0 and has been removed. +Score values are returned in the `SearchHit` instances that encapsulate the returned entities. + +The `org.springframework.data.elasticsearch.ElasticsearchException` class has been removed. +The remaining usages have been replaced with `org.springframework.data.mapping.MappingException` and `org.springframework.dao.InvalidDataAccessApiUsageException`. + +The deprecated `ScoredPage`, `ScrolledPage`, `AggregatedPage` and their implementations have been removed. + +The deprecated `GetQuery` and `DeleteQuery` have been removed. + +The deprecated `find` methods from `ReactiveSearchOperations` and `ReactiveDocumentOperations` have been removed. 
+ +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.refresh-policy]] +=== RefreshPolicy + +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.refresh-policy.enum]] +==== Enum package changed + +It was possible in 4.1 to configure the refresh policy for the `ReactiveElasticsearchTemplate` by overriding the method `AbstractReactiveElasticsearchConfiguration.refreshPolicy()` in a custom configuration class. +The return value of this method was an instance of the class `org.elasticsearch.action.support.WriteRequest.RefreshPolicy`. + +Now the configuration must return `org.springframework.data.elasticsearch.core.RefreshPolicy`. +This enum has the same values and triggers the same behaviour as before, so only the `import` statement has to be adjusted. + +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.refresh-policy.behaviour]] +==== Refresh behaviour + +`ElasticsearchOperations` and `ReactiveElasticsearchOperations` now explicitly use the `RefreshPolicy` set on the template for write requests if not null. +If the refresh policy is null, then nothing special is done, so the cluster defaults are used. `ElasticsearchOperations` was always using the cluster default before this version. + +The provided implementations for `ElasticsearchRepository` and `ReactiveElasticsearchRepository` will do an explicit refresh when the refresh policy is null. +This is the same behaviour as in previous versions. +If a refresh policy is set, then it will be used by the repositories as well. 
+ +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.refresh-policy.configuration]] +==== Refresh configuration + +When configuring Spring Data Elasticsearch as described in xref:elasticsearch/clients.adoc[Elasticsearch Clients] by using `ElasticsearchConfigurationSupport`, `AbstractElasticsearchConfiguration` or `AbstractReactiveElasticsearchConfiguration` the refresh policy will be initialized to `null`. +Previously the reactive code initialized this to `IMMEDIATE`; now reactive and non-reactive code show the same behaviour. + +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.method-return-types]] +=== Method return types + +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.method-return-types.delete]] +==== delete methods that take a Query + +The reactive methods previously returned a `Mono` with the number of deleted documents, the non-reactive versions were void. They now return a `Mono` which contains much more detailed information about the deleted documents and errors that might have occurred. + +[[elasticsearch-migration-guide-4.1-4.2.breaking-changes.method-return-types.multiget]] +==== multiget methods + +The implementations of _multiget_ previously only returned the found entities in a `List` for non-reactive implementations and in a `Flux` for reactive implementations. If the request contained ids that were not found, the information that these are missing was not available. The user needed to compare the returned ids to the requested ones to find +which ones were missing. + +Now the `multiget` methods return a `MultiGetItem` for every requested id. This contains information about failures (like non existing indices) and the information if the item existed (then it is contained in the `MultiGetItem`) or not. 
diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.2-4.3.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.2-4.3.adoc new file mode 100644 index 0000000000..330b912def --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.2-4.3.adoc @@ -0,0 +1,87 @@ +[[elasticsearch-migration-guide-4.2-4.3]] += Upgrading from 4.2.x to 4.3.x + +This section describes breaking changes from version 4.2.x to 4.3.x and how removed features can be replaced by new introduced features. + +[NOTE] +==== +Elasticsearch is working on a new Client that will replace the `RestHighLevelClient` because the `RestHighLevelClient` uses code from Elasticsearch core libraries which are not Apache 2 licensed anymore. +Spring Data Elasticsearch is preparing for this change as well. +This means that internally the implementations for the `*Operations` interfaces need to change - which should be no problem if users program against the interfaces like `ElasticsearchOperations` or `ReactiveElasticsearchOperations`. +If you are using the implementation classes like `ElasticsearchRestTemplate` directly, you will need to adapt to these changes. + +Spring Data Elasticsearch also removes or replaces the use of classes from the `org.elasticsearch` packages in it's API classes and methods, only using them in the implementation where the access to Elasticsearch is implemented. +For the user that means, that some enum classes that were used are replaced by enums that live in `org.springframework.data.elasticsearch` with the same values, these are internally mapped onto the Elasticsearch ones. + +Places where classes are used that cannot easily be replaced, this usage is marked as deprecated, we are working on replacements. 
+ +Check the sections on xref:migration-guides/migration-guide-4.2-4.3.adoc#elasticsearch-migration-guide-4.2-4.3.deprecations[Deprecations] and xref:migration-guides/migration-guide-4.2-4.3.adoc#elasticsearch-migration-guide-4.2-4.3.breaking-changes[Breaking Changes] for further details. +==== + +[[elasticsearch-migration-guide-4.2-4.3.deprecations]] +== Deprecations + +[[elasticsearch-migration-guide-4.2-4.3.deprecations.suggest]] +=== suggest methods + +In `SearchOperations`, and so in `ElasticsearchOperations` as well, the `suggest` methods taking a `org.elasticsearch.search.suggest.SuggestBuilder` as argument and returning a `org.elasticsearch.action.search.SearchResponse` have been deprecated. +Use `SearchHits search(Query query, Class clazz)` instead, passing in a `NativeSearchQuery` which can contain a `SuggestBuilder` and read the suggest results from the returned `SearchHit`. + +In `ReactiveSearchOperations` the new `suggest` methods return a `Mono` now. +Here as well the old methods are deprecated. + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.1]] +=== Removal of `org.elasticsearch` classes from the API. + +* In the `org.springframework.data.elasticsearch.annotations.CompletionContext` annotation the property `type()` has changed from `org.elasticsearch.search.suggest.completion.context.ContextMapping.Type` to `org.springframework.data.elasticsearch.annotations.CompletionContext.ContextMappingType`, the available enum values are the same. +* In the `org.springframework.data.elasticsearch.annotations.Document` annotation the `versionType()` property has changed to `org.springframework.data.elasticsearch.annotations.Document.VersionType`, the available enum values are the same. 
+* In the `org.springframework.data.elasticsearch.core.query.Query` interface the `searchType()` property has changed to `org.springframework.data.elasticsearch.core.query.Query.SearchType`, the available enum values are the same. +* In the `org.springframework.data.elasticsearch.core.query.Query` interface the return value of `timeout()` was changed to `java.time.Duration`. +* The `SearchHits` class does not contain the `org.elasticsearch.search.aggregations.Aggregations` anymore. +Instead it now contains an instance of the `org.springframework.data.elasticsearch.core.AggregationsContainer` class where `T` is the concrete aggregations type from the underlying client that is used. +Currently this will be a `org +.springframework.data.elasticsearch.core.clients.elasticsearch7.ElasticsearchAggregations` object; later different implementations will be available. +The same change has been done to the `ReactiveSearchOperations.aggregate()` functions, they now return a `Flux>`. +Programs using the aggregations need to be changed to cast the returned value to the appropriate class to further process it. +* Methods that might have thrown a `org.elasticsearch.ElasticsearchStatusException` now will throw `org.springframework.data.elasticsearch.RestStatusException` instead. + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.2]] +=== Handling of field and sourceFilter properties of Query + +Up to version 4.2 the `fields` property of a `Query` was interpreted and added to the include list of the `sourceFilter`. +This was not correct, as these are different things for Elasticsearch. +This has been corrected. +As a consequence code might not work anymore that relies on using `fields` to specify which fields should be returned from the document's `_source` and should be changed to use the `sourceFilter`. 
+ +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.3]] +=== search_type default value + +The default value for the `search_type` in Elasticsearch is `query_then_fetch`. +This now is also set as default value in the `Query` implementations, it was previously set to `dfs_query_then_fetch`. + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.4]] +=== BulkOptions changes + +Some properties of the `org.springframework.data.elasticsearch.core.query.BulkOptions` class have changed their type: + +* the type of the `timeout` property has been changed to `java.time.Duration`. +* the type of the`refreshPolicy` property has been changed to `org.springframework.data.elasticsearch.core.RefreshPolicy`. + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.5]] +=== IndicesOptions change + +Spring Data Elasticsearch now uses `org.springframework.data.elasticsearch.core.query.IndicesOptions` instead of `org.elasticsearch.action.support.IndicesOptions`. + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.6]] +=== Completion classes + +The classes from the package `org.springframework.data.elasticsearch.core.completion` have been moved to `org.springframework.data.elasticsearch.core.suggest`. + +[[elasticsearch-migration-guide-4.2-4.3.breaking-changes.7]] +=== Other renamings + +The `org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentPropertyConverter` interface has been renamed to `org.springframework.data.elasticsearch.core.mapping.PropertyValueConverter`. +Likewise the implementations classes named _XXPersistentPropertyConverter_ have been renamed to _XXPropertyValueConverter_. 
diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.3-4.4.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.3-4.4.adoc new file mode 100644 index 0000000000..ad0a1c8724 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.3-4.4.adoc @@ -0,0 +1,183 @@ +[[elasticsearch-migration-guide-4.3-4.4]] += Upgrading from 4.3.x to 4.4.x + +This section describes breaking changes from version 4.3.x to 4.4.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-4.3-4.4.deprecations]] +== Deprecations + +[[elasticsearch-migration-guide-4.3-4.4.deprecations.reactive-operations]] +=== org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations + +The method ` Publisher execute(ClientCallback> callback)` has been deprecated. +As there now are multiple implementations using different client libraries the `execute` method is still available in the different implementations, but there is no more method in the interface, because there is no common callback interface for the different clients. + +[[elasticsearch-migration-guide-4.3-4.4.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-4.3-4.4.breaking-changes.1]] +=== Removal of deprecated classes + +[[org-springframework-data-elasticsearch-core-elasticsearchtemplate-has-been-removed]] +==== `org.springframework.data.elasticsearch.core.ElasticsearchTemplate` has been removed + +As of version 4.4 Spring Data Elasticsearch does not use the `TransportClient` from Elasticsearch anymore (which itself is deprecated since Elasticsearch 7.0). +This means that the `org.springframework.data.elasticsearch.core.ElasticsearchTemplate` class which was deprecated since Spring Data Elasticsearch 4.0 has been removed. +This was the implementation of the `ElasticsearchOperations` interface that was using the `TransportClient`. 
+Connections to Elasticsearch must be made using either the imperative `ElasticsearchRestTemplate` or the reactive `ReactiveElasticsearchTemplate`. + +[[elasticsearch-migration-guide-4.3-4.4.breaking-changes.2]] +=== Package changes + +In 4.3 two classes (`ElasticsearchAggregations` and `ElasticsearchAggregation`) had been moved to the `org.springframework.data.elasticsearch.core.clients.elasticsearch7` package in preparation for the integration of the new Elasticsearch client. +They were moved back to the `org.springframework.data.elasticsearch.core` package as we keep the classes that use the old Elasticsearch client where they were. + +[[elasticsearch-migration-guide-4.3-4.4.breaking-changes.3]] +=== Behaviour change + +The `ReactiveElasticsearchTemplate`, when created directly or by Spring Boot configuration had a default refresh policy of IMMEDIATE. +This could cause performance issues on heavy indexing and was different from the default behaviour of Elasticsearch. +This has been changed so that now the default refresh policy is NONE. +When the +`ReactiveElasticsearchTemplate` was provided by using the configuration like described in xref:elasticsearch/clients.adoc#elasticsearch.clients.reactiverestclient[Reactive REST Client] the default refresh policy already was set to NONE. + +[[elasticsearch-migration-guide-4.3-4.4.new-clients]] +== New Elasticsearch client + +Elasticsearch has introduced its new `ElasticsearchClient` and has deprecated the previous `RestHighLevelClient`. +Spring Data Elasticsearch 4.4 still uses the old client as the default client for the following reasons: + +* The new client forces applications to use the `jakarta.json.spi.JsonProvider` package whereas Spring Boot will stick to `javax.json.spi.JsonProvider` until version 3. So switching the default implementation in Spring Data Elasticsearch can only come with Spring Data Elasticsearch 5 (Spring Data 3, Spring 6). 
+* There are still some bugs in the Elasticsearch client which need to be resolved +* The implementation using the new client in Spring Data Elasticsearch is not yet complete, due to limited resources working on that - remember Spring Data Elasticsearch is a community driven project that lives from public contributions. + +[[elasticsearch-migration-guide-4.3-4.4.new-clients.how-to]] +=== How to use the new client + +CAUTION: The implementation using the new client is not complete, some operations will throw a `java.lang.UnsupportedOperationException` or might throw NPE (for example when the Elasticsearch cannot parse a response from the server, this still happens sometimes) + +Use the new client to test the implementations but do not use it in productive code yet! + +In order to try and use the new client the following steps are necessary: + +[[elasticsearch-migration-guide-4.3-4.4.new-clients.how-to.not]] +==== Make sure not to configure the existing default client + +If using Spring Boot, exclude Spring Data Elasticsearch from the autoconfiguration + +==== +[source,java] +---- +@SpringBootApplication(exclude = ElasticsearchDataAutoConfiguration.class) +public class SpringdataElasticTestApplication { + // ... +} + +---- +==== + +Remove Spring Data Elasticsearch related properties from your application configuration. +If Spring Data Elasticsearch was configured using a programmatic configuration (see xref:elasticsearch/clients.adoc[Elasticsearch Clients]), remove these beans from the Spring application context. 
+ +[[elasticsearch-migration-guide-4.3-4.4.new-clients.how-to.dependencies]] +==== Add dependencies + +The dependencies for the new Elasticsearch client are still optional in Spring Data Elasticsearch so they need to be added explicitly: + +==== +[source,xml] +---- + + + co.elastic.clients + elasticsearch-java + 7.17.3 + + + commons-logging + commons-logging + + + + + org.elasticsearch.client + elasticsearch-rest-client + 7.17.3 + + + commons-logging + commons-logging + + + + +---- +==== + +When using Spring Boot, it is necessary to set the following property in the _pom.xml_. + +==== +[source,xml] +---- + + 2.0.1 + +---- +==== + +[[elasticsearch-migration-guide-4.3-4.4.new-clients.how-to.configuration]] +==== New configuration classes + +[[elasticsearch-migration-guide-4.3-4.4.new-clients.how-to.configuration.imperative]] +===== Imperative style + +In order configure Spring Data Elasticsearch to use the new client, it is necessary to create a configuration bean that derives from `org.springframework.data.elasticsearch.client.elc.ElasticsearchConfiguration`: + +==== +[source,java] +---- +@Configuration +public class NewRestClientConfig extends ElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() // + .connectedTo("localhost:9200") // + .build(); + } +} +---- +==== + +The configuration is done in the same way as with the old client, but it is not necessary anymore to create more than the configuration bean. 
+With this configuration, the following beans will be available in the Spring application context: + +* a `RestClient` bean, that is the configured low level `RestClient` that is used by the Elasticsearch client +* an `ElasticsearchClient` bean, this is the new client that uses the `RestClient` +* an `ElasticsearchOperations` bean, available with the bean names _elasticsearchOperations_ and _elasticsearchTemplate_, this uses the `ElasticsearchClient` + +[[elasticsearch-migration-guide-4.3-4.4.new-clients.how-to.configuration.reactive]] +===== Reactive style + +To use the new client in a reactive environment the only difference is the class from which to derive the configuration: + +==== +[source,java] +---- +@Configuration +public class NewRestClientConfig extends ReactiveElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() // + .connectedTo("localhost:9200") // + .build(); + } +} +---- +==== + +With this configuration, the following beans will be available in the Spring application context: + +* a `RestClient` bean, that is the configured low level `RestClient` that is used by the Elasticsearch client +* an `ReactiveElasticsearchClient` bean, this is the new reactive client that uses the `RestClient` +* an `ReactiveElasticsearchOperations` bean, available with the bean names _reactiveElasticsearchOperations_ and _reactiveElasticsearchTemplate_, this uses the `ReactiveElasticsearchClient` diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc new file mode 100644 index 0000000000..e11955fefa --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc @@ -0,0 +1,167 @@ +[[elasticsearch-migration-guide-4.4-5.0]] += Upgrading from 4.4.x to 5.0.x + +This section describes breaking changes from version 4.4.x to 5.0.x and how 
removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-4.4-4.5.deprecations]] +== Deprecations + +[[custom-trace-level-logging]] +=== Custom trace level logging + +Logging by setting the property `logging.level.org.springframework.data.elasticsearch.client.WIRE=trace` is deprecated now, the Elasticsearch `RestClient` provides a better solution that can be activated by setting the logging level of the `tracer` package to "trace". + +[[elasticsearch-migration-guide-4.4-4.5.deprecations.package]] +=== `org.springframework.data.elasticsearch.client.erhlc` package + +See xref:migration-guides/migration-guide-4.4-5.0.adoc#elasticsearch-migration-guide-4.4-5.0.breaking-changes-packages[Package changes], all classes in this package have been deprecated, as the default client implementations to use are the ones based on the new Java Client from Elasticsearch, see xref:migration-guides/migration-guide-4.4-5.0.adoc#elasticsearch-migration-guide-4.4-5.0.new-clients[New Elasticsearch client] + +[[elasticsearch-migration-guide-4.4-4.5.deprecations.code]] +=== Removal of deprecated code + +`DateFormat.none` and `DateFormat.custom` had been deprecated since version 4.2 and have been removed. + +The properties of `@Document` that were deprecated since 4.2 have been removed. +Use the `@Settings` annotation for these. + +`@DynamicMapping` and `@DynamicMappingValue` have been removed. +Use `@Document.dynamic` or `@Field.dynamic` instead. + +[[elasticsearch-migration-guide-4.4-5.0.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-4.4-5.0.breaking-changes.deprecated-calls]] +=== Removal of deprecated calls + +[[elasticsearch-migration-guide-4.4-5.0.breaking-changes.deprecated-calls.1]] +==== suggest calls in operations interfaces have been removed + +Both `SearchOperations` and `ReactiveSearchOperations` had deprecated calls that were using Elasticsearch classes as parameters. 
+These now have been removed and so the dependency on Elasticsearch classes in these APIs has been cleaned. + +[[elasticsearch-migration-guide-4.4-5.0.breaking-changes-packages]] +=== Package changes + +All the classes that are using or depend on the deprecated Elasticsearch `RestHighLevelClient` have been moved to the package `org.springframework.data.elasticsearch.client.erhlc`. +By this change we now have a clear separation of code using the old deprecated Elasticsearch libraries, code using the new Elasticsearch client and code that is independent of the client implementation. +Also the reactive implementation that was provided up to now has been moved here, as this implementation contains code that was copied and adapted from Elasticsearch libraries. + +If you are using `ElasticsearchRestTemplate` directly and not the `ElasticsearchOperations` interface you'll need to adjust your imports as well. + +When working with the `NativeSearchQuery` class, you'll need to switch to the `NativeQuery` class, which can take a +`Query` instance coming from the new Elasticsearch client libraries. +You'll find plenty of examples in the test code. 
+ +[[elasticsearch-migration-guide-4.4-5.0.breaking-changes-records]] +=== Conversion to Java 17 records + +The following classes have been converted to `Record`, you might need to adjust the use of getter methods from +`getProp()` to `prop()`: + +* `org.springframework.data.elasticsearch.core.AbstractReactiveElasticsearchTemplate.IndexResponseMetaData` +* `org.springframework.data.elasticsearch.core.ActiveShardCount` +* `org.springframework.data.elasticsearch.support.Version` +* `org.springframework.data.elasticsearch.support.ScoreDoc` +* `org.springframework.data.elasticsearch.core.query.ScriptData` +* `org.springframework.data.elasticsearch.core.query.SeqNoPrimaryTerm` + +[[elasticsearch-migration-guide-4.4-5.0.breaking-changes-http-headers]] +=== New HttpHeaders class + +Until version 4.4 the client configuration used the `HttpHeaders` class from the `org.springframework:spring-web` +project. +This introduces a dependency on that artifact. +Users that do not use spring-web then face an error as this class cannot be found. + +In version 5.0 we introduce our own `HttpHeaders` to configure the clients. + +So if you are using headers in the client configuration, you need to replace `org.springframework.http.HttpHeaders` +with `org.springframework.data.elasticsearch.support.HttpHeaders`. + +Hint: You can pass a `org.springframework.http +.HttpHeaders` to the `addAll()` method of `org.springframework.data.elasticsearch.support.HttpHeaders`. + +[[elasticsearch-migration-guide-4.4-5.0.new-clients]] +== New Elasticsearch client + +Spring Data Elasticsearch now uses the new `ElasticsearchClient` and has deprecated the use of the previous `RestHighLevelClient`. 
+ +[[elasticsearch-migration-guide-4.4-5.0.new-clients.imperative]] +=== Imperative style configuration + +To configure Spring Data Elasticsearch to use the new client, it is necessary to create a configuration bean that derives from `org.springframework.data.elasticsearch.client.elc.ElasticsearchConfiguration`: + +==== +[source,java] +---- +@Configuration +public class NewRestClientConfig extends ElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() // + .connectedTo("localhost:9200") // + .build(); + } +} +---- +==== + +The configuration is done in the same way as with the old client, but it is not necessary anymore to create more than the configuration bean. +With this configuration, the following beans will be available in the Spring application context: + +* a `RestClient` bean, that is the configured low level `RestClient` that is used by the Elasticsearch client +* an `ElasticsearchClient` bean, this is the new client that uses the `RestClient` +* an `ElasticsearchOperations` bean, available with the bean names _elasticsearchOperations_ and _elasticsearchTemplate_, this uses the `ElasticsearchClient` + +[[elasticsearch-migration-guide-4.4-5.0.new-clients.reactive]] +=== Reactive style configuration + +To use the new client in a reactive environment the only difference is the class from which to derive the configuration: + +==== +[source,java] +---- +@Configuration +public class NewRestClientConfig extends ReactiveElasticsearchConfiguration { + + @Override + public ClientConfiguration clientConfiguration() { + return ClientConfiguration.builder() // + .connectedTo("localhost:9200") // + .build(); + } +} +---- +==== + +With this configuration, the following beans will be available in the Spring application context: + +* a `RestClient` bean, that is the configured low level `RestClient` that is used by the Elasticsearch client +* an `ReactiveElasticsearchClient` bean, this is the 
+ new reactive client that uses the `RestClient` +* a `ReactiveElasticsearchOperations` bean, available with the bean names _reactiveElasticsearchOperations_ and _reactiveElasticsearchTemplate_, this uses the `ReactiveElasticsearchClient` + +[[elasticsearch-migration-guide-4.4-5.0.old-client]] +=== Still want to use the old client? + +The old deprecated `RestHighLevelClient` can still be used, but you will need to add the dependency explicitly to your application as Spring Data Elasticsearch does not pull it in automatically anymore: + +==== +[source,xml] +---- + + + org.elasticsearch.client + elasticsearch-rest-high-level-client + 7.17.5 + + + commons-logging + commons-logging + + + +---- +==== + +Make sure to specify the version 7.17.5 explicitly, otherwise Maven will resolve to 8.5.0, and this does not exist. diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.0-5.1.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.0-5.1.adoc new file mode 100644 index 0000000000..5ad1b3696a --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.0-5.1.adoc @@ -0,0 +1,27 @@ +[[elasticsearch-migration-guide-5.0-5.1]] += Upgrading from 5.0.x to 5.1.x + +This section describes breaking changes from version 5.0.x to 5.1.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-5.0-5.1.breaking-changes]] +== Breaking Changes + +In the `org.springframework.data.elasticsearch.core.index.AliasData` class, which is used for alias information returned from Elasticsearch, the property `filter` (of type `Document`) is replaced by `filterQuery` which is of type +`org.springframework.data.elasticsearch.core.query.Query`. + +`org.springframework.data.elasticsearch.annotations.Similarity` was an enum class until 5.1. This enum was used in the `@Field` annotation to specify a similarity value. 
+But besides the values defined by the enum, it is possible to have similarities with custom names in Elasticsearch. +Therefore, the annotation property was changed from the type of the enum to a simple `String`. +The previous enum values like `Similarity.Default` do still exist as String constants, so existing code will compile unmodified. +Adaptations are necessary when this enum was used at other places than as a property of the `@Field` annotation. + +[[elasticsearch-migration-guide-5.0-5.1.deprecations]] +== Deprecations + +[[template-functions]] +=== template functions + +The functions in the `IndexOperations` and `ReactiveIndexOperations` to manage index templates that were introduced in Spring Data Elasticsearch 4.1 +have been deprecated. They were using the old Elasticsearch API that was deprecated in Elasticsearch version 7.8. + +Please use the new functions that are based on the composable index template API instead. diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.1-5.2.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.1-5.2.adoc new file mode 100644 index 0000000000..d1e1e2b637 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.1-5.2.adoc @@ -0,0 +1,40 @@ +[[elasticsearch-migration-guide-5.1-5.2]] += Upgrading from 5.1.x to 5.2.x + +This section describes breaking changes from version 5.1.x to 5.2.x and how removed features can be replaced by newly introduced features. + +[[elasticsearch-migration-guide-5.1-5.2.breaking-changes]] +== Breaking Changes + +[[bulk-failures]] +=== Bulk failures +In the `org.springframework.data.elasticsearch.BulkFailureException` class, the return type of the `getFailedDocuments` is changed from `Map` +to `Map`, which allows getting additional details about failure reasons. 
+ +The definition of the `FailureDetails` class (inner to `BulkFailureException`): +[source,java] +public record FailureDetails(Integer status, String errorMessage) { +} + +[[scripted-and-runtime-fields]] +=== scripted and runtime fields + +The classes `org.springframework.data.elasticsearch.core.RuntimeField` and `org.springframework.data.elasticsearch.core.query.ScriptType` have been moved to the subpackage `org.springframework.data.elasticsearch.core.query`. + +The `type` parameter of the `ScriptData` constructor is not nullable any longer. + +[[elasticsearch-migration-guide-5.1-5.2.deprecations]] +== Deprecations + +[[removal-of-deprecated-code]] +=== Removal of deprecated code + +* All the code using the old deprecated `RestHighLevelClient` has been removed. +The default Elasticsearch client used since version 5.0 is the (not so) new Elasticsearch Java client. +* The `org.springframework.data.elasticsearch.client.ClientLogger` class has been removed. +This logger was configured with the `org.springframework.data.elasticsearch.client.WIRE` setting, but was not working with all clients. +From version 5 on, use the trace logger available in the Elasticsearch Java client, see xref:elasticsearch/clients.adoc#elasticsearch.clients.logging[Client Logging]. +* The method `org.springframework.data.elasticsearch.core.ElasticsearchOperations.stringIdRepresentation(Object)` has been removed, use the `convertId(Object)` method defined in the same interface instead. +* The class `org.springframework.data.elasticsearch.core.Range` has been removed, use `org.springframework.data.domain.Range` instead. +* The methods `org.springframework.data.elasticsearch.core.query.IndexQuery.getParentId()` and `setParentId(String)` have been removed, they weren't used anymore and were no-ops. +It has been removed from the `org.springframework.data.elasticsearch.core.query.IndexQuery` class as well. 
diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.2-5.3.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.2-5.3.adoc new file mode 100644 index 0000000000..808578cb59 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.2-5.3.adoc @@ -0,0 +1,21 @@ +[[elasticsearch-migration-guide-5.2-5.3]] += Upgrading from 5.2.x to 5.3.x + +This section describes breaking changes from version 5.2.x to 5.3.x and how removed features can be replaced by newly introduced features. + +[[elasticsearch-migration-guide-5.2-5.3.breaking-changes]] +== Breaking Changes + +During the parameter replacement in `@Query` annotated repository methods previous versions wrote the String `"null"` into the query that was sent to Elasticsearch when the actual parameter value was `null`. +As Elasticsearch does not store `null` values, this behaviour could lead to problems, for example when the fields to be searched contain the string `"null"`. +In Version 5.3 a `null` value in a parameter will cause a `ConversionException` to be thrown. +If you are using `"null"` as the +`null_value` defined in a field mapping, then pass that string into the query instead of a Java `null`. + +[[elasticsearch-migration-guide-5.2-5.3.deprecations]] +== Deprecations + +=== Removals + +The deprecated classes `org.springframework.data.elasticsearch.ELCQueries` +and `org.springframework.data.elasticsearch.client.elc.QueryBuilders` have been removed, use `org.springframework.data.elasticsearch.client.elc.Queries` instead. 
diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.3-5.4.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.3-5.4.adoc new file mode 100644 index 0000000000..c5178ff75d --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.3-5.4.adoc @@ -0,0 +1,23 @@ +[[elasticsearch-migration-guide-5.3-5.4]] += Upgrading from 5.3.x to 5.4.x + +This section describes breaking changes from version 5.3.x to 5.4.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-5.3-5.4.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-5.3-5.4.breaking-changes.knn-search]] +=== knn search +The `withKnnQuery` method in `NativeQueryBuilder` has been replaced with `withKnnSearches` to build a `NativeQuery` with knn search. + +`KnnQuery` and `KnnSearch` are two different classes in elasticsearch java client and are used for different queries, with different parameters supported: + +- `KnnSearch`: is https://www.elastic.co/guide/en/elasticsearch/reference/8.13/search-search.html#search-api-knn[the top level `knn` query] in the elasticsearch request; +- `KnnQuery`: is https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-knn-query.html[the `knn` query inside `query` clause]; + +If `KnnQuery` is still preferable, please be sure to construct it inside `query` clause manually, by means of `withQuery(co.elastic.clients.elasticsearch._types.query_dsl.Query query)` clause in `NativeQueryBuilder`. 
+ +[[elasticsearch-migration-guide-5.3-5.4.deprecations]] +== Deprecations + +=== Removals diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.4-5.5.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.4-5.5.adoc new file mode 100644 index 0000000000..38b2b4af2b --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.4-5.5.adoc @@ -0,0 +1,30 @@ +[[elasticsearch-migration-guide-5.4-5.5]] += Upgrading from 5.4.x to 5.5.x + +This section describes breaking changes from version 5.4.x to 5.5.x and how removed features can be replaced by new introduced features. + +[[elasticsearch-migration-guide-5.4-5.5.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-5.4-5.5.deprecations]] +== Deprecations + +Some classes that probably are not used by a library user have been renamed, the classes with the old names are still there, but are deprecated: + +|=== +|old name|new name + +|ElasticsearchPartQuery|RepositoryPartQuery +|ElasticsearchStringQuery|RepositoryStringQuery +|ReactiveElasticsearchStringQuery|ReactiveRepositoryStringQuery +|=== + +=== Removals + +The following methods that had been deprecated since release 5.3 have been removed: +``` +DocumentOperations.delete(Query, Class) +DocumentOperations.delete(Query, Class, IndexCoordinates) +ReactiveDocumentOperations.delete(Query, Class) +ReactiveDocumentOperations.delete(Query, Class, IndexCoordinates) +``` diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.5-6.0.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.5-6.0.adoc new file mode 100644 index 0000000000..7667701a17 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-5.5-6.0.adoc @@ -0,0 +1,21 @@ +[[elasticsearch-migration-guide-5.5-6.0]] += Upgrading from 5.5.x to 6.0.x + +This section describes breaking changes from version 5.5.x to 6.0.x and how removed features 
can be replaced by new introduced features. + +[[elasticsearch-migration-guide-5.5-6.0.breaking-changes]] +== Breaking Changes + +[[elasticsearch-migration-guide-5.5-6.0.deprecations]] +== Deprecations + + +=== Removals + +The `org.springframework.data.elasticsearch.core.query.ScriptType` enum has been removed. To distinguish between an inline and a stored script set the appropriate values in the `org.springframework.data.elasticsearch.core.query.ScriptData` record. + +These methods have been removed because the Elasticsearch Client 9 does not support them anymore: +``` +org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchIndicesClient.unfreeze(UnfreezeRequest) +org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchIndicesClient.unfreeze(Function>) +``` diff --git a/src/main/antora/modules/ROOT/pages/repositories.adoc b/src/main/antora/modules/ROOT/pages/repositories.adoc new file mode 100644 index 0000000000..7c4e810e45 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories.adoc @@ -0,0 +1,8 @@ +[[elasticsearch.repositories]] += Repositories +:page-section-summary-toc: 1 + +This chapter explains the basic foundations of Spring Data repositories and Elasticsearch specifics. +Before continuing to the Elasticsearch specifics, make sure you have a sound understanding of the basic concepts. + +The goal of the Spring Data repository abstraction is to significantly reduce the amount of boilerplate code required to implement data access layers for various persistence stores. 
diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc new file mode 100644 index 0000000000..77d18f1908 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc @@ -0,0 +1,4 @@ +include::{commons}@data-commons::page$repositories/core-concepts.adoc[] + +[[elasticsearch.entity-persistence.state-detection-strategies]] +include::{commons}@data-commons::page$is-new-state-detection.adoc[leveloffset=+1] diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc new file mode 100644 index 0000000000..f84313e9da --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/core-domain-events.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc new file mode 100644 index 0000000000..a7c2ff8d3c --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/core-extensions.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc new file mode 100644 index 0000000000..2ae01801b1 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/create-instances.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc new file mode 100644 index 0000000000..c7615191a6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc @@ 
-0,0 +1 @@ +include::{commons}@data-commons::page$repositories/custom-implementations.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/definition.adoc b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc new file mode 100644 index 0000000000..bd65a8af83 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/definition.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc new file mode 100644 index 0000000000..081bac9f61 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/null-handling.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/projections.adoc b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc new file mode 100644 index 0000000000..840ba7c816 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc @@ -0,0 +1,4 @@ +[[elasticsearch.projections]] += Projections + +include::{commons}@data-commons::page$repositories/projections.adoc[leveloffset=+1] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc new file mode 100644 index 0000000000..e495eddc6b --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-keywords-reference.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc new file mode 100644 index 0000000000..dfe4814955 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc @@ -0,0 +1 @@ 
+include::{commons}@data-commons::page$repositories/query-methods-details.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc new file mode 100644 index 0000000000..a73c3201d0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-return-types-reference.adoc[] diff --git a/src/main/antora/resources/antora-resources/antora.yml b/src/main/antora/resources/antora-resources/antora.yml new file mode 100644 index 0000000000..0f79f52be7 --- /dev/null +++ b/src/main/antora/resources/antora-resources/antora.yml @@ -0,0 +1,21 @@ +version: ${antora-component.version} +prerelease: ${antora-component.prerelease} + +asciidoc: + attributes: + copyright-year: ${current.year} + version: ${project.version} + springversionshort: ${spring.short} + springversion: ${spring} + attribute-missing: 'warn' + commons: ${springdata.commons.docs} + include-xml-namespaces: false + spring-data-commons-docs-url: https://docs.spring.io/spring-data/commons/reference + spring-data-commons-javadoc-base: https://docs.spring.io/spring-data/commons/docs/${springdata.commons}/api/ + springdocsurl: https://docs.spring.io/spring-framework/reference/{springversionshort} + springjavadocurl: https://docs.spring.io/spring-framework/docs/${spring}/javadoc-api + spring-framework-docs: '{springdocsurl}' + spring-framework-javadoc: '{springjavadocurl}' + springhateoasversion: ${spring-hateoas} + releasetrainversion: ${releasetrain} + store: Elasticsearch diff --git a/src/main/asciidoc/index.adoc b/src/main/asciidoc/index.adoc deleted file mode 100644 index 147693b63d..0000000000 --- a/src/main/asciidoc/index.adoc +++ /dev/null @@ -1,36 +0,0 @@ -= Spring Data Elasticsearch -BioMed Central Development Team -:revnumber: {version} -:revdate: {localdate} -:toc: 
-:toc-placement!: -:spring-data-commons-docs: ../../../../spring-data-commons/src/main/asciidoc - -(C) 2013-2015 The original author(s). - -NOTE: Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. - -toc::[] - -include::preface.adoc[] -:leveloffset: +1 -include::{spring-data-commons-docs}/repositories.adoc[] -:leveloffset: -1 - -[[reference]] -= Reference Documentation - -:leveloffset: +1 -include::reference/data-elasticsearch.adoc[] -include::reference/elasticsearch-misc.adoc[] -:leveloffset: -1 - -[[appendix]] -= Appendix -:numbered!: -:leveloffset: +1 -include::{spring-data-commons-docs}/repository-namespace-reference.adoc[] -include::{spring-data-commons-docs}/repository-populator-namespace-reference.adoc[] -include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[] -include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[] -:leveloffset: -1 diff --git a/src/main/asciidoc/preface.adoc b/src/main/asciidoc/preface.adoc deleted file mode 100644 index e1d2f8812a..0000000000 --- a/src/main/asciidoc/preface.adoc +++ /dev/null @@ -1,20 +0,0 @@ -= Preface - -The Spring Data Elasticsearch project applies core Spring concepts to the development of solutions using the Elasticsearch Search Engine. We have povided a "template" as a high-level abstraction for storing,querying,sorting and faceting documents. You will notice similarities to the Spring data solr and mongodb support in the Spring Framework. 
- -[[project]] -[preface] -== Project Metadata - -* Version Control - https://github.com/spring-projects/spring-data-elasticsearch -* Bugtracker - https://jira.spring.io/browse/DATAES -* Release repository - https://repo.spring.io/libs-release -* Milestone repository - https://repo.spring.io/libs-milestone -* Snapshot repository - https://repo.spring.io/libs-snapshot - -[[requirements]] -[preface] -== Requirements - -Requires http://www.elasticsearch.org/download/[Elasticsearch] 0.20.2 and above or optional dependency or not even that if you are using Embedded Node Client - diff --git a/src/main/asciidoc/reference/data-elasticsearch.adoc b/src/main/asciidoc/reference/data-elasticsearch.adoc deleted file mode 100644 index c14d9b929a..0000000000 --- a/src/main/asciidoc/reference/data-elasticsearch.adoc +++ /dev/null @@ -1,288 +0,0 @@ -[[elasticsearch.repositories]] -= Elasticsearch Repositories - -This chapter includes details of the Elasticsearch repository implementation. - -[[elasticsearch.introduction]] -== Introduction - -[[elasticsearch.namespace]] -=== Spring Namespace - -The Spring Data Elasticsearch module contains a custom namespace allowing definition of repository beans as well as elements for instantiating a `ElasticsearchServer` . - -Using the `repositories` element looks up Spring Data repositories as described in <> . - -.Setting up Elasticsearch repositories using Namespace -==== -[source,xml] ----- - - - - - - ----- -==== - -Using the `Transport Client` or `Node Client` element registers an instance of `Elasticsearch Server` in the context. - -.Transport Client using Namespace -==== -[source,xml] ----- - - - - - - ----- -==== - -.Node Client using Namespace -==== -[source,xml] ----- - - - - - - ----- -==== - -[[elasticsearch.annotation]] -=== Annotation based configuration - -The Spring Data Elasticsearch repositories support cannot only be activated through an XML namespace but also using an annotation through JavaConfig. 
- -.Spring Data Elasticsearch repositories using JavaConfig -==== -[source,java] ----- -@Configuration -@EnableElasticsearchRepositories(basePackages = "org/springframework/data/elasticsearch/repositories") -static class Config { - - @Bean - public ElasticsearchOperations elasticsearchTemplate() { - return new ElasticsearchTemplate(nodeBuilder().local(true).node().client()); - } -} ----- -==== - -The configuration above sets up an `Embedded Elasticsearch Server` which is used by the `ElasticsearchTemplate` . Spring Data Elasticsearch Repositories are activated using the `@EnableElasticsearchRepositories` annotation, which essentially carries the same attributes as the XML namespace does. If no base package is configured, it will use the one the configuration class resides in. - -[[elasticsearch.cdi]] -=== Elasticsearch Repositores using CDI - -The Spring Data Elasticsearch repositories can also be set up using CDI functionality. - -.Spring Data Elasticsearch repositories using JavaConfig -==== -[source,java] ----- -class ElasticsearchTemplateProducer { - - @Produces - @ApplicationScoped - public ElasticsearchOperations createElasticsearchTemplate() { - return new ElasticsearchTemplate(nodeBuilder().local(true).node().client()); - } -} - -class ProductService { - - private ProductRepository repository; - - public Page findAvailableBookByName(String name, Pageable pageable) { - return repository.findByAvailableTrueAndNameStartingWith(name, pageable); - } - - @Inject - public void setRepository(ProductRepository repository) { - this.repository = repository; - } -} ----- -==== - -[[elasticsearch.query-methods]] -== Query methods - -[[elasticsearch.query-methods.finders]] -=== Query lookup strategies - -The Elasticsearch module supports all basic query building feature as String,Abstract,Criteria or have it being derived from the method name. 
- -==== Declared queries - -Deriving the query from the method name is not always sufficient and/or may result in unreadable method names. In this case one might make either use of `@Query` annotation (see <> ). - -[[elasticsearch.query-methods.criterions]] -=== Query creation - -Generally the query creation mechanism for Elasticsearch works as described in <> . Here's a short example of what a Elasticsearch query method translates into: - -.Query creation from method names -==== -[source,java] ----- -public interface BookRepository extends Repository -{ - List findByNameAndPrice(String name, Integer price); -} ----- -==== - -The method name above will be translated into the following Elasticsearch json query - -[source] ----- -{ "bool" : - { "must" : - [ - { "field" : {"name" : "?"} }, - { "field" : {"price" : "?"} } - ] - } -} ----- - -A list of supported keywords for Elasticsearch is shown below. - -[cols="1,2,3", options="header"] -.Supported keywords inside method names -|=== -| Keyword -| Sample -| Elasticsearch Query String| `And` -| `findByNameAndPrice` -| `{"bool" : {"must" : [ {"field" : {"name" : "?"}}, - {"field" : {"price" : "?"}} ]}}` - -| `Or` -| `findByNameOrPrice` -| `{"bool" : {"should" : [ {"field" : {"name" : "?"}}, - {"field" : {"price" : "?"}} ]}}` - -| `Is` -| `findByName` -| `{"bool" : {"must" : {"field" : {"name" : "?"}}}}` - -| `Not` -| `findByNameNot` -| `{"bool" : {"must_not" : {"field" : {"name" : "?"}}}}` - -| `Between` -| `findByPriceBetween` -| `{"bool" : {"must" : {"range" : {"price" : {"from" : - ?,"to" : ?,"include_lower" : true,"include_upper" : true}}}}}` - -| `LessThanEqual` -| `findByPriceLessThan` -| `{"bool" : {"must" : {"range" : {"price" : {"from" : - null,"to" : ?,"include_lower" : true,"include_upper" : - true}}}}}` - -| `GreaterThanEqual` -| `findByPriceGreaterThan` -| `{"bool" : {"must" : {"range" : {"price" : {"from" : - ?,"to" : null,"include_lower" : true,"include_upper" : - true}}}}}` - -| `Before` -| 
`findByPriceBefore` -| `{"bool" : {"must" : {"range" : {"price" : {"from" : - null,"to" : ?,"include_lower" : true,"include_upper" : - true}}}}}` - -| `After` -| `findByPriceAfter` -| `{"bool" : {"must" : {"range" : {"price" : {"from" : - ?,"to" : null,"include_lower" : true,"include_upper" : - true}}}}}` - -| `Like` -| `findByNameLike` -| `{"bool" : {"must" : {"field" : {"name" : {"query" : - "?*","analyze_wildcard" : true}}}}}` - -| `StartingWith` -| `findByNameStartingWith` -| `{"bool" : {"must" : {"field" : {"name" : {"query" : - "?*","analyze_wildcard" : true}}}}}` - -| `EndingWith` -| `findByNameEndingWith` -| `{"bool" : {"must" : {"field" : {"name" : {"query" : - "*?","analyze_wildcard" : true}}}}}` - -| `Contains/Containing` -| `findByNameContaining` -| `{"bool" : {"must" : {"field" : {"name" : {"query" : - "*?*","analyze_wildcard" : true}}}}}` - -| `In` -| `findByNameIn(Collectionnames)` -| `{"bool" : {"must" : {"bool" : {"should" : [ {"field" : - {"name" : "?"}}, {"field" : {"name" : "?"}} ]}}}}` - -| `NotIn` -| `findByNameNotIn(Collectionnames)` -| `{"bool" : {"must_not" : {"bool" : {"should" : {"field" : - {"name" : "?"}}}}}}` - -| `Near` -| `findByStoreNear` -| `Not Supported Yet !` - -| `True` -| `findByAvailableTrue` -| `{"bool" : {"must" : {"field" : {"available" : true}}}}` - -| `False` -| `findByAvailableFalse` -| `{"bool" : {"must" : {"field" : {"available" : false}}}}` - -| `OrderBy` -| `findByAvailableTrueOrderByNameDesc` -| `{"sort" : [{ "name" : {"order" : "desc"} }],"bool" : - {"must" : {"field" : {"available" : true}}}}` -|=== - -[[elasticsearch.query-methods.at-query]] -=== Using @Query Annotation - -.Declare query at the method using the `@Query` annotation. 
-==== -[source,java] ----- -public interface BookRepository extends ElasticsearchRepository { - @Query("{"bool" : {"must" : {"field" : {"name" : "?0"}}}}") - Page findByName(String name,Pageable pageable); -} ----- -==== diff --git a/src/main/asciidoc/reference/elasticsearch-misc.adoc b/src/main/asciidoc/reference/elasticsearch-misc.adoc deleted file mode 100644 index 49d4c8c264..0000000000 --- a/src/main/asciidoc/reference/elasticsearch-misc.adoc +++ /dev/null @@ -1,72 +0,0 @@ -[[elasticsearch.misc]] -= Miscellaneous Elasticsearch Operation Support - -This chapter covers additional support for Elasticsearch operations that cannot be directly accessed via the repository interface. It is recommended to add those operations as custom implementation as described in <> . - -[[elasticsearch.misc.filter]] -== Filter Builder - -Filter Builder improves query speed. - -==== -[source,java] ----- -private ElasticsearchTemplate elasticsearchTemplate; - -SearchQuery searchQuery = new NativeSearchQueryBuilder() - .withQuery(matchAllQuery()) - .withFilter(boolFilter().must(termFilter("id", documentId))) - .build(); - -Page sampleEntities = - elasticsearchTemplate.queryForPage(searchQuery,SampleEntity.class); ----- -==== - -[[elasticsearch.scan.and.scroll]] -== Using Scan And Scroll For Big Result Set - -Elasticsearch has scan and scroll feature for getting big result set in chunks. `ElasticsearchTemplate` has scan and scroll methods that can be used as below. 
- -.Using Scan and Scroll -==== -[source,java] ----- -SearchQuery searchQuery = new NativeSearchQueryBuilder() - .withQuery(matchAllQuery()) - .withIndices("test-index") - .withTypes("test-type") - .withPageable(new PageRequest(0,1)) - .build(); -String scrollId = elasticsearchTemplate.scan(searchQuery,1000,false); -List sampleEntities = new ArrayList(); -boolean hasRecords = true; -while (hasRecords){ - Page page = elasticsearchTemplate.scroll(scrollId, 5000L , new ResultsMapper() - { - @Override - public Page mapResults(SearchResponse response) { - List chunk = new ArrayList(); - for(SearchHit searchHit : response.getHits()){ - if(response.getHits().getHits().length <= 0) { - return null; - } - SampleEntity user = new SampleEntity(); - user.setId(searchHit.getId()); - user.setMessage((String)searchHit.getSource().get("message")); - chunk.add(user); - } - return new PageImpl(chunk); - } - }); - if(page != null) { - sampleEntities.addAll(page.getContent()); - hasRecords = page.hasNextPage(); - } - else{ - hasRecords = false; - } - } -} ----- -==== diff --git a/src/main/java/org/springframework/data/elasticsearch/BulkFailureException.java b/src/main/java/org/springframework/data/elasticsearch/BulkFailureException.java new file mode 100644 index 0000000000..6d40bca631 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/BulkFailureException.java @@ -0,0 +1,47 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch; + +import org.springframework.dao.DataRetrievalFailureException; + +import java.util.Map; + +/** + * @author Peter-Josef Meisch + * @author Illia Ulianov + * @since 4.1 + */ +public class BulkFailureException extends DataRetrievalFailureException { + private final Map failedDocuments; + + public BulkFailureException(String msg, Map failedDocuments) { + super(msg); + this.failedDocuments = failedDocuments; + } + + public Map getFailedDocuments() { + return failedDocuments; + } + + /** + * Details about a document saving failure. + * + * @author Illia Ulianov + * @since 5.2 + */ + public record FailureDetails(Integer status, String errorMessage) { + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/ElasticsearchErrorCause.java b/src/main/java/org/springframework/data/elasticsearch/ElasticsearchErrorCause.java new file mode 100644 index 0000000000..22dbbfd7c0 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/ElasticsearchErrorCause.java @@ -0,0 +1,78 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch; + +import java.util.List; + +import org.jspecify.annotations.Nullable; + +/** + * Object describing an Elasticsearch error + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ElasticsearchErrorCause { + @Nullable private final String type; + + private final String reason; + + @Nullable private final String stackTrace; + + @Nullable private final ElasticsearchErrorCause causedBy; + + private final List rootCause; + + private final List suppressed; + + public ElasticsearchErrorCause(@Nullable String type, String reason, @Nullable String stackTrace, + @Nullable ElasticsearchErrorCause causedBy, List rootCause, + List suppressed) { + this.type = type; + this.reason = reason; + this.stackTrace = stackTrace; + this.causedBy = causedBy; + this.rootCause = rootCause; + this.suppressed = suppressed; + } + + @Nullable + public String getType() { + return type; + } + + public String getReason() { + return reason; + } + + @Nullable + public String getStackTrace() { + return stackTrace; + } + + @Nullable + public ElasticsearchErrorCause getCausedBy() { + return causedBy; + } + + public List getRootCause() { + return rootCause; + } + + public List getSuppressed() { + return suppressed; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java b/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java deleted file mode 100644 index b3b0ddb203..0000000000 --- a/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.elasticsearch; - -import java.util.Map; - -/** - * ElasticsearchException - * - * @author Rizwan Idrees - * @author Mohsin Husen - */ -public class ElasticsearchException extends RuntimeException { - - private Map failedDocuments; - - public ElasticsearchException(String message) { - super(message); - } - - public ElasticsearchException(String message, Throwable cause) { - super(message, cause); - } - - public ElasticsearchException(String message, Throwable cause, Map failedDocuments) { - super(message, cause); - this.failedDocuments = failedDocuments; - } - - public ElasticsearchException(String message, Map failedDocuments) { - super(message); - this.failedDocuments = failedDocuments; - } - - public Map getFailedDocuments() { - return failedDocuments; - } -} diff --git a/src/main/java/org/springframework/data/elasticsearch/NoSuchIndexException.java b/src/main/java/org/springframework/data/elasticsearch/NoSuchIndexException.java new file mode 100644 index 0000000000..c1eab9bcf7 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/NoSuchIndexException.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch; + +import org.springframework.dao.NonTransientDataAccessResourceException; + +/** + * @author Christoph Strobl + * @since 3.2 + */ +public class NoSuchIndexException extends NonTransientDataAccessResourceException { + + private final String index; + + /** + * @since 4.4 + */ + public NoSuchIndexException(String index) { + super(String.format("Index %s not found.", index)); + this.index = index; + } + + public NoSuchIndexException(String index, Throwable cause) { + super(String.format("Index %s not found.", index), cause); + this.index = index; + } + + public String getIndex() { + return index; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/ResourceFailureException.java b/src/main/java/org/springframework/data/elasticsearch/ResourceFailureException.java new file mode 100644 index 0000000000..493d3b4b7b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/ResourceFailureException.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch; + +import org.springframework.core.NestedRuntimeException; + +/** + * @author Peter-Josef Meisch + */ +public class ResourceFailureException extends NestedRuntimeException { + public ResourceFailureException(String msg) { + super(msg); + } + + public ResourceFailureException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/ResourceNotFoundException.java b/src/main/java/org/springframework/data/elasticsearch/ResourceNotFoundException.java new file mode 100644 index 0000000000..5e97b4e00b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/ResourceNotFoundException.java @@ -0,0 +1,29 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch; + +import org.springframework.dao.NonTransientDataAccessResourceException; + +/** + * @author Peter-Josef Meisch + * @since 5.1 + */ +public class ResourceNotFoundException extends NonTransientDataAccessResourceException { + + public ResourceNotFoundException(String msg) { + super(msg); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/RestStatusException.java b/src/main/java/org/springframework/data/elasticsearch/RestStatusException.java new file mode 100644 index 0000000000..c707686098 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/RestStatusException.java @@ -0,0 +1,49 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch; + +import org.springframework.dao.DataAccessException; + +/** + * Exception class for REST status exceptions independent from the used client/backend. 
+ * + * @author Peter-Josef Meisch + * @since 4.3 + */ +public class RestStatusException extends DataAccessException { + + // we do not use a dedicated status class from Elasticsearch, OpenSearch, Spring web or webflux here + private final int status; + + public RestStatusException(int status, String msg) { + super(msg); + this.status = status; + } + + public RestStatusException(int status, String msg, Throwable cause) { + super(msg, cause); + this.status = status; + } + + public int getStatus() { + return status; + } + + @Override + public String toString() { + return "RestStatusException{" + "status=" + status + "} " + super.toString(); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/UncategorizedElasticsearchException.java b/src/main/java/org/springframework/data/elasticsearch/UncategorizedElasticsearchException.java new file mode 100644 index 0000000000..ffc71ef7ba --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/UncategorizedElasticsearchException.java @@ -0,0 +1,71 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch; + +import org.jspecify.annotations.Nullable; +import org.springframework.dao.UncategorizedDataAccessException; + +/** + * @author Peter-Josef Meisch + * @since 4.0 + */ +public class UncategorizedElasticsearchException extends UncategorizedDataAccessException { + + /** + * the response status code from Elasticsearch if available + * + * @since 4.4 + */ + @Nullable private final Integer statusCode; + + /** + * The response body from Elasticsearch if available + * + * @since 4.4 + */ + @Nullable final String responseBody; + + public UncategorizedElasticsearchException(String msg) { + this(msg, null); + } + + public UncategorizedElasticsearchException(String msg, @Nullable Throwable cause) { + this(msg, null, null, cause); + } + + public UncategorizedElasticsearchException(String msg, @Nullable Integer statusCode, @Nullable String responseBody, + @Nullable Throwable cause) { + super(msg, cause); + this.statusCode = statusCode; + this.responseBody = responseBody; + } + + /** + * @since 4.4 + */ + @Nullable + public Integer getStatusCode() { + return statusCode; + } + + /** + * @since 4.4 + */ + @Nullable + public String getResponseBody() { + return responseBody; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/VersionConflictException.java b/src/main/java/org/springframework/data/elasticsearch/VersionConflictException.java new file mode 100644 index 0000000000..b3f31d3550 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/VersionConflictException.java @@ -0,0 +1,34 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch; + +import org.springframework.dao.DataIntegrityViolationException; + +/** + * Exception that is thrown when a version conflict from the server is detected. + * + * @author Peter-Josef Meisch + * @since 5.2 + */ +public class VersionConflictException extends DataIntegrityViolationException { + public VersionConflictException(String msg) { + super(msg); + } + + public VersionConflictException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Alias.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Alias.java new file mode 100644 index 0000000000..0f707e942e --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Alias.java @@ -0,0 +1,79 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Repeatable; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Identifies an alias for the index. + * + * @author Youssef Aouichaoui + * @since 5.4 + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@Repeatable(Aliases.class) +public @interface Alias { + /** + * @return Index alias name. Alias for {@link #alias}. + */ + @AliasFor("alias") + String value() default ""; + + /** + * @return Index alias name. Alias for {@link #value}. + */ + @AliasFor("value") + String alias() default ""; + + /** + * @return Query used to limit documents the alias can access. + */ + Filter filter() default @Filter; + + /** + * @return Used to route indexing operations to a specific shard. + */ + String indexRouting() default ""; + + /** + * @return Used to route indexing and search operations to a specific shard. + */ + String routing() default ""; + + /** + * @return Used to route search operations to a specific shard. + */ + String searchRouting() default ""; + + /** + * @return Is the alias hidden? + */ + boolean isHidden() default false; + + /** + * @return Is it the 'write index' for the alias? + */ + boolean isWriteIndex() default false; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Aliases.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Aliases.java new file mode 100644 index 0000000000..ea5d895294 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Aliases.java @@ -0,0 +1,36 @@ +/* + * Copyright 2024-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Container annotation that aggregates several {@link Alias} annotations. + * + * @author Youssef Aouichaoui + * @see Alias + * @since 5.4 + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +public @interface Aliases { + Alias[] value(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionContext.java b/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionContext.java new file mode 100644 index 0000000000..da27c1245b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionContext.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Based on reference doc - https://www.elastic.co/guide/en/elasticsearch/reference/current/suggester-context.html + * + * @author Robert Gruendler + * @author Peter-Josef Meisch + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Documented +@Inherited +public @interface CompletionContext { + + String name(); + + ContextMappingType type(); + + String precision() default ""; + + String path() default ""; + + /** + * @since 4.3 + */ + enum ContextMappingType { + CATEGORY("category"), GEO("geo"); + + private final String mappedName; + + ContextMappingType(String mappedName) { + this.mappedName = mappedName; + } + + public String getMappedName() { + return mappedName; + } + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionContextType.java b/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionContextType.java new file mode 100644 index 0000000000..0666accfc6 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionContextType.java @@ -0,0 +1,12 @@ +package org.springframework.data.elasticsearch.annotations; + +/** + * Based on reference doc - 
https://www.elastic.co/guide/en/elasticsearch/reference/current/suggester-context.html + * + * @author Robert Gruendler + */ +public enum CompletionContextType { + + CATEGORY, GEO + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionField.java index 559ce55080..94ca1ea724 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionField.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/CompletionField.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,36 @@ */ package org.springframework.data.elasticsearch.annotations; -import java.lang.annotation.*; +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** - * Based on the reference doc - http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-suggesters-completion.html + * Based on the reference doc - + * https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-completion.html * * @author Mewes Kochheim + * @author Robert Gruendler + * @author Peter-Josef Meisch */ @Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Documented 
@Inherited public @interface CompletionField { String searchAnalyzer() default "simple"; - String indexAnalyzer() default "simple"; - - boolean payloads() default false; + String analyzer() default "simple"; boolean preserveSeparators() default true; boolean preservePositionIncrements() default true; int maxInputLength() default 50; + + CompletionContext[] contexts() default {}; } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/CountQuery.java b/src/main/java/org/springframework/data/elasticsearch/annotations/CountQuery.java new file mode 100644 index 0000000000..80bb7c15f9 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/CountQuery.java @@ -0,0 +1,39 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Alias for a @Query annotation with the count parameter set to true. 
+ * + * @author Peter-Josef Meisch + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@Query(count = true) +public @interface CountQuery { + + @AliasFor(annotation = Query.class) + String value() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java b/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java index 056dc14822..9f3b7f9d78 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,16 +16,207 @@ package org.springframework.data.elasticsearch.annotations; /** + * Values based on Elasticsearch reference + * documentation. The patterns are taken from this documentation and slightly adapted so that a Java + * {@link java.time.format.DateTimeFormatter} produces the same values as the Elasticsearch formatter. Use + * format = {} to disable built-in date formats in the {@link Field} annotation. If you want to use only a + * custom date format pattern, you must set the format property to empty {}. 
+ * * @author Jakub Vavrik - * Values based on reference doc - http://www.elasticsearch.org/guide/reference/mapping/date-format/ + * @author Tim te Beek + * @author Peter-Josef Meisch + * @author Sascha Woo */ public enum DateFormat { - none, custom, basic_date, basic_date_time, basic_date_time_no_millis, basic_ordinal_date, basic_ordinal_date_time, - basic_ordinal_date_time_no_millis, basic_time, basic_time_no_millis, basic_t_time, basic_t_time_no_millis, - basic_week_date, basic_week_date_time, basic_week_date_time_no_millis, date, date_hour, date_hour_minute, - date_hour_minute_second, date_hour_minute_second_fraction, date_hour_minute_second_millis, date_optional_time, - date_time, date_time_no_millis, hour, hour_minute, hour_minute_second, hour_minute_second_fraction, - hour_minute_second_millis, ordinal_date, ordinal_date_time, ordinal_date_time_no_millis, time, time_no_millis, - t_time, t_time_no_millis, week_date, week_date_time, weekDateTimeNoMillis, week_year, weekyearWeek, - weekyearWeekDay, year, year_month, year_month_day + basic_date("uuuuMMdd"), // + basic_date_time("uuuuMMdd'T'HHmmss.SSSXXX"), // + basic_date_time_no_millis("uuuuMMdd'T'HHmmssXXX"), // + basic_ordinal_date("uuuuDDD"), // + basic_ordinal_date_time("yyyyDDD'T'HHmmss.SSSXXX"), // + basic_ordinal_date_time_no_millis("yyyyDDD'T'HHmmssXXX"), // + basic_time("HHmmss.SSSXXX"), // + basic_time_no_millis("HHmmssXXX"), // + basic_t_time("'T'HHmmss.SSSXXX"), // + basic_t_time_no_millis("'T'HHmmssXXX"), // + basic_week_date("YYYY'W'wwe"), // week-based-year! + /** + * @since 5.3 + */ + strict_basic_week_date("YYYY'W'wwe"), // week-based-year! 
+ basic_week_date_time("YYYY'W'wwe'T'HHmmss.SSSX"), // here Elasticsearch uses a different zone format + /** + * @since 5.3 + */ + strict_basic_week_date_time("YYYY'W'wwe'T'HHmmss.SSSX"), // here Elasticsearch uses a different zone format + basic_week_date_time_no_millis("YYYY'W'wwe'T'HHmmssX"), // + /** + * @since 5.3 + */ + strict_basic_week_date_time_no_millis("YYYY'W'wwe'T'HHmmssX"), // + date("uuuu-MM-dd"), // + /** + * @since 5.3 + */ + strict_date("uuuu-MM-dd"), // + date_hour("uuuu-MM-dd'T'HH"), // + /** + * @since 5.3 + */ + strict_date_hour("uuuu-MM-dd'T'HH"), // + date_hour_minute("uuuu-MM-dd'T'HH:mm"), // + /** + * @since 5.3 + */ + strict_date_hour_minute("uuuu-MM-dd'T'HH:mm"), // + date_hour_minute_second("uuuu-MM-dd'T'HH:mm:ss"), // + /** + * @since 5.3 + */ + strict_date_hour_minute_second("uuuu-MM-dd'T'HH:mm:ss"), // + date_hour_minute_second_fraction("uuuu-MM-dd'T'HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_date_hour_minute_second_fraction("uuuu-MM-dd'T'HH:mm:ss.SSS"), // + date_hour_minute_second_millis("uuuu-MM-dd'T'HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_date_hour_minute_second_millis("uuuu-MM-dd'T'HH:mm:ss.SSS"), // + date_optional_time("uuuu-MM-dd['T'HH:mm:ss.SSSXXX]"), // + /** + * @since 5.3 + */ + strict_date_optional_time("uuuu-MM-dd['T'HH:mm:ss.SSSXXX]"), // + strict_date_optional_time_nanos("uuuu-MM-dd['T'HH:mm:ss.SSSSSSXXX]"), // + date_time("uuuu-MM-dd'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_date_time("uuuu-MM-dd'T'HH:mm:ss.SSSXXX"), // + date_time_no_millis("uuuu-MM-dd'T'HH:mm:ssVV"), // here Elasticsearch uses the zone-id in its implementation + /** + * @since 5.3 + */ + strict_date_time_no_millis("uuuu-MM-dd'T'HH:mm:ssVV"), // here Elasticsearch uses the zone-id in its implementation + epoch_millis("epoch_millis"), // + epoch_second("epoch_second"), // + hour("HH"), // + /** + * @since 5.3 + */ + strict_hour("HH"), // + hour_minute("HH:mm"), // + /** + * @since 5.3 + */ + 
strict_hour_minute("HH:mm"), // + hour_minute_second("HH:mm:ss"), // + /** + * @since 5.3 + */ + strict_hour_minute_second("HH:mm:ss"), // + hour_minute_second_fraction("HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_hour_minute_second_fraction("HH:mm:ss.SSS"), // + hour_minute_second_millis("HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_hour_minute_second_millis("HH:mm:ss.SSS"), // + ordinal_date("uuuu-DDD"), // + /** + * @since 5.3 + */ + strict_ordinal_date("uuuu-DDD"), // + ordinal_date_time("uuuu-DDD'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_ordinal_date_time("uuuu-DDD'T'HH:mm:ss.SSSXXX"), // + ordinal_date_time_no_millis("uuuu-DDD'T'HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_ordinal_date_time_no_millis("uuuu-DDD'T'HH:mm:ssXXX"), // + time("HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_time("HH:mm:ss.SSSXXX"), // + time_no_millis("HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_time_no_millis("HH:mm:ssXXX"), // + t_time("'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_t_time("'T'HH:mm:ss.SSSXXX"), // + t_time_no_millis("'T'HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_t_time_no_millis("'T'HH:mm:ssXXX"), // + week_date("YYYY-'W'ww-e"), // + /** + * @since 5.3 + */ + strict_week_date("YYYY-'W'ww-e"), // + week_date_time("YYYY-'W'ww-e'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_week_date_time("YYYY-'W'ww-e'T'HH:mm:ss.SSSXXX"), // + week_date_time_no_millis("YYYY-'W'ww-e'T'HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_week_date_time_no_millis("YYYY-'W'ww-e'T'HH:mm:ssXXX"), // + weekyear(""), // no TemporalAccessor available for these 3 + /** + * @since 5.3 + */ + strict_weekyear(""), // no TemporalAccessor available for these 3 + weekyear_week(""), // + /** + * @since 5.3 + */ + strict_weekyear_week(""), // + weekyear_week_day(""), // + /** + * @since 5.3 + */ + strict_strict_weekyear_week_day(""), // + year("uuuu"), // + /** + * @since 5.3 + */ + 
strict_year("uuuu"), // + year_month("uuuu-MM"), // + /** + * @since 5.3 + */ + strict_year_month("uuuu-MM"), // + year_month_day("uuuu-MM-dd"), // + /** + * @since 5.3 + */ + strict_year_month_day("uuuu-MM-dd"); // + + private final String pattern; + + DateFormat(String pattern) { + this.pattern = pattern; + } + + /** + * @since 4.2 + */ + public String getPattern() { + return pattern; + } } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java index ef32e7e386..1131b2cd59 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java @@ -1,46 +1,132 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.elasticsearch.annotations; - -import java.lang.annotation.*; - -import org.springframework.data.annotation.Persistent; - -/** - * Document - * - * @author Rizwan Idrees - * @author Mohsin Husen - */ - -@Persistent -@Inherited -@Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE}) -public @interface Document { - - String indexName(); - - String type() default ""; - - short shards() default 5; - - short replicas() default 1; - - String refreshInterval() default "1s"; - - String indexStoreType() default "fs"; -} +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.Persistent; + +/** + * Identifies a domain object to be persisted to Elasticsearch. + * + * @author Rizwan Idrees + * @author Mohsin Husen + * @author Mason Chan + * @author Ivan Greene + * @author Mark Paluch + * @author Peter-Josef Meisch + * @author Sascha Woo + */ +@Persistent +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +public @interface Document { + + /** + * Name of the Elasticsearch index. + *
    + *
  • Lowercase only
  • + *
  • Cannot include \, /, *, ?, ", >, <, |, ` ` (space character), ,, #
  • + *
  • Cannot start with -, _, +
  • + *
  • Cannot be . or ..
  • + *
  • Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit + * faster)
  • + *
+ */ + String indexName(); + + /** + * Configuration whether to create an index on repository bootstrapping. + */ + boolean createIndex() default true; + + /** + * If true, the index mapping will be written on repository bootstrapping even when the index already exists. This + * allows for automatically updating the mapping with new properties. Changes on existing properties will lead to an + * error from the Elasticsearch server. + */ + boolean alwaysWriteMapping() default false; + + /** + * Configuration of version management. + */ + VersionType versionType() default VersionType.EXTERNAL; + + /** + * Defines if type hints should be written. See {@link WriteTypeHint}. + * + * @since 4.3 + */ + WriteTypeHint writeTypeHint() default WriteTypeHint.DEFAULT; + + /** + * Controls how Elasticsearch dynamically adds fields to the document. + * + * @since 4.3 + */ + Dynamic dynamic() default Dynamic.INHERIT; + + /** + * Specifies if the id property should also be stored in the Elasticsearch document source. Default value is + * {@literal true} + * + * @since 5.1 + */ + boolean storeIdInSource() default true; + + /** + * Specifies if the version property should also be stored in the Elasticsearch document source. Default value is + * true. + * + * @since 5.1 + */ + boolean storeVersionInSource() default true; + + /** + * Aliases for the index. 
+ * + * @since 5.4 + */ + Alias[] aliases() default {}; + + /** + * @since 4.3 + */ + enum VersionType { + INTERNAL("internal"), // + EXTERNAL("external"), // + EXTERNAL_GTE("external_gte"), // + /** + * @since 4.4 + */ + FORCE("force"); + + private final String esName; + + VersionType(String esName) { + this.esName = esName; + } + + public String getEsName() { + return esName; + } + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Dynamic.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Dynamic.java new file mode 100644 index 0000000000..9868c6e3c6 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Dynamic.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +/** + * Values for the {@code dynamic} mapping parameter. + * + * @author Sascha Woo + * @since 4.3 + */ +public enum Dynamic { + /** + * New fields are added to the mapping. + */ + TRUE("true"), + /** + * New fields are added to the mapping as + * runtime fields. These + * fields are not indexed, and are loaded from {@code _source} at query time. + */ + RUNTIME("runtime"), + /** + * New fields are ignored. These fields will not be indexed or searchable, but will still appear in the + * {@code _source} field of returned hits. 
These fields will not be added to the mapping, and new fields must be added + * explicitly. + */ + FALSE("false"), + /** + * If new fields are detected, an exception is thrown and the document is rejected. New fields must be explicitly + * added to the mapping. + */ + STRICT("strict"), + /** + * Inherit the dynamic setting from their parent object or from the mapping type. + */ + INHERIT("inherit"); + + private final String mappedName; + + Dynamic(String mappedName) { + this.mappedName = mappedName; + } + + public String getMappedName() { + return mappedName; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/DynamicTemplates.java b/src/main/java/org/springframework/data/elasticsearch/annotations/DynamicTemplates.java new file mode 100644 index 0000000000..107ee94be2 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/DynamicTemplates.java @@ -0,0 +1,25 @@ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.Persistent; + +/** + * Elasticsearch dynamic templates mapping. This annotation is handy if you prefer to apply dynamic templates on fields + * with annotation e.g. {@link Field} with type = FieldType.Object etc. instead of static mapping on Document via + * {@link Mapping} annotation. DynamicTemplates annotation is omitted if {@link Mapping} annotation is used. 
+ * + * @author Petr Kukral + */ +@Persistent +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +public @interface DynamicTemplates { + + String mappingPath() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java index f4cec2b380..97815477ae 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java @@ -1,56 +1,255 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.elasticsearch.annotations; - -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * @author Rizwan Idrees - * @author Mohsin Husen - * @author Artur Konczak - * @author Jonathan Yan - * @author Jakub Vavrik - * @author Kevin Leturc - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -@Documented -@Inherited -public @interface Field { - - FieldType type() default FieldType.Auto; - - FieldIndex index() default FieldIndex.analyzed; - - DateFormat format() default DateFormat.none; - - String pattern() default ""; - - boolean store() default false; - - String searchAnalyzer() default ""; - - String indexAnalyzer() default ""; - - String[] ignoreFields() default {}; - - boolean includeInParent() default false; -} +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * @author Rizwan Idrees + * @author Mohsin Husen + * @author Artur Konczak + * @author Jonathan Yan + * @author Jakub Vavrik + * @author Kevin Leturc + * @author Peter-Josef Meisch + * @author Xiao Yu + * @author Aleksei Arsenev + * @author Brian Kimmig + * @author Morgan Lutz + * @author Sascha Woo + * @author Haibo Liu + * @author Andriy Redko + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE, ElementType.METHOD }) +@Documented +@Inherited +public @interface Field { + + /** + * Alias for {@link #name}. + * + * @since 3.2 + */ + @AliasFor("name") + String value() default ""; + + /** + * The name to be used to store the field inside the document. If not set, the name of the annotated property + * is used. 
+ * + * @since 3.2 + */ + @AliasFor("value") + String name() default ""; + + FieldType type() default FieldType.Auto; + + boolean index() default true; + + DateFormat[] format() default { DateFormat.date_optional_time, DateFormat.epoch_millis }; + + String[] pattern() default {}; + + boolean store() default false; + + boolean fielddata() default false; + + String searchAnalyzer() default ""; + + String analyzer() default ""; + + String normalizer() default ""; + + String[] ignoreFields() default {}; + + boolean includeInParent() default false; + + String[] copyTo() default {}; + + /** + * @since 4.0 + */ + int ignoreAbove() default -1; + + /** + * @since 4.0 + */ + boolean coerce() default true; + + /** + * @since 4.0 + */ + boolean docValues() default true; + + /** + * @since 4.0 + */ + boolean ignoreMalformed() default false; + + /** + * @since 4.0 + */ + IndexOptions indexOptions() default IndexOptions.none; + + /** + * @since 4.0 + */ + boolean indexPhrases() default false; + + /** + * implemented as array to enable the empty default value + * + * @since 4.0 + */ + IndexPrefixes[] indexPrefixes() default {}; + + /** + * @since 4.0 + */ + boolean norms() default true; + + /** + * Note that null_value settings are not supported in Elasticsearch for all types. For example setting a null_value on + * a field with type text will throw an exception in the server when the mapping is written to Elasticsearch. Alas, + * the Elasticsearch documentation does not specify on which types it is allowed and on which it is not. 
+ * + * @since 4.0 + */ + String nullValue() default ""; + + /** + * @since 4.0 + */ + int positionIncrementGap() default -1; + + /** + * @since 4.0 + */ + String similarity() default Similarity.Default; + + /** + * @since 4.0 + */ + TermVector termVector() default TermVector.none; + + /** + * @since 4.0 + */ + double scalingFactor() default 1; + + /** + * @since 4.0 + */ + int maxShingleSize() default -1; + + /** + * if true, the field will be stored in Elasticsearch even if it has a null value + * + * @since 4.1 + */ + boolean storeNullValue() default false; + + /** + * to be used in combination with {@link FieldType#Rank_Feature} + * + * @since 4.1 + */ + boolean positiveScoreImpact() default true; + + /** + * to be used in combination with {@link FieldType#Object} + * + * @since 4.1 + */ + boolean enabled() default true; + + /** + * @since 4.1 + */ + boolean eagerGlobalOrdinals() default false; + + /** + * @since 4.1 + */ + NullValueType nullValueType() default NullValueType.String; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 4.2 + */ + int dims() default -1; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 5.4 + */ + String elementType() default FieldElementType.DEFAULT; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 5.4 + */ + KnnSimilarity knnSimilarity() default KnnSimilarity.DEFAULT; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 5.4 + */ + KnnIndexOptions[] knnIndexOptions() default {}; + + /** + * Controls how Elasticsearch dynamically adds fields to the inner object within the document.
+ * To be used in combination with {@link FieldType#Object} or {@link FieldType#Nested} + * + * @since 4.3 + */ + Dynamic dynamic() default Dynamic.INHERIT; + + /** + * marks this field to be excluded from the _source in Elasticsearch + * (https://www.elastic.co/guide/en/elasticsearch/reference/7.15/mapping-source-field.html#include-exclude) + * + * @since 4.3 + */ + boolean excludeFromSource() default false; + + /** + * when this field is a {@link String}, a {@link java.util.Collection} or a {@link java.util.Map} that is empty + * this property controls whether the empty value is sent to Elasticsearch. + * + * @since 5.1 + */ + boolean storeEmptyValue() default true; + + /** + * overrides the field type in the mapping which otherwise will be taken from corresponding {@link FieldType} + * + * @since 5.4 + */ + String mappedTypeName() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldElementType.java b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldElementType.java new file mode 100644 index 0000000000..49271764ba --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldElementType.java @@ -0,0 +1,26 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +/** + * @author Haibo Liu + * @since 5.4 + */ +public final class FieldElementType { + public final static String DEFAULT = ""; + public final static String FLOAT = "float"; + public final static String BYTE = "byte"; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java index d475397491..f701948d6d 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,82 @@ * @author Rizwan Idrees * @author Mohsin Husen * @author Artur Konczak + * @author Zeng Zetang + * @author Peter-Josef Meisch + * @author Aleksei Arsenev + * @author Brian Kimmig + * @author Morgan Lutz */ public enum FieldType { - String, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested, Ip + Auto("auto"), // + Text("text"), // + Keyword("keyword"), // + Long("long"), // + Integer("integer"), // + Short("short"), // + Byte("byte"), // + Double("double"), // + Float("float"), // + Half_Float("half_float"), // + Scaled_Float("scaled_float"), // + Date("date"), // + Date_Nanos("date_nanos"), // + Boolean("boolean"), // + Binary("binary"), // + Integer_Range("integer_range"), // + Float_Range("float_range"), // + Long_Range("long_range"), // 
+ Double_Range("double_range"), // + Date_Range("date_range"), // + Ip_Range("ip_range"), // + Object("object"), // + Nested("nested"), // + Ip("ip"), // + TokenCount("token_count"), // + Percolator("percolator"), // + Flattened("flattened"), // + Search_As_You_Type("search_as_you_type"), // + /** @since 4.1 */ + Rank_Feature("rank_feature"), // + /** @since 4.1 */ + Rank_Features("rank_features"), // + /** since 4.2 */ + Wildcard("wildcard"), // + /** @since 4.2 */ + Dense_Vector("dense_vector"), // + /** + * @since 5.2 + */ + Constant_Keyword("constant_keyword"), // + /** + * @since 5.2 + */ + Alias("alias"), // + /** + * @since 5.2 + */ + Version("version"), // + /** + * @since 5.2 + */ + Murmur3("murmur3"), // + /** + * @since 5.2 + */ + Match_Only_Text("match_only_text"), // + /** + * @since 5.2 + */ + Annotated_Text("annotated_text") // + ; + + private final String mappedName; + + FieldType(String mappedName) { + this.mappedName = mappedName; + } + + public String getMappedName() { + return mappedName; + } } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Filter.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Filter.java new file mode 100644 index 0000000000..7f07df55d1 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Filter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import org.springframework.core.annotation.AliasFor; + +/** + * Query used to limit documents. + * + * @author Youssef Aouichaoui + * @since 5.4 + */ +public @interface Filter { + /** + * @return Query used to limit documents. Alias for {@link #query}. + */ + @AliasFor("query") + String value() default ""; + + /** + * @return Query used to limit documents. Alias for {@link #value}. + */ + @AliasFor("value") + String query() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/GeoPointField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/GeoPointField.java index bdb8025bb9..05695abc98 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/GeoPointField.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/GeoPointField.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/GeoShapeField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/GeoShapeField.java new file mode 100644 index 0000000000..0121b07ee1 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/GeoShapeField.java @@ -0,0 +1,44 @@ +/* + * Copyright 2017-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Lukas Vorisek + * @author Peter-Josef Meisch + * @since 4.1 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Documented +public @interface GeoShapeField { + Orientation orientation() default Orientation.ccw; + + boolean ignoreMalformed() default false; + + boolean ignoreZValue() default true; + + boolean coerce() default false; + + enum Orientation { + right, ccw, counterclockwise, left, cw, clockwise + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Highlight.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Highlight.java new file mode 100644 index 0000000000..30312ab434 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Highlight.java @@ -0,0 +1,36 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Peter-Josef Meisch + * @since 4.0 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +public @interface Highlight { + + HighlightParameters parameters() default @HighlightParameters; + + HighlightField[] fields(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightField.java new file mode 100644 index 0000000000..f6318be98a --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightField.java @@ -0,0 +1,37 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** + * @author Peter-Josef Meisch + * @since 4.0 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +public @interface HighlightField { + + /** + * The name of the field to apply highlighting to. This must be the field name of the entity's property, not the name + * of the field in the index mappings. + */ + String name() default ""; + + HighlightParameters parameters() default @HighlightParameters; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java b/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java new file mode 100644 index 0000000000..d4e8bbfd2b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** + * @author Peter-Josef Meisch + * @author Haibo Liu + * @since 4.0 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +public @interface HighlightParameters { + String boundaryChars() default ""; + + int boundaryMaxScan() default -1; + + String boundaryScanner() default ""; + + String boundaryScannerLocale() default ""; + + /** + * only used for {@link Highlight}s. + */ + String encoder() default ""; + + boolean forceSource() default false; + + String fragmenter() default ""; + + /** + * only used for {@link HighlightField}s. + */ + int fragmentOffset() default -1; + + int fragmentSize() default -1; + + /** + * only used for {@link HighlightField}s. + */ + String[] matchedFields() default {}; + + int noMatchSize() default -1; + + int numberOfFragments() default -1; + + Query highlightQuery() default @Query; + + String order() default ""; + + int phraseLimit() default -1; + + String[] preTags() default {}; + + String[] postTags() default {}; + + boolean requireFieldMatch() default true; + + /** + * only used for {@link Highlight}s. + */ + String tagsSchema() default ""; + + String type() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java b/src/main/java/org/springframework/data/elasticsearch/annotations/IndexOptions.java similarity index 69% rename from src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java rename to src/main/java/org/springframework/data/elasticsearch/annotations/IndexOptions.java index e5642551a1..2de226c7b1 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/IndexOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. 
+ * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,11 +16,9 @@ package org.springframework.data.elasticsearch.annotations; /** - * @author Artur Konczak - * @author Mohsin Husen - * @author Alexander Volz - * @author Dennis Maaß + * @author Peter-Josef Meisch + * @since 4.0 */ -public enum FieldIndex { - not_analyzed, analyzed, no +public enum IndexOptions { + none, docs, freqs, positions, offsets } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java b/src/main/java/org/springframework/data/elasticsearch/annotations/IndexPrefixes.java similarity index 62% rename from src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java rename to src/main/java/org/springframework/data/elasticsearch/annotations/IndexPrefixes.java index 940e6f04f8..01adc8fbb4 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/IndexPrefixes.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,15 @@ */ package org.springframework.data.elasticsearch.annotations; -import java.lang.annotation.*; - -import org.springframework.data.annotation.Persistent; - /** - * Parent - * - * @author Philipp Jardas + * @author Peter-Josef Meisch + * @since 4.0 */ +public @interface IndexPrefixes { + int MIN_DEFAULT = 2; + int MAX_DEFAULT = 2; -@Persistent -@Inherited -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface Parent { + int minChars() default MIN_DEFAULT; - String type(); + int maxChars() default MAX_DEFAULT; } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/IndexedIndexName.java b/src/main/java/org/springframework/data/elasticsearch/annotations/IndexedIndexName.java new file mode 100644 index 0000000000..4d76b97492 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/IndexedIndexName.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to mark a String property of an entity to be filled with the name of the index where the entity was stored + * after it is indexed into Elasticsearch. This can be used when the name of the index is dynamically created or when a + * document was indexed into a write alias. + *

+ * This can not be used to specify the index where an entity should be written to. + * + * @author Peter-Josef Meisch + * @since 5.1 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) +@Documented +@Field(type = FieldType.Auto) // prevents the property being written to the index mapping +public @interface IndexedIndexName { +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/InnerField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/InnerField.java new file mode 100644 index 0000000000..651bf5a825 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/InnerField.java @@ -0,0 +1,182 @@ +/* + * Copyright 2014-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Artur Konczak + * @author Mohsin Husen + * @author Sascha Woo + * @author Xiao Yu + * @author Peter-Josef Meisch + * @author Aleksei Arsenev + * @author Brian Kimmig + * @author Morgan Lutz + * @author Haibo Liu + * @author Andriy Redko + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.ANNOTATION_TYPE) +public @interface InnerField { + + String suffix(); + + FieldType type(); + + boolean index() default true; + + DateFormat[] format() default { DateFormat.date_optional_time, DateFormat.epoch_millis }; + + String[] pattern() default {}; + + boolean store() default false; + + boolean fielddata() default false; + + String searchAnalyzer() default ""; + + String analyzer() default ""; + + String normalizer() default ""; + + /** + * @since 4.0 + */ + int ignoreAbove() default -1; + + /** + * @since 4.0 + */ + boolean coerce() default true; + + /** + * @since 4.0 + */ + boolean docValues() default true; + + /** + * @since 4.0 + */ + boolean ignoreMalformed() default false; + + /** + * @since 4.0 + */ + IndexOptions indexOptions() default IndexOptions.none; + + /** + * @since 4.0 + */ + boolean indexPhrases() default false; + + /** + * implemented as array to enable the empty default value + * + * @since 4.0 + */ + IndexPrefixes[] indexPrefixes() default {}; + + /** + * @since 4.0 + */ + boolean norms() default true; + + /** + * @since 4.0 + */ + String nullValue() default ""; + + /** + * @since 4.0 + */ + int positionIncrementGap() default -1; + + /** + * @since 4.0 + */ + String similarity() default Similarity.Default; + + /** + * @since 4.0 + */ + TermVector termVector() default TermVector.none; + + /** + * @since 4.0 + */ + double scalingFactor() default 1; + + /** + * @since 4.0 + */ + int maxShingleSize() 
default -1; + + /** + * to be used in combination with {@link FieldType#Rank_Feature} + * + * @since 4.1 + */ + boolean positiveScoreImpact() default true; + + /** + * @since 4.1 + */ + boolean eagerGlobalOrdinals() default false; + + /** + * @since 4.1 + */ + NullValueType nullValueType() default NullValueType.String; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 4.2 + */ + int dims() default -1; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 5.4 + */ + String elementType() default FieldElementType.DEFAULT; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 5.4 + */ + KnnSimilarity knnSimilarity() default KnnSimilarity.DEFAULT; + + /** + * to be used in combination with {@link FieldType#Dense_Vector} + * + * @since 5.4 + */ + KnnIndexOptions[] knnIndexOptions() default {}; + + /** + * overrides the field type in the mapping which otherwise will be taken from corresponding {@link FieldType} + * + * @since 5.4 + */ + String mappedTypeName() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/JoinTypeRelation.java b/src/main/java/org/springframework/data/elasticsearch/annotations/JoinTypeRelation.java new file mode 100644 index 0000000000..eb2e1e4623 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/JoinTypeRelation.java @@ -0,0 +1,36 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Subhobrata Dey + * @since 4.1 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.ANNOTATION_TYPE) +public @interface JoinTypeRelation { + + String parent(); + + String[] children(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/JoinTypeRelations.java similarity index 70% rename from src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java rename to src/main/java/org/springframework/data/elasticsearch/annotations/JoinTypeRelations.java index 7a7f001f8c..2004200cf2 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/JoinTypeRelations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,22 @@ */ package org.springframework.data.elasticsearch.annotations; +import java.lang.annotation.Documented; import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * + * @author Subhobrata Dey + * @since 4.1 */ +@Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) -public @interface NestedField { - - String dotSuffix(); - - FieldType type(); - - FieldIndex index() default FieldIndex.analyzed; - - boolean store() default false; - - String searchAnalyzer() default ""; +@Inherited +public @interface JoinTypeRelations { - String indexAnalyzer() default ""; + JoinTypeRelation[] relations(); } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/KnnAlgorithmType.java b/src/main/java/org/springframework/data/elasticsearch/annotations/KnnAlgorithmType.java new file mode 100644 index 0000000000..6110e54be8 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/KnnAlgorithmType.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +/** + * @author Haibo Liu + * @since 5.4 + */ +public enum KnnAlgorithmType { + HNSW("hnsw"), + INT8_HNSW("int8_hnsw"), + FLAT("flat"), + INT8_FLAT("int8_flat"), + DEFAULT(""); + + private final String type; + + KnnAlgorithmType(String type) { + this.type = type; + } + + public String getType() { + return type; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/KnnIndexOptions.java b/src/main/java/org/springframework/data/elasticsearch/annotations/KnnIndexOptions.java new file mode 100644 index 0000000000..56d871d3b5 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/KnnIndexOptions.java @@ -0,0 +1,40 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +/** + * @author Haibo Liu + * @since 5.4 + */ +public @interface KnnIndexOptions { + + KnnAlgorithmType type() default KnnAlgorithmType.DEFAULT; + + /** + * Only applicable to {@link KnnAlgorithmType#HNSW} and {@link KnnAlgorithmType#INT8_HNSW} index types. + */ + int m() default -1; + + /** + * Only applicable to {@link KnnAlgorithmType#HNSW} and {@link KnnAlgorithmType#INT8_HNSW} index types. 
+ */ + int efConstruction() default -1; + + /** + * Only applicable to {@link KnnAlgorithmType#INT8_HNSW} and {@link KnnAlgorithmType#INT8_FLAT} index types. + */ + float confidenceInterval() default -1F; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/KnnSimilarity.java b/src/main/java/org/springframework/data/elasticsearch/annotations/KnnSimilarity.java new file mode 100644 index 0000000000..d03c42a6fd --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/KnnSimilarity.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +/** + * @author Haibo Liu + * @since 5.4 + */ +public enum KnnSimilarity { + L2_NORM("l2_norm"), + DOT_PRODUCT("dot_product"), + COSINE("cosine"), + MAX_INNER_PRODUCT("max_inner_product"), + DEFAULT(""); + + private final String similarity; + + KnnSimilarity(String similarity) { + this.similarity = similarity; + } + + public String getSimilarity() { + return similarity; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Mapping.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Mapping.java index 122251ee65..c2d48c3884 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/Mapping.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Mapping.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,73 @@ */ package org.springframework.data.elasticsearch.annotations; -import java.lang.annotation.*; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + import org.springframework.data.annotation.Persistent; /** * Elasticsearch Mapping * * @author Mohsin Husen + * @author Peter-Josef Meisch */ @Persistent @Inherited @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE}) +@Target({ ElementType.TYPE, ElementType.FIELD }) public @interface Mapping { String mappingPath() default ""; + /** + * whether mappings are enabled + * + * @since 4.2 + */ + boolean enabled() default true; + + /** + * whether date_detection is enabled + * + * @since 4.3 + */ + Detection dateDetection() default Detection.DEFAULT; + + /** + * whether numeric_detection is enabled + * + * @since 4.3 + */ + Detection numericDetection() default Detection.DEFAULT; + + /** + * custom dynamic date formats + * + * @since 4.3 + */ + String[] dynamicDateFormats() default {}; + + /** + * classpath to a JSON file containing the values for a runtime mapping definition. The file must contain the JSON + * object that is written as the value of the runtime property. 
{@see elasticsearch doc} + * + * @since 4.3 + */ + String runtimeFieldsPath() default ""; + + /** + * field alias definitions to be written to the index mapping + * + * @since 5.3 + */ + MappingAlias[] aliases() default {}; + + enum Detection { + DEFAULT, TRUE, FALSE + } } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/MappingAlias.java b/src/main/java/org/springframework/data/elasticsearch/annotations/MappingAlias.java new file mode 100644 index 0000000000..791659e9d5 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/MappingAlias.java @@ -0,0 +1,45 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Defines a field alias in the index mapping. + * + * @author Peter-Josef Meisch + * @since 5.3 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Documented +@Inherited +public @interface MappingAlias { + /** + * the name of the alias. + */ + String name(); + + /** + * the path of the alias. 
+ */ + String path(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java index 41c781e80c..9dff38c1f1 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,26 @@ */ package org.springframework.data.elasticsearch.annotations; -import java.lang.annotation.*; +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * @author Rizwan Idrees * @author Mohsin Husen * @author Artur Konczak * @author Jonathan Yan + * @author Xiao Yu + * @author Peter-Josef Meisch */ @Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE, ElementType.METHOD }) @Documented public @interface MultiField { - public Field mainField(); + Field mainField(); - public NestedField[] otherFields() default {}; + InnerField[] otherFields() default {}; } diff --git a/src/main/java/org/springframework/data/elasticsearch/core/facet/FacetType.java b/src/main/java/org/springframework/data/elasticsearch/annotations/NullValueType.java similarity index 65% rename from 
src/main/java/org/springframework/data/elasticsearch/core/facet/FacetType.java rename to src/main/java/org/springframework/data/elasticsearch/annotations/NullValueType.java index c913ee107f..a131b12a8e 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/facet/FacetType.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/NullValueType.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2020-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,14 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.elasticsearch.core.facet; +package org.springframework.data.elasticsearch.annotations; /** - * @author Artur Konczak - * @author Petar Tahchiev + * @author Peter-Josef Meisch + * @since 4.1 */ -public enum FacetType { - - term, range, histogram, statistical - +public enum NullValueType { + String, Integer, Long, Double } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java index 3c91cdf293..9f1b755c35 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java @@ -1,45 +1,62 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.elasticsearch.annotations; - -import java.lang.annotation.*; - -/** - * Query - * - * @author Rizwan Idrees - * @author Mohsin Husen - */ - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) -@Documented -public @interface Query { - - /** - * Elasticsearch query to be used when executing query. May contain placeholders eg. ?0 - * - * @return - */ - String value() default ""; - - /** - * Named Query Named looked up by repository. - * - * @return - */ - String name() default ""; -} +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.QueryAnnotation; + +/** + * Query + * + * @author Rizwan Idrees + * @author Mohsin Husen + * @author Peter-Josef Meisch + * @author Steven Pearce + */ + +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@QueryAnnotation +public @interface Query { + + /** + * @return Elasticsearch query to be used when executing query. May contain placeholders eg. ?0. Alias for query. + */ + @AliasFor("query") + String value() default ""; + + /** + * @return Elasticsearch query to be used when executing query. May contain placeholders eg. ?0. Alias for value + * @since 5.0 + */ + @AliasFor("value") + String query() default ""; + + /** + * Returns whether the query defined should be executed as count projection. + * + * @return {@literal false} by default. + * @since 4.2 + */ + boolean count() default false; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Routing.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Routing.java new file mode 100644 index 0000000000..ea71e50564 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Routing.java @@ -0,0 +1,43 @@ +/* + * Copyright2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.Persistent; + +/** + * Annotation to enable custom routing values for an entity. + * + * @author Peter-Josef Meisch + * @since 4.2 + */ +@Persistent +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +public @interface Routing { + + /** + * defines how the routing is determined. Can be either the name of a property or a SpEL expression. See the reference + * documentation for examples how to use this annotation. + */ + String value(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/ScriptedField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/ScriptedField.java new file mode 100644 index 0000000000..cc596c54f3 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/ScriptedField.java @@ -0,0 +1,20 @@ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.*; + +/** + * Marks a property to be populated with the result of a scripted field retrieved from an Elasticsearch response. + * @author Ryan Murfitt + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Documented +public @interface ScriptedField { + + /** + * (Optional) The name of the scripted field. 
Defaults to + * the field name. + */ + String name() default ""; + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/SearchTemplateQuery.java b/src/main/java/org/springframework/data/elasticsearch/annotations/SearchTemplateQuery.java new file mode 100644 index 0000000000..f50675d979 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/SearchTemplateQuery.java @@ -0,0 +1,42 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import org.springframework.data.annotation.QueryAnnotation; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to mark a repository method as a search template method. The annotation defines the search template id, + * the parameters for the search template are taken from the method's arguments. + * + * @author P.J. Meisch (pj.meisch@sothawo.com) + * @since 5.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@QueryAnnotation +public @interface SearchTemplateQuery { + /** + * The id of the search template. Must not be empt or null. 
+ */ + String id(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Setting.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Setting.java index c2d746f0ff..926154f1f2 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/Setting.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Setting.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,11 @@ */ package org.springframework.data.elasticsearch.annotations; -import java.lang.annotation.*; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import org.springframework.data.annotation.Persistent; @@ -23,14 +27,84 @@ * Elasticsearch Setting * * @author Mohsin Husen + * @author Peter-Josef Meisch */ @Persistent @Inherited @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE}) +@Target({ ElementType.TYPE }) public @interface Setting { + /** + * Resource path for a settings configuration + */ String settingPath() default ""; + /** + * Use server-side settings when creating the index. + */ + boolean useServerConfiguration() default false; + + /** + * Number of shards for the index. Used for index creation.
+ * With version 4.0, the default value is changed from 5 to 1 to reflect the change in the default settings of + * Elasticsearch which changed to 1 as well in Elasticsearch 7.0. + */ + short shards() default 1; + + /** + * Number of replicas for the index. Used for index creation. + */ + short replicas() default 1; + + /** + * Refresh interval for the index. Used for index creation. + */ + String refreshInterval() default "1s"; + + /** + * Index storage type for the index. Used for index creation. + */ + String indexStoreType() default "fs"; + + /** + * fields to define an index sorting + * + * @since 4.2 + */ + String[] sortFields() default {}; + + /** + * defines the order for {@link #sortFields()}. If present, it must have the same number of elements + * + * @since 4.2 + */ + SortOrder[] sortOrders() default {}; + + /** + * defines the mode for {@link #sortFields()}. If present, it must have the same number of elements + * + * @since 4.2 + */ + SortMode[] sortModes() default {}; + + /** + * defines the missing value for {@link #sortFields()}. If present, it must have the same number of elements + * + * @since 4.2 + */ + SortMissing[] sortMissingValues() default {}; + + enum SortOrder { + asc, desc + } + + enum SortMode { + min, max + } + + enum SortMissing { + _last, _first + } } diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Similarity.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Similarity.java new file mode 100644 index 0000000000..46cafd91a2 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Similarity.java @@ -0,0 +1,27 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +/** + * @author Peter-Josef Meisch + * @since 4.0 + */ +public final class Similarity { + public final static String Default = "default"; + public final static String BM25 = "BM25"; + public final static String classic = "classic"; + public final static String Boolean = "boolean"; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/SourceFilters.java b/src/main/java/org/springframework/data/elasticsearch/annotations/SourceFilters.java new file mode 100644 index 0000000000..055ecc616f --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/SourceFilters.java @@ -0,0 +1,73 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation can be placed on repository methods to define the properties that should be requested from + * Elasticsearch when the method is run. + * + * @author Alexander Torres + * @author Peter-Josef Meisch + * @since 5.0 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface SourceFilters { + + /** + * Properties to be requested from Elasticsearch to be included in the response. These can be passed in as literals + * like + * + *

+	 * {@code @SourceFilters(includes = {"property1", "property2"})}
+	 * 
+ * + * or as a parameterized value + * + *
+	 * {@code @SourceFilters(includes = "?0")}
+	 * 
+ * + * when the list of properties is passed as a function parameter. + */ + String[] includes() default ""; + + /** + * Properties to be requested from Elasticsearch to be excluded in the response. These can be passed in as literals + * like + * + *
+	 * {@code @SourceFilters(excludes = {"property1", "property2"})}
+	 * 
+ * + * or as a parameterized value + * + *
+	 * {@code @SourceFilters(excludes = "?0")}
+	 * 
+ * + * when the list of properties is passed as a function parameter. + */ + String[] excludes() default ""; +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/TermVector.java b/src/main/java/org/springframework/data/elasticsearch/annotations/TermVector.java new file mode 100644 index 0000000000..25de2cbcad --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/TermVector.java @@ -0,0 +1,24 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +/** + * @author Peter-Josef Meisch + * @since 4.0 + */ +public enum TermVector { + none, no, yes, with_positions, with_offsets, with_positions_offsets, with_positions_payloads, with_positions_offsets_payloads +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/ValueConverter.java b/src/main/java/org/springframework/data/elasticsearch/annotations/ValueConverter.java new file mode 100644 index 0000000000..eb848bfed2 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/ValueConverter.java @@ -0,0 +1,48 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.elasticsearch.core.mapping.PropertyValueConverter; + +/** + * Annotation to put on a property of an entity to define a value converter which can convert the property to a type + * that Elasticsearch understands and back. + * + * @author Peter-Josef Meisch + * @since 4.3 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) +@Documented +@Inherited +public @interface ValueConverter { + + /** + * Defines the class implementing the {@link PropertyValueConverter} interface. If this is a normal class, it must + * provide a default constructor with no arguments. If this is an enum and thus implementing a singleton by enum it + * must only have one enum value. + * + * @return the class to use for conversion + */ + Class value(); +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/WriteOnlyProperty.java b/src/main/java/org/springframework/data/elasticsearch/annotations/WriteOnlyProperty.java new file mode 100644 index 0000000000..7704450e26 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/WriteOnlyProperty.java @@ -0,0 +1,35 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to mark a property that will be written to Elasticsearch, but not set when reading from Elasticsearch. + * This is needed for synthesized fields that may be used for search but that are not available in the entity. + * + * @author Peter-Josef Meisch + * @since 5.0 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.FIELD }) +@Documented +public @interface WriteOnlyProperty { +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/WriteTypeHint.java b/src/main/java/org/springframework/data/elasticsearch/annotations/WriteTypeHint.java new file mode 100644 index 0000000000..86a844cc18 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/WriteTypeHint.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.annotations; + +import org.springframework.data.mapping.context.MappingContext; + +/** + * Defines if type hints should be written. Used by {@link Document} annotation. + * + * @author Peter-Josef Meisch + * @since 4.3 + */ +public enum WriteTypeHint { + + /** + * Use the global settings from the {@link MappingContext}. + */ + DEFAULT, + /** + * Always write type hints for the entity. + */ + TRUE, + /** + * Never write type hints for the entity. + */ + FALSE +} diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/package-info.java b/src/main/java/org/springframework/data/elasticsearch/annotations/package-info.java new file mode 100644 index 0000000000..4b8ccdf64e --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/package-info.java @@ -0,0 +1,2 @@ +@org.jspecify.annotations.NullMarked +package org.springframework.data.elasticsearch.annotations; diff --git a/src/main/java/org/springframework/data/elasticsearch/aot/ElasticsearchAotPredicates.java b/src/main/java/org/springframework/data/elasticsearch/aot/ElasticsearchAotPredicates.java new file mode 100644 index 0000000000..c3921b8940 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/aot/ElasticsearchAotPredicates.java @@ -0,0 +1,35 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.aot; + +import java.util.function.Predicate; + +import org.springframework.data.util.ReactiveWrappers; + +/** + * @author Peter-Josef Meisch + * @since 5.1 + */ +public class ElasticsearchAotPredicates { + + public static final Predicate IS_REACTIVE_LIBRARY_AVAILABLE = ( + lib) -> ReactiveWrappers.isAvailable(lib); + + public static boolean isReactorPresent() { + return IS_REACTIVE_LIBRARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/aot/SpringDataElasticsearchRuntimeHints.java b/src/main/java/org/springframework/data/elasticsearch/aot/SpringDataElasticsearchRuntimeHints.java new file mode 100644 index 0000000000..100b2ae449 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/aot/SpringDataElasticsearchRuntimeHints.java @@ -0,0 +1,71 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.aot; + +import static org.springframework.data.elasticsearch.aot.ElasticsearchAotPredicates.*; + +import java.util.Arrays; + +import org.jspecify.annotations.Nullable; +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.elasticsearch.client.elc.EntityAsMap; +import org.springframework.data.elasticsearch.core.event.AfterConvertCallback; +import org.springframework.data.elasticsearch.core.event.AfterLoadCallback; +import org.springframework.data.elasticsearch.core.event.AfterSaveCallback; +import org.springframework.data.elasticsearch.core.event.BeforeConvertCallback; +import org.springframework.data.elasticsearch.core.event.ReactiveAfterConvertCallback; +import org.springframework.data.elasticsearch.core.event.ReactiveAfterLoadCallback; +import org.springframework.data.elasticsearch.core.event.ReactiveAfterSaveCallback; +import org.springframework.data.elasticsearch.core.event.ReactiveBeforeConvertCallback; + +/** + * @author Peter-Josef Meisch + * @since 5.1 + */ +public class SpringDataElasticsearchRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + hints.reflection().registerTypes( // + Arrays.asList( // + TypeReference.of(AfterConvertCallback.class), // + TypeReference.of(AfterLoadCallback.class), // + TypeReference.of(AfterSaveCallback.class), // + TypeReference.of(BeforeConvertCallback.class), // + TypeReference.of(EntityAsMap.class) // + ), // + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + if (isReactorPresent()) { + hints.reflection().registerTypes( // + Arrays.asList( // + TypeReference.of(ReactiveAfterConvertCallback.class), // + 
TypeReference.of(ReactiveAfterLoadCallback.class), // + TypeReference.of(ReactiveAfterSaveCallback.class), // + TypeReference.of(ReactiveBeforeConvertCallback.class) // + ), // + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + + // properties needed to log the different versions + hints.resources().registerPattern("versions.properties"); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/aot/package-info.java b/src/main/java/org/springframework/data/elasticsearch/aot/package-info.java new file mode 100644 index 0000000000..56697c1029 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/aot/package-info.java @@ -0,0 +1,2 @@ +@org.jspecify.annotations.NullMarked +package org.springframework.data.elasticsearch.aot; diff --git a/src/main/java/org/springframework/data/elasticsearch/client/ClientConfiguration.java b/src/main/java/org/springframework/data/elasticsearch/client/ClientConfiguration.java new file mode 100644 index 0000000000..f092e2bf6b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/ClientConfiguration.java @@ -0,0 +1,390 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client; + +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.time.Duration; +import java.util.List; +import java.util.Optional; +import java.util.function.Supplier; + +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.support.HttpHeaders; + +/** + * Configuration interface exposing common client configuration properties for Elasticsearch clients. + * + * @author Mark Paluch + * @author Peter-Josef Meisch + * @author Huw Ayling-Miller + * @author Henrique Amaral + * @since 3.2 + */ +public interface ClientConfiguration { + + /** + * Creates a new {@link ClientConfigurationBuilder} instance. + * + * @return a new {@link ClientConfigurationBuilder} instance. + */ + static ClientConfigurationBuilderWithRequiredEndpoint builder() { + return new ClientConfigurationBuilder(); + } + + /** + * Creates a new {@link ClientConfiguration} instance configured to localhost. + * + *
+	 * // "localhost:9200"
+	 * ClientConfiguration configuration = ClientConfiguration.localhost();
+	 * 
+ * + * @return a new {@link ClientConfiguration} instance + * @see ClientConfigurationBuilder#connectedToLocalhost() + */ + static ClientConfiguration localhost() { + return new ClientConfigurationBuilder().connectedToLocalhost().build(); + } + + /** + * Creates a new {@link ClientConfiguration} instance configured to a single host given {@code hostAndPort}. For + * example given the endpoint http://localhost:9200 + * + *
+	 * ClientConfiguration configuration = ClientConfiguration.create("localhost:9200");
+	 * 
+ * + * @return a new {@link ClientConfigurationBuilder} instance. + */ + static ClientConfiguration create(String hostAndPort) { + return new ClientConfigurationBuilder().connectedTo(hostAndPort).build(); + } + + /** + * Creates a new {@link ClientConfiguration} instance configured to a single host given {@link InetSocketAddress}. For + * example given the endpoint http://localhost:9200 + * + *
+	 * ClientConfiguration configuration = ClientConfiguration
+	 * 		.create(InetSocketAddress.createUnresolved("localhost", 9200));
+	 * 
+ * + * @return a new {@link ClientConfigurationBuilder} instance. + */ + static ClientConfiguration create(InetSocketAddress socketAddress) { + return new ClientConfigurationBuilder().connectedTo(socketAddress).build(); + } + + /** + * Returns the configured endpoints. + * + * @return the configured endpoints. + */ + List getEndpoints(); + + /** + * Obtain the {@link HttpHeaders} to be used by default. + * + * @return the {@link HttpHeaders} to be used by default. + */ + HttpHeaders getDefaultHeaders(); + + /** + * Returns {@literal true} when the client should use SSL. + * + * @return {@literal true} when the client should use SSL. + */ + boolean useSsl(); + + /** + * Returns the {@link SSLContext} to use. Can be {@link Optional#empty()} if not configured. + * + * @return the {@link SSLContext} to use. Can be {@link Optional#empty()} if not configured. + */ + Optional getSslContext(); + + /** + * @return the optional SHA-256 fingerprint of the self-signed http_ca.crt certificate output by Elasticsearch at + * startup time. + */ + Optional getCaFingerprint(); + + /** + * Returns the {@link HostnameVerifier} to use. Can be {@link Optional#empty()} if not configured. + * + * @return the {@link HostnameVerifier} to use. Can be {@link Optional#empty()} if not configured. + */ + Optional getHostNameVerifier(); + + /** + * Returns the {@link java.time.Duration connect timeout}. + * + * @see java.net.Socket#connect(SocketAddress, int) + */ + Duration getConnectTimeout(); + + /** + * Returns the {@link java.time.Duration socket timeout} which is typically applied as SO-timeout/read timeout. + * + * @see java.net.Socket#setSoTimeout(int) + */ + Duration getSocketTimeout(); + + /** + * Returns the path prefix that should be prepended to HTTP(s) requests for Elasticsearch behind a proxy. + * + * @return the path prefix. 
+ * @since 4.0 + */ + @Nullable + String getPathPrefix(); + + /** + * returns an optionally set proxy in the form host:port + * + * @return the optional proxy + * @since 4.0 + */ + Optional getProxy(); + + /** + * @return the client configuration callbacks + * @since 4.3 + */ + List> getClientConfigurers(); + + /** + * @return the supplier for custom headers. + */ + Supplier getHeadersSupplier(); + + /** + * @author Christoph Strobl + */ + interface ClientConfigurationBuilderWithRequiredEndpoint { + + /** + * @param hostAndPort the {@literal host} and {@literal port} formatted as String {@literal host:port}. + * @return the {@link MaybeSecureClientConfigurationBuilder}. + */ + default MaybeSecureClientConfigurationBuilder connectedTo(String hostAndPort) { + return connectedTo(new String[] { hostAndPort }); + } + + /** + * @param hostAndPorts the list of {@literal host} and {@literal port} combinations formatted as String + * {@literal host:port}. + * @return the {@link MaybeSecureClientConfigurationBuilder}. + */ + MaybeSecureClientConfigurationBuilder connectedTo(String... hostAndPorts); + + /** + * @param endpoint the {@literal host} and {@literal port}. + * @return the {@link MaybeSecureClientConfigurationBuilder}. + */ + default MaybeSecureClientConfigurationBuilder connectedTo(InetSocketAddress endpoint) { + return connectedTo(new InetSocketAddress[] { endpoint }); + } + + /** + * @param endpoints the list of {@literal host} and {@literal port} combinations. + * @return the {@link MaybeSecureClientConfigurationBuilder}. + */ + MaybeSecureClientConfigurationBuilder connectedTo(InetSocketAddress... endpoints); + + /** + * Obviously for testing. + * + * @return the {@link MaybeSecureClientConfigurationBuilder}. 
+ */ + default MaybeSecureClientConfigurationBuilder connectedToLocalhost() { + return connectedTo("localhost:9200"); + } + } + + /** + * @author Christoph Strobl + */ + interface MaybeSecureClientConfigurationBuilder extends TerminalClientConfigurationBuilder { + + /** + * Connect via {@literal https}
+ * NOTE You need to leave out the protocol in + * {@link ClientConfigurationBuilderWithRequiredEndpoint#connectedTo(String)}. + * + * @return the {@link TerminalClientConfigurationBuilder}. + */ + TerminalClientConfigurationBuilder usingSsl(); + + /** + * Connects using https if flag is true. + * + * @param flag whether to use https in the connection + * @return the {@link TerminalClientConfigurationBuilder} + * @since 5.3 + */ + TerminalClientConfigurationBuilder usingSsl(boolean flag); + + /** + * Connect via {@literal https} using the given {@link SSLContext}.
+ * NOTE You need to leave out the protocol in + * {@link ClientConfigurationBuilderWithRequiredEndpoint#connectedTo(String)}. + * + * @return the {@link TerminalClientConfigurationBuilder}. + */ + TerminalClientConfigurationBuilder usingSsl(SSLContext sslContext); + + /** + * Connect via {@literal https} using the givens {@link SSLContext} and HostnameVerifier {@link HostnameVerifier} + * .
+ * NOTE You need to leave out the protocol in + * {@link ClientConfigurationBuilderWithRequiredEndpoint#connectedTo(String)}. + * + * @return the {@link TerminalClientConfigurationBuilder}. + */ + TerminalClientConfigurationBuilder usingSsl(SSLContext sslContext, HostnameVerifier hostnameVerifier); + + /** + * Connect via https using a SSLContext that is build from the given certificate fingerprint. + * + * @param caFingerprint the SHA-256 fingerprint of the self-signed http_ca.crt certificate output by Elasticsearch + * at startup time. + * @return the {@link TerminalClientConfigurationBuilder}. + */ + TerminalClientConfigurationBuilder usingSsl(String caFingerprint); + } + + /** + * @author Christoph Strobl + * @author Mark Paluch + */ + interface TerminalClientConfigurationBuilder { + + /** + * @param defaultHeaders must not be {@literal null}. + * @return the {@link TerminalClientConfigurationBuilder} + */ + TerminalClientConfigurationBuilder withDefaultHeaders(HttpHeaders defaultHeaders); + + /** + * Configure the {@literal milliseconds} for the connect-timeout. + * + * @param millis the timeout to use. + * @return the {@link TerminalClientConfigurationBuilder} + * @see #withConnectTimeout(Duration) + */ + default TerminalClientConfigurationBuilder withConnectTimeout(long millis) { + return withConnectTimeout(Duration.ofMillis(millis)); + } + + /** + * Configure a {@link java.time.Duration} connect timeout. + * + * @param timeout the timeout to use. Must not be {@literal null}. + * @return the {@link TerminalClientConfigurationBuilder} + * @see java.net.Socket#connect(SocketAddress, int) + */ + TerminalClientConfigurationBuilder withConnectTimeout(Duration timeout); + + /** + * Configure the {@literal milliseconds} for the socket timeout. + * + * @param millis the timeout to use. 
+ * @return the {@link TerminalClientConfigurationBuilder} + * @see #withSocketTimeout(Duration) + */ + default TerminalClientConfigurationBuilder withSocketTimeout(long millis) { + return withSocketTimeout(Duration.ofMillis(millis)); + } + + /** + * Configure a {@link java.time.Duration socket timeout} which is typically applied as SO-timeout/read timeout. + * + * @param timeout the timeout to use. Must not be {@literal null}. + * @return the {@link TerminalClientConfigurationBuilder} + * @see java.net.Socket#setSoTimeout(int) + */ + TerminalClientConfigurationBuilder withSocketTimeout(Duration timeout); + + /** + * Configure the username and password to be sent as a Basic Authentication header + * + * @param username the username. Must not be {@literal null}. + * @param password the password. Must not be {@literal null}. + * @return the {@link TerminalClientConfigurationBuilder} + */ + TerminalClientConfigurationBuilder withBasicAuth(String username, String password); + + /** + * Configure the path prefix that will be prepended to any HTTP(s) requests + * + * @param pathPrefix the pathPrefix. + * @return the {@link TerminalClientConfigurationBuilder} + * @since 4.0 + */ + TerminalClientConfigurationBuilder withPathPrefix(String pathPrefix); + + /** + * @param proxy a proxy formatted as String {@literal host:port}. + * @return the {@link TerminalClientConfigurationBuilder}. + */ + TerminalClientConfigurationBuilder withProxy(String proxy); + + /** + * Register a {@link ClientConfigurationCallback} to configure the client. + * + * @param clientConfigurer configuration callback, must not be {@literal null}. + * @return the {@link TerminalClientConfigurationBuilder}. + * @since 4.3 + */ + TerminalClientConfigurationBuilder withClientConfigurer(ClientConfigurationCallback clientConfigurer); + + /** + * set a supplier for custom headers. This is invoked for every HTTP request to Elasticsearch to retrieve headers + * that should be sent with the request. 
A common use case is passing in authentication headers that may change. + *
+ * Note: When used in a reactive environment, the calling of {@link Supplier#get()} function must not do any + * blocking operations. It may return {@literal null}. + * + * @param headers supplier function for headers, must not be {@literal null} + * @return the {@link TerminalClientConfigurationBuilder}. + * @since 4.0 + */ + TerminalClientConfigurationBuilder withHeaders(Supplier headers); + + /** + * Build the {@link ClientConfiguration} object. + * + * @return the {@link ClientConfiguration} object. + */ + ClientConfiguration build(); + } + + /** + * Callback to be executed to configure a client. + * + * @param the type of the client configuration class. + * @since 4.3 + */ + @FunctionalInterface + interface ClientConfigurationCallback { + T configure(T clientConfigurer); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/ClientConfigurationBuilder.java b/src/main/java/org/springframework/data/elasticsearch/client/ClientConfigurationBuilder.java new file mode 100644 index 0000000000..71af992127 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/ClientConfigurationBuilder.java @@ -0,0 +1,257 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client; + +import java.net.InetSocketAddress; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Supplier; + +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationBuilderWithRequiredEndpoint; +import org.springframework.data.elasticsearch.client.ClientConfiguration.MaybeSecureClientConfigurationBuilder; +import org.springframework.data.elasticsearch.client.ClientConfiguration.TerminalClientConfigurationBuilder; +import org.springframework.data.elasticsearch.support.HttpHeaders; +import org.springframework.util.Assert; + +/** + * Default builder implementation for {@link ClientConfiguration}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Peter-Josef Meisch + * @author Huw Ayling-Miller + * @author Henrique Amaral + * @since 3.2 + */ +class ClientConfigurationBuilder + implements ClientConfigurationBuilderWithRequiredEndpoint, MaybeSecureClientConfigurationBuilder { + + private final List hosts = new ArrayList<>(); + private HttpHeaders headers = new HttpHeaders(); + private boolean useSsl; + @Nullable private SSLContext sslContext; + @Nullable private String caFingerprint; + @Nullable private HostnameVerifier hostnameVerifier; + private Duration connectTimeout = Duration.ofSeconds(10); + private Duration soTimeout = Duration.ofSeconds(5); + @Nullable private String username; + @Nullable private String password; + @Nullable private String pathPrefix; + @Nullable private String proxy; + private Supplier headersSupplier = HttpHeaders::new; + private final List> clientConfigurers = new ArrayList<>(); + + /* + * (non-Javadoc) + * @see 
org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationBuilderWithRequiredEndpoint#connectedTo(java.lang.String[]) + */ + @Override + public MaybeSecureClientConfigurationBuilder connectedTo(String... hostAndPorts) { + + Assert.notEmpty(hostAndPorts, "At least one host is required"); + + this.hosts.addAll(Arrays.stream(hostAndPorts).map(ClientConfigurationBuilder::parse).toList()); + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationBuilderWithRequiredEndpoint#connectedTo(java.net.InetSocketAddress[]) + */ + @Override + public MaybeSecureClientConfigurationBuilder connectedTo(InetSocketAddress... endpoints) { + + Assert.notEmpty(endpoints, "At least one endpoint is required"); + + this.hosts.addAll(Arrays.asList(endpoints)); + + return this; + } + + @Override + public MaybeSecureClientConfigurationBuilder withProxy(String proxy) { + Assert.hasLength(proxy, "proxy must not be null or empty"); + this.proxy = proxy; + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.MaybeSecureClientConfigurationBuilder#usingSsl() + */ + @Override + public TerminalClientConfigurationBuilder usingSsl() { + + this.useSsl = true; + return this; + } + + @Override + public TerminalClientConfigurationBuilder usingSsl(boolean flag) { + + this.useSsl = flag; + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.MaybeSecureClientConfigurationBuilder#usingSsl(javax.net.ssl.SSLContext) + */ + @Override + public TerminalClientConfigurationBuilder usingSsl(SSLContext sslContext) { + + Assert.notNull(sslContext, "SSL Context must not be null"); + + this.useSsl = true; + this.sslContext = sslContext; + return this; + } + + /* + * (non-Javadoc) + * @see 
org.springframework.data.elasticsearch.client.ClientConfiguration.MaybeSecureClientConfigurationBuilder#usingSsl(javax.net.ssl.SSLContext, javax.net.ssl.HostnameVerifier) + */ + @Override + public TerminalClientConfigurationBuilder usingSsl(SSLContext sslContext, HostnameVerifier hostnameVerifier) { + + Assert.notNull(sslContext, "SSL Context must not be null"); + Assert.notNull(hostnameVerifier, "Host Name Verifier must not be null"); + + this.useSsl = true; + this.sslContext = sslContext; + this.hostnameVerifier = hostnameVerifier; + return this; + } + + @Override + public TerminalClientConfigurationBuilder usingSsl(String caFingerprint) { + + Assert.notNull(caFingerprint, "caFingerprint must not be null"); + + this.useSsl = true; + this.caFingerprint = caFingerprint; + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.TerminalClientConfigurationBuilder#withDefaultHeaders(org.springframework.http.HttpHeaders) + */ + @Override + public TerminalClientConfigurationBuilder withDefaultHeaders(HttpHeaders defaultHeaders) { + + Assert.notNull(defaultHeaders, "Default HTTP headers must not be null"); + + this.headers = new HttpHeaders(); + this.headers.addAll(defaultHeaders); + + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.TerminalClientConfigurationBuilder#withConnectTimeout(java.time.Duration) + */ + @Override + public TerminalClientConfigurationBuilder withConnectTimeout(Duration timeout) { + + Assert.notNull(timeout, "I/O timeout must not be null!"); + + this.connectTimeout = timeout; + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.TerminalClientConfigurationBuilder#withTimeout(java.time.Duration) + */ + @Override + public TerminalClientConfigurationBuilder withSocketTimeout(Duration timeout) { + + Assert.notNull(timeout, "Socket timeout must not be null!"); + 
+ this.soTimeout = timeout; + return this; + } + + @Override + public TerminalClientConfigurationBuilder withBasicAuth(String username, String password) { + + Assert.notNull(username, "username must not be null"); + Assert.notNull(password, "password must not be null"); + + this.username = username; + this.password = password; + + return this; + } + + @Override + public TerminalClientConfigurationBuilder withPathPrefix(String pathPrefix) { + this.pathPrefix = pathPrefix; + + return this; + } + + @Override + public TerminalClientConfigurationBuilder withClientConfigurer( + ClientConfiguration.ClientConfigurationCallback clientConfigurer) { + + Assert.notNull(clientConfigurer, "clientConfigurer must not be null"); + + this.clientConfigurers.add(clientConfigurer); + return this; + } + + @Override + public TerminalClientConfigurationBuilder withHeaders(Supplier headers) { + + Assert.notNull(headers, "headersSupplier must not be null"); + + this.headersSupplier = headers; + return this; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationBuilderWithOptionalDefaultHeaders#build() + */ + @Override + public ClientConfiguration build() { + + if (username != null && password != null) { + headers.setBasicAuth(username, password); + } + + if (sslContext != null && caFingerprint != null) { + throw new IllegalArgumentException("Either SSLContext or caFingerprint must be set, but not both"); + } + + return new DefaultClientConfiguration(hosts, headers, useSsl, sslContext, caFingerprint, soTimeout, connectTimeout, + pathPrefix, hostnameVerifier, proxy, clientConfigurers, headersSupplier); + } + + private static InetSocketAddress parse(String hostAndPort) { + return InetSocketAddressParser.parse(hostAndPort, ElasticsearchHost.DEFAULT_PORT); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/DefaultClientConfiguration.java 
/*
 * Copyright 2018-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.elasticsearch.client;

import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;

import org.jspecify.annotations.Nullable;
import org.springframework.data.elasticsearch.support.HttpHeaders;

/**
 * Default {@link ClientConfiguration} implementation. An immutable value holder for the state collected by the
 * configuration builder; both list-typed properties are defensively copied on construction.
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Huw Ayling-Miller
 * @author Peter-Josef Meisch
 * @since 3.2
 */
class DefaultClientConfiguration implements ClientConfiguration {

    private final List<InetSocketAddress> hosts;
    private final HttpHeaders headers;
    private final boolean useSsl;
    @Nullable private final SSLContext sslContext;
    @Nullable private final String caFingerprint;
    private final Duration soTimeout;
    private final Duration connectTimeout;
    @Nullable private final String pathPrefix;
    @Nullable private final HostnameVerifier hostnameVerifier;
    @Nullable private final String proxy;
    private final Supplier<HttpHeaders> headersSupplier;
    private final List<ClientConfigurationCallback<?>> clientConfigurers;

    DefaultClientConfiguration(List<InetSocketAddress> hosts, HttpHeaders headers, boolean useSsl,
            @Nullable SSLContext sslContext, @Nullable String caFingerprint, Duration soTimeout, Duration connectTimeout,
            @Nullable String pathPrefix, @Nullable HostnameVerifier hostnameVerifier, @Nullable String proxy,
            List<ClientConfigurationCallback<?>> clientConfigurers, Supplier<HttpHeaders> headersSupplier) {

        this.hosts = List.copyOf(hosts);
        this.headers = headers;
        this.useSsl = useSsl;
        this.sslContext = sslContext;
        this.caFingerprint = caFingerprint;
        this.soTimeout = soTimeout;
        this.connectTimeout = connectTimeout;
        this.pathPrefix = pathPrefix;
        this.hostnameVerifier = hostnameVerifier;
        this.proxy = proxy;
        // copy for consistency with 'hosts': callers must not be able to mutate this configuration
        // through the list they passed in or the list returned by getClientConfigurers()
        this.clientConfigurers = List.copyOf(clientConfigurers);
        this.headersSupplier = headersSupplier;
    }

    @Override
    public List<InetSocketAddress> getEndpoints() {
        return this.hosts;
    }

    @Override
    public HttpHeaders getDefaultHeaders() {
        return this.headers;
    }

    @Override
    public boolean useSsl() {
        return this.useSsl;
    }

    @Override
    public Optional<SSLContext> getSslContext() {
        return Optional.ofNullable(this.sslContext);
    }

    @Override
    public Optional<String> getCaFingerprint() {
        return Optional.ofNullable(this.caFingerprint);
    }

    @Override
    public Optional<HostnameVerifier> getHostNameVerifier() {
        return Optional.ofNullable(this.hostnameVerifier);
    }

    @Override
    public Duration getConnectTimeout() {
        return this.connectTimeout;
    }

    @Override
    public Duration getSocketTimeout() {
        return this.soTimeout;
    }

    @Nullable
    @Override
    public String getPathPrefix() {
        return this.pathPrefix;
    }

    @Override
    public Optional<String> getProxy() {
        return Optional.ofNullable(proxy);
    }

    @Override
    public List<ClientConfigurationCallback<?>> getClientConfigurers() {
        return clientConfigurers;
    }

    @Override
    public Supplier<HttpHeaders> getHeadersSupplier() {
        return headersSupplier;
    }
}
/*
 * Copyright 2018-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.elasticsearch.client;

import java.net.InetSocketAddress;
import java.time.Instant;

import org.springframework.util.Assert;

/**
 * Value Object containing information about Elasticsearch cluster nodes: the node endpoint, its last known
 * availability {@link State} and the {@link Instant} that state was captured at.
 *
 * @author Christoph Strobl
 * @since 3.2
 */
public class ElasticsearchHost {

    /**
     * Default HTTP port for Elasticsearch servers.
     */
    public static final int DEFAULT_PORT = 9200;

    private final InetSocketAddress endpoint;
    private final State state;
    private final Instant timestamp;

    public ElasticsearchHost(InetSocketAddress endpoint, State state) {

        Assert.notNull(endpoint, "Host must not be null");
        Assert.notNull(state, "State must not be null");

        this.endpoint = endpoint;
        this.state = state;
        // capture the moment this state snapshot was taken
        this.timestamp = Instant.now();
    }

    /**
     * Creates a host entry in {@link State#ONLINE} state.
     *
     * @param host must not be {@literal null}.
     * @return new instance of {@link ElasticsearchHost}.
     */
    public static ElasticsearchHost online(InetSocketAddress host) {
        return new ElasticsearchHost(host, State.ONLINE);
    }

    /**
     * Creates a host entry in {@link State#OFFLINE} state.
     *
     * @param host must not be {@literal null}.
     * @return new instance of {@link ElasticsearchHost}.
     */
    public static ElasticsearchHost offline(InetSocketAddress host) {
        return new ElasticsearchHost(host, State.OFFLINE);
    }

    /**
     * Parse a {@literal hostAndPort} string into a {@link InetSocketAddress}, falling back to
     * {@link #DEFAULT_PORT} when no port is given.
     *
     * @param hostAndPort the string containing host and port or IP address and port in the format {@code host:port}.
     * @return the parsed {@link InetSocketAddress}.
     */
    public static InetSocketAddress parse(String hostAndPort) {
        return InetSocketAddressParser.parse(hostAndPort, DEFAULT_PORT);
    }

    /**
     * @return {@literal true} if the last known {@link State} was {@link State#ONLINE}
     */
    public boolean isOnline() {
        return state == State.ONLINE;
    }

    /**
     * @return never {@literal null}.
     */
    public InetSocketAddress getEndpoint() {
        return endpoint;
    }

    /**
     * @return the last known {@link State}.
     */
    public State getState() {
        return state;
    }

    /**
     * @return the {@link Instant} the information was captured.
     */
    public Instant getTimestamp() {
        return timestamp;
    }

    @Override
    public String toString() {
        return "ElasticsearchHost(%s, %s)".formatted(endpoint, state.name());
    }

    public enum State {
        ONLINE, OFFLINE, UNKNOWN
    }
}
/*
 * Copyright 2018-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.elasticsearch.client;

import java.net.InetSocketAddress;

import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

/**
 * Utility to parse endpoints in {@code host:port} format into {@link java.net.InetSocketAddress}. Supports bare
 * host names, {@code host:port} pairs and bracketed IPv6 literals ({@code [::1]:9200}).
 *
 * @author Mark Paluch
 * @since 3.2
 */
public class InetSocketAddressParser {

    /**
     * Parse a host and port string into a {@link InetSocketAddress}.
     *
     * @param hostPortString Hostname/IP address and port formatted as {@code host:port} or {@code host}.
     * @param defaultPort default port to apply if {@code hostPortString} does not contain a port.
     * @return a {@link InetSocketAddress} that is unresolved to avoid DNS lookups.
     * @throws IllegalArgumentException if the input cannot be parsed or the port is out of range.
     * @see InetSocketAddress#createUnresolved(String, int)
     */
    public static InetSocketAddress parse(String hostPortString, int defaultPort) {

        Assert.notNull(hostPortString, "HostPortString must not be null");
        String host;
        String portString = null;

        if (hostPortString.startsWith("[")) {
            // bracketed IPv6 literal, optionally followed by ":port"
            String[] hostAndPort = getHostAndPortFromBracketedHost(hostPortString);
            host = hostAndPort[0];
            portString = hostAndPort[1];
        } else {
            int colonPos = hostPortString.indexOf(':');
            if (colonPos >= 0 && hostPortString.indexOf(':', colonPos + 1) == -1) {
                // Exactly 1 colon. Split into host:port.
                host = hostPortString.substring(0, colonPos);
                portString = hostPortString.substring(colonPos + 1);
            } else {
                // 0 or 2+ colons. Bare hostname or IPv6 literal.
                host = hostPortString;
            }
        }

        int port = defaultPort;
        if (StringUtils.hasText(portString)) {
            // Try to parse the whole port string as a number.
            // Reject a leading '+' which Integer.parseInt would otherwise accept.
            Assert.isTrue(!portString.startsWith("+"), String.format("Cannot parse port number: %s", hostPortString));
            try {
                port = Integer.parseInt(portString);
            } catch (NumberFormatException e) {
                // preserve the original cause for diagnostics
                throw new IllegalArgumentException(String.format("Cannot parse port number: %s", hostPortString), e);
            }

            Assert.isTrue(isValidPort(port), String.format("Port number out of range: %s", hostPortString));
        }

        return InetSocketAddress.createUnresolved(host, port);
    }

    /**
     * Parses a bracketed host-port string, throwing IllegalArgumentException if parsing fails.
     *
     * @param hostPortString the full bracketed host-port specification. Port might not be specified.
     * @return an array with 2 strings: host and port, in that order.
     * @throws IllegalArgumentException if parsing the bracketed host-port string fails.
     */
    private static String[] getHostAndPortFromBracketedHost(String hostPortString) {

        Assert.isTrue(hostPortString.charAt(0) == '[',
                String.format("Bracketed host-port string must start with a bracket: %s", hostPortString));

        int colonIndex = hostPortString.indexOf(':');
        int closeBracketIndex = hostPortString.lastIndexOf(']');

        Assert.isTrue(colonIndex > -1 && closeBracketIndex > colonIndex,
                String.format("Invalid bracketed host/port: %s", hostPortString));

        String host = hostPortString.substring(1, closeBracketIndex);
        if (closeBracketIndex + 1 == hostPortString.length()) {
            // "[host]" without a port part
            return new String[] { host, "" };
        } else {

            Assert.isTrue(hostPortString.charAt(closeBracketIndex + 1) == ':',
                    "Only a colon may follow a close bracket: " + hostPortString);
            for (int i = closeBracketIndex + 2; i < hostPortString.length(); ++i) {
                Assert.isTrue(Character.isDigit(hostPortString.charAt(i)),
                        String.format("Port must be numeric: %s", hostPortString));
            }
            return new String[] { host, hostPortString.substring(closeBracketIndex + 2) };
        }
    }

    /**
     * @param port the port number
     * @return {@literal true} for valid port numbers (0-65535).
     */
    private static boolean isValidPort(int port) {
        return port >= 0 && port <= 65535;
    }
}
/*
 * Copyright 2018-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.elasticsearch.client;

import java.util.Set;

/**
 * {@link RuntimeException} to be emitted / thrown when the cluster is down (aka none of the known nodes is reachable).
 *
 * @author Christoph Strobl
 * @since 3.2
 */
public class NoReachableHostException extends RuntimeException {

    /**
     * @param hosts the known cluster nodes, must not be {@literal null}.
     */
    public NoReachableHostException(Set<ElasticsearchHost> hosts) {
        super(createMessage(hosts));
    }

    /**
     * @param hosts the known cluster nodes, must not be {@literal null}.
     * @param cause the underlying connection failure.
     */
    public NoReachableHostException(Set<ElasticsearchHost> hosts, Throwable cause) {
        super(createMessage(hosts), cause);
    }

    private static String createMessage(Set<ElasticsearchHost> hosts) {

        if (hosts.size() == 1) {
            return String.format("Host '%s' not reachable. Cluster state is offline.", hosts.iterator().next().getEndpoint());
        }

        // report the actual number of offline nodes instead of formatting the total size twice,
        // which always produced "N of N nodes offline" even when some nodes were still online
        long offlineCount = hosts.stream().filter(host -> !host.isOnline()).count();
        return String.format("No active host found in cluster. (%s) of (%s) nodes offline.", offlineCount, hosts.size());
    }
}
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.elasticsearch.client; - -import static org.elasticsearch.node.NodeBuilder.*; - -import org.elasticsearch.client.Client; -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.ImmutableSettings; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.DisposableBean; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.InitializingBean; - -/** - * NodeClientFactoryBean - * - * @author Rizwan Idrees - * @author Mohsin Husen - */ - -public class NodeClientFactoryBean implements FactoryBean, InitializingBean, DisposableBean { - - private static final Logger logger = LoggerFactory.getLogger(NodeClientFactoryBean.class); - private boolean local; - private boolean enableHttp; - private String clusterName; - private NodeClient nodeClient; - - NodeClientFactoryBean() { - } - - public NodeClientFactoryBean(boolean local) { - this.local = local; - } - - @Override - public NodeClient getObject() throws Exception { - return nodeClient; - } - - @Override - public Class getObjectType() { - return NodeClient.class; - } - - @Override - public boolean isSingleton() { - return true; - } - - @Override - public void afterPropertiesSet() throws Exception { - ImmutableSettings.Builder settings = ImmutableSettings.settingsBuilder().put("http.enabled", - String.valueOf(this.enableHttp)); - - nodeClient = (NodeClient) nodeBuilder().settings(settings).clusterName(this.clusterName).local(this.local).node() - 
.client(); - } - - public void setLocal(boolean local) { - this.local = local; - } - - public void setEnableHttp(boolean enableHttp) { - this.enableHttp = enableHttp; - } - - public void setClusterName(String clusterName) { - this.clusterName = clusterName; - } - - @Override - public void destroy() throws Exception { - try { - logger.info("Closing elasticSearch client"); - if (nodeClient != null) { - nodeClient.close(); - } - } catch (final Exception e) { - logger.error("Error closing ElasticSearch client: ", e); - } - } -} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java b/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java deleted file mode 100644 index 3e91ba0c2d..0000000000 --- a/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.elasticsearch.client; - -import static org.apache.commons.lang.StringUtils.*; -import static org.elasticsearch.common.settings.ImmutableSettings.*; - -import java.util.Properties; - -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.DisposableBean; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * TransportClientFactoryBean - * - * @author Rizwan Idrees - * @author Mohsin Husen - * @author Jakub Vavrik - * @author Piotr Betkier - */ - -public class TransportClientFactoryBean implements FactoryBean, InitializingBean, DisposableBean { - - private static final Logger logger = LoggerFactory.getLogger(TransportClientFactoryBean.class); - private String clusterNodes = "127.0.0.1:9300"; - private String clusterName = "elasticsearch"; - private Boolean clientTransportSniff = true; - private Boolean clientIgnoreClusterName = Boolean.FALSE; - private String clientPingTimeout = "5s"; - private String clientNodesSamplerInterval = "5s"; - private TransportClient client; - private Properties properties; - static final String COLON = ":"; - static final String COMMA = ","; - - @Override - public void destroy() throws Exception { - try { - logger.info("Closing elasticSearch client"); - if (client != null) { - client.close(); - } - } catch (final Exception e) { - logger.error("Error closing ElasticSearch client: ", e); - } - } - - @Override - public TransportClient getObject() throws Exception { - return client; - } - - @Override - public Class getObjectType() { - return TransportClient.class; - } - - @Override - public boolean isSingleton() { - return false; - } - - @Override - public void 
afterPropertiesSet() throws Exception { - buildClient(); - } - - protected void buildClient() throws Exception { - client = new TransportClient(settings()); - Assert.hasText(clusterNodes, "[Assertion failed] clusterNodes settings missing."); - for (String clusterNode : split(clusterNodes, COMMA)) { - String hostName = substringBefore(clusterNode, COLON); - String port = substringAfter(clusterNode, COLON); - Assert.hasText(hostName, "[Assertion failed] missing host name in 'clusterNodes'"); - Assert.hasText(port, "[Assertion failed] missing port in 'clusterNodes'"); - logger.info("adding transport node : " + clusterNode); - client.addTransportAddress(new InetSocketTransportAddress(hostName, Integer.valueOf(port))); - } - client.connectedNodes(); - } - - private Settings settings() { - if (properties != null) { - return settingsBuilder().put(properties).build(); - } - return settingsBuilder() - .put("cluster.name", clusterName) - .put("client.transport.sniff", clientTransportSniff) - .put("client.transport.ignore_cluster_name", clientIgnoreClusterName) - .put("client.transport.ping_timeout", clientPingTimeout) - .put("client.transport.nodes_sampler_interval", clientNodesSamplerInterval) - .build(); - } - - public void setClusterNodes(String clusterNodes) { - this.clusterNodes = clusterNodes; - } - - public void setClusterName(String clusterName) { - this.clusterName = clusterName; - } - - public void setClientTransportSniff(Boolean clientTransportSniff) { - this.clientTransportSniff = clientTransportSniff; - } - - public String getClientNodesSamplerInterval() { - return clientNodesSamplerInterval; - } - - public void setClientNodesSamplerInterval(String clientNodesSamplerInterval) { - this.clientNodesSamplerInterval = clientNodesSamplerInterval; - } - - public String getClientPingTimeout() { - return clientPingTimeout; - } - - public void setClientPingTimeout(String clientPingTimeout) { - this.clientPingTimeout = clientPingTimeout; - } - - public Boolean 
getClientIgnoreClusterName() { - return clientIgnoreClusterName; - } - - public void setClientIgnoreClusterName(Boolean clientIgnoreClusterName) { - this.clientIgnoreClusterName = clientIgnoreClusterName; - } - - public void setProperties(Properties properties) { - this.properties = properties; - } -} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/UnsupportedBackendOperation.java b/src/main/java/org/springframework/data/elasticsearch/client/UnsupportedBackendOperation.java new file mode 100644 index 0000000000..0264b95c00 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/UnsupportedBackendOperation.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client; + +/** + * Exception to be thrown by a backend implementation on operations that are not supported for that backend. 
/**
 * Exception to be thrown by a backend implementation on operations that are not supported for that backend.
 * Mirrors the full constructor set of {@link RuntimeException} so callers can attach messages and causes as
 * needed.
 *
 * @author Peter-Josef Meisch
 * @since 4.4
 */
public class UnsupportedBackendOperation extends RuntimeException {

    /** Creates an exception without message or cause. */
    public UnsupportedBackendOperation() {}

    /**
     * @param message the detail message.
     */
    public UnsupportedBackendOperation(String message) {
        super(message);
    }

    /**
     * @param message the detail message.
     * @param cause the underlying cause.
     */
    public UnsupportedBackendOperation(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * @param cause the underlying cause; its {@code toString()} becomes the message.
     */
    public UnsupportedBackendOperation(Throwable cause) {
        super(cause);
    }

    /**
     * Full control over suppression and stack-trace writability, delegating to
     * {@link RuntimeException#RuntimeException(String, Throwable, boolean, boolean)}.
     */
    public UnsupportedBackendOperation(String message, Throwable cause, boolean enableSuppression,
            boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
/**
 * Thrown when a client implementation is asked to perform an operation it does not support.
 *
 * @author Peter-Josef Meisch
 */
public class UnsupportedClientOperationException extends RuntimeException {

    /**
     * @param clientClass the client implementation that rejected the operation.
     * @param operation the name of the unsupported operation.
     */
    public UnsupportedClientOperationException(Class<?> clientClass, String operation) {
        super(String.format("Client %1$s does not support the operation %2$s", clientClass, operation));
    }
}
+ * + * @author Aouichaoui Youssef + * @since 5.3 + */ +public abstract class AbstractQueryProcessor { + + /** + * Convert a spring-data-elasticsearch {@literal query} to an Elasticsearch {@literal query}. + * + * @param query spring-data-elasticsearch {@literal query}. + * @param queryConverter correct mapped field names and the values to the converted values. + * @return an Elasticsearch {@literal query}. + */ + + static co.elastic.clients.elasticsearch._types.query_dsl.@Nullable Query getEsQuery(@Nullable Query query, + @Nullable Consumer queryConverter) { + if (query == null) { + return null; + } + + if (queryConverter != null) { + queryConverter.accept(query); + } + + co.elastic.clients.elasticsearch._types.query_dsl.Query esQuery = null; + + if (query instanceof CriteriaQuery criteriaQuery) { + esQuery = CriteriaQueryProcessor.createQuery(criteriaQuery.getCriteria()); + } else if (query instanceof StringQuery stringQuery) { + esQuery = Queries.wrapperQueryAsQuery(stringQuery.getSource()); + } else if (query instanceof NativeQuery nativeQuery) { + if (nativeQuery.getQuery() != null) { + esQuery = nativeQuery.getQuery(); + } else if (nativeQuery.getSpringDataQuery() != null) { + esQuery = getEsQuery(nativeQuery.getSpringDataQuery(), queryConverter); + } + } else { + throw new IllegalArgumentException("unhandled Query implementation " + query.getClass().getName()); + } + + return esQuery; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/Aggregation.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/Aggregation.java new file mode 100644 index 0000000000..23e2b6ae47 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/Aggregation.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.aggregations.Aggregate; + +/** + * Class to combine an Elasticsearch {@link co.elastic.clients.elasticsearch._types.aggregations.Aggregate} with its + * name. Necessary as the Elasticsearch Aggregate does not know its name. + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class Aggregation { + + private final String name; + private final Aggregate aggregate; + + public Aggregation(String name, Aggregate aggregate) { + this.name = name; + this.aggregate = aggregate; + } + + public String getName() { + return name; + } + + public Aggregate getAggregate() { + return aggregate; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/AutoCloseableElasticsearchClient.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/AutoCloseableElasticsearchClient.java new file mode 100644 index 0000000000..3ce661e1fd --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/AutoCloseableElasticsearchClient.java @@ -0,0 +1,47 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.ElasticsearchClient; +import co.elastic.clients.transport.ElasticsearchTransport; + +import java.io.IOException; + +import org.elasticsearch.client.RestClient; +import org.springframework.util.Assert; + +/** + * Extension of the {@link ElasticsearchClient} class that implements {@link AutoCloseable}. As the underlying + * {@link RestClient} must be closed properly this is handled in the {@link #close()} method. + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class AutoCloseableElasticsearchClient extends ElasticsearchClient implements AutoCloseable { + + public AutoCloseableElasticsearchClient(ElasticsearchTransport transport) { + super(transport); + Assert.notNull(transport, "transport must not be null"); + } + + @Override + public void close() throws IOException { + // since Elasticsearch 8.16 the ElasticsearchClient implements (through ApiClient) the Closeable interface and + // handles closing of the underlying transport. 
We now just call the base class, but keep this as we + // have been implementing AutoCloseable since 4.4 and won't change that to a mere Closeable + super.close(); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ChildTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ChildTemplate.java new file mode 100644 index 0000000000..4d3ebf5bd7 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ChildTemplate.java @@ -0,0 +1,76 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.ApiClient; +import co.elastic.clients.elasticsearch.cluster.ElasticsearchClusterClient; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.transport.Transport; + +import java.io.IOException; + +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.util.Assert; + +/** + * base class for a template that uses one of the {@link co.elastic.clients.elasticsearch.ElasticsearchClient}'s child + * clients like {@link ElasticsearchClusterClient} or + * {@link co.elastic.clients.elasticsearch.indices.ElasticsearchIndicesClient}. 
+ * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public abstract class ChildTemplate> { + + protected final CLIENT client; + protected final RequestConverter requestConverter; + protected final ResponseConverter responseConverter; + protected final ElasticsearchExceptionTranslator exceptionTranslator; + + public ChildTemplate(CLIENT client, ElasticsearchConverter elasticsearchConverter) { + this.client = client; + JsonpMapper jsonpMapper = client._transport().jsonpMapper(); + requestConverter = new RequestConverter(elasticsearchConverter, jsonpMapper); + responseConverter = new ResponseConverter(jsonpMapper); + exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); + } + + /** + * Callback interface to be used with {@link #execute(ClientCallback)} for operating directly on the client. + */ + @FunctionalInterface + public interface ClientCallback { + RESULT doWithClient(CLIENT client) throws IOException; + } + + /** + * Execute a callback with the client and provide exception translation. + * + * @param callback the callback to execute, must not be {@literal null} + * @param the type returned from the callback + * @return the callback result + */ + public RESULT execute(ClientCallback callback) { + + Assert.notNull(callback, "callback must not be null"); + + try { + return callback.doWithClient(client); + } catch (IOException | RuntimeException e) { + throw exceptionTranslator.translateException(e); + } + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ClusterTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ClusterTemplate.java new file mode 100644 index 0000000000..fcba35fa7d --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ClusterTemplate.java @@ -0,0 +1,48 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.cluster.ElasticsearchClusterClient; +import co.elastic.clients.elasticsearch.cluster.HealthRequest; +import co.elastic.clients.elasticsearch.cluster.HealthResponse; +import co.elastic.clients.transport.ElasticsearchTransport; + +import org.springframework.data.elasticsearch.core.cluster.ClusterHealth; +import org.springframework.data.elasticsearch.core.cluster.ClusterOperations; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; + +/** + * Implementation of the {@link ClusterOperations} interface using en {@link ElasticsearchClusterClient}. 
+ * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ClusterTemplate extends ChildTemplate + implements ClusterOperations { + + public ClusterTemplate(ElasticsearchClusterClient client, ElasticsearchConverter elasticsearchConverter) { + super(client, elasticsearchConverter); + } + + @Override + public ClusterHealth health() { + + HealthRequest healthRequest = requestConverter.clusterHealthRequest(); + HealthResponse healthResponse = execute(client -> client.health(healthRequest)); + return responseConverter.clusterHealth(healthResponse); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java new file mode 100644 index 0000000000..702d8501b3 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java @@ -0,0 +1,351 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.GeoDistanceType; +import co.elastic.clients.elasticsearch._types.GeoShapeRelation; +import co.elastic.clients.elasticsearch._types.query_dsl.BoolQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.GeoBoundingBoxQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.GeoDistanceQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.GeoShapeQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.Query; +import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders; +import co.elastic.clients.elasticsearch._types.query_dsl.QueryVariant; +import co.elastic.clients.json.JsonData; +import co.elastic.clients.util.ObjectBuilder; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.data.elasticsearch.core.convert.GeoConverters; +import org.springframework.data.elasticsearch.core.geo.GeoBox; +import org.springframework.data.elasticsearch.core.geo.GeoJson; +import org.springframework.data.elasticsearch.core.geo.GeoPoint; +import org.springframework.data.elasticsearch.core.query.Criteria; +import org.springframework.data.elasticsearch.utils.geohash.Geohash; +import org.springframework.data.geo.Box; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.util.Assert; + +/** + * Class to convert a {@link org.springframework.data.elasticsearch.core.query.CriteriaQuery} into an Elasticsearch + * filter. + * + * @author Peter-Josef Meisch + * @author Junghoon Ban + * @since 4.4 + */ +class CriteriaFilterProcessor { + /** + * Creates a filter query from the given criteria. 
+ * + * @param criteria the criteria to process + * @return the optional query, empty if the criteria did not contain filter relevant elements + */ + public static Optional createQuery(Criteria criteria) { + + Assert.notNull(criteria, "criteria must not be null"); + + List filterQueries = new ArrayList<>(); + + for (Criteria chainedCriteria : criteria.getCriteriaChain()) { + + if (chainedCriteria.isOr()) { + Collection queriesForEntries = queriesForEntries(chainedCriteria); + + if (!queriesForEntries.isEmpty()) { + BoolQuery.Builder boolQueryBuilder = QueryBuilders.bool(); + queriesForEntries.forEach(boolQueryBuilder::should); + filterQueries.add(new Query(boolQueryBuilder.build())); + } + } else if (chainedCriteria.isNegating()) { + + Assert.notNull(criteria.getField(), "criteria must have a field"); + + Collection negatingFilters = buildNegatingFilter(criteria.getField().getName(), + criteria.getFilterCriteriaEntries()); + filterQueries.addAll(negatingFilters); + } else { + filterQueries.addAll(queriesForEntries(chainedCriteria)); + } + } + + if (filterQueries.isEmpty()) { + return Optional.empty(); + } else { + + if (filterQueries.size() == 1) { + return Optional.of(filterQueries.get(0)); + } else { + BoolQuery.Builder boolQueryBuilder = QueryBuilders.bool(); + filterQueries.forEach(boolQueryBuilder::must); + BoolQuery boolQuery = boolQueryBuilder.build(); + return Optional.of(new Query(boolQuery)); + } + } + } + + private static Collection buildNegatingFilter(String fieldName, + Set filterCriteriaEntries) { + + List negationFilters = new ArrayList<>(); + + filterCriteriaEntries.forEach(criteriaEntry -> { + Optional query = queryFor(criteriaEntry.getKey(), criteriaEntry.getValue(), fieldName); + + if (query.isPresent()) { + BoolQuery negatingFilter = QueryBuilders.bool().mustNot(query.get()).build(); + negationFilters.add(new Query(negatingFilter)); + } + }); + + return negationFilters; + } + + private static Collection queriesForEntries(Criteria criteria) { + + 
Assert.notNull(criteria.getField(), "criteria must have a field"); + + String fieldName = criteria.getField().getName(); + Assert.notNull(fieldName, "Unknown field"); + + return criteria.getFilterCriteriaEntries().stream() + .map(entry -> queryFor(entry.getKey(), entry.getValue(), fieldName)) // + .filter(Optional::isPresent) // + .map(Optional::get) // + .collect(Collectors.toList()); + } + + private static Optional queryFor(Criteria.OperationKey key, Object value, String fieldName) { + + ObjectBuilder queryBuilder = null; + + switch (key) { + case WITHIN -> { + Assert.isTrue(value instanceof Object[], "Value of a geo distance filter should be an array of two values."); + queryBuilder = withinQuery(fieldName, (Object[]) value); + } + case BBOX -> { + Assert.isTrue(value instanceof Object[], + "Value of a boundedBy filter should be an array of one or two values."); + queryBuilder = boundingBoxQuery(fieldName, (Object[]) value); + } + case GEO_INTERSECTS -> { + Assert.isTrue(value instanceof GeoJson, "value of a GEO_INTERSECTS filter must be a GeoJson object"); + queryBuilder = geoJsonQuery(fieldName, (GeoJson) value, "intersects"); + } + case GEO_IS_DISJOINT -> { + Assert.isTrue(value instanceof GeoJson, "value of a GEO_IS_DISJOINT filter must be a GeoJson object"); + queryBuilder = geoJsonQuery(fieldName, (GeoJson) value, "disjoint"); + } + case GEO_WITHIN -> { + Assert.isTrue(value instanceof GeoJson, "value of a GEO_WITHIN filter must be a GeoJson object"); + queryBuilder = geoJsonQuery(fieldName, (GeoJson) value, "within"); + } + case GEO_CONTAINS -> { + Assert.isTrue(value instanceof GeoJson, "value of a GEO_CONTAINS filter must be a GeoJson object"); + queryBuilder = geoJsonQuery(fieldName, (GeoJson) value, "contains"); + } + } + + return Optional.ofNullable(queryBuilder != null ? queryBuilder.build()._toQuery() : null); + } + + private static ObjectBuilder withinQuery(String fieldName, Object... 
values) { + + Assert.noNullElements(values, "Geo distance filter takes 2 not null elements array as parameter."); + Assert.isTrue(values.length == 2, "Geo distance filter takes a 2-elements array as parameter."); + Assert.isTrue(values[0] instanceof GeoPoint || values[0] instanceof String || values[0] instanceof Point, + "First element of a geo distance filter must be a GeoPoint, a Point or a text"); + Assert.isTrue(values[1] instanceof String || values[1] instanceof Distance, + "Second element of a geo distance filter must be a text or a Distance"); + + String dist = (values[1] instanceof Distance distance) ? extractDistanceString(distance) : (String) values[1]; + + return QueryBuilders.geoDistance() // + .field(fieldName) // + .distance(dist) // + .distanceType(GeoDistanceType.Plane) // + .location(location -> { + if (values[0] instanceof GeoPoint loc) { + location.latlon(latlon -> latlon.lat(loc.getLat()).lon(loc.getLon())); + } else if (values[0] instanceof Point point) { + GeoPoint loc = GeoPoint.fromPoint(point); + location.latlon(latlon -> latlon.lat(loc.getLat()).lon(loc.getLon())); + } else { + String loc = (String) values[0]; + if (loc.contains(",")) { + String[] c = loc.split(","); + location.latlon(latlon -> latlon.lat(Double.parseDouble(c[0])).lon(Double.parseDouble(c[1]))); + } else { + location.geohash(geohash -> geohash.geohash(loc)); + } + } + return location; + }); + } + + private static ObjectBuilder boundingBoxQuery(String fieldName, Object... 
values) { + + Assert.noNullElements(values, "Geo boundedBy filter takes a not null element array as parameter."); + + GeoBoundingBoxQuery.Builder queryBuilder = QueryBuilders.geoBoundingBox() // + .field(fieldName); + + if (values.length == 1) { + // GeoEnvelop + oneParameterBBox(queryBuilder, values[0]); + } else if (values.length == 2) { + // 2x GeoPoint + // 2x text + twoParameterBBox(queryBuilder, values); + } else { + throw new IllegalArgumentException( + "Geo distance filter takes a 1-elements array(GeoBox) or 2-elements array(GeoPoints or Strings(format lat,lon or geohash))."); + } + return queryBuilder; + } + + private static void oneParameterBBox(GeoBoundingBoxQuery.Builder queryBuilder, Object value) { + Assert.isTrue(value instanceof GeoBox || value instanceof Box, + "single-element of boundedBy filter must be type of GeoBox or Box"); + + GeoBox geoBBox; + if (value instanceof Box box) { + geoBBox = GeoBox.fromBox(box); + } else { + geoBBox = (GeoBox) value; + } + + queryBuilder.boundingBox(bb -> bb // + .tlbr(tlbr -> tlbr // + .topLeft(glb -> glb // + .latlon(latlon -> latlon // + .lat(geoBBox.getTopLeft().getLat()) // + .lon(geoBBox.getTopLeft().getLon()))) // + .bottomRight(glb -> glb // + .latlon(latlon -> latlon // + .lat(geoBBox.getBottomRight().getLat())// + .lon(geoBBox.getBottomRight().getLon()// ) + ))))); + } + + private static void twoParameterBBox(GeoBoundingBoxQuery.Builder queryBuilder, Object... 
values) { + + Assert.isTrue(allElementsAreOfType(values, GeoPoint.class) || allElementsAreOfType(values, String.class), + " both elements of boundedBy filter must be type of GeoPoint or text(format lat,lon or geohash)"); + + if (values[0] instanceof GeoPoint topLeft) { + GeoPoint bottomRight = (GeoPoint) values[1]; + queryBuilder.boundingBox(bb -> bb // + .tlbr(tlbr -> tlbr // + .topLeft(glb -> glb // + .latlon(latlon -> latlon // + .lat(topLeft.getLat()) // + .lon(topLeft.getLon()))) // + .bottomRight(glb -> glb // + .latlon(latlon -> latlon // + .lat(bottomRight.getLat()) // + .lon(bottomRight.getLon()))) // + ) // + ); + } else { + String topLeft = (String) values[0]; + String bottomRight = (String) values[1]; + boolean isGeoHash = !topLeft.contains(","); + queryBuilder.boundingBox(bb -> bb // + .tlbr(tlbr -> tlbr // + .topLeft(glb -> { + if (isGeoHash) { + // although the builder in 8.13.2 supports geohash, the server throws an error, so we convert to a + // lat,lon string here + glb.text(Geohash.toLatLon(topLeft)); + // glb.geohash(gh -> gh.geohash(topLeft)); + } else { + glb.text(topLeft); + } + return glb; + }) // + .bottomRight(glb -> { + if (isGeoHash) { + glb.text(Geohash.toLatLon(bottomRight)); + // glb.geohash(gh -> gh.geohash(bottomRight)); + } else { + glb.text(bottomRight); + } + return glb; + }) // + )); + } + } + + private static boolean allElementsAreOfType(Object[] array, Class clazz) { + for (Object o : array) { + if (!clazz.isInstance(o)) { + return false; + } + } + return true; + } + + private static ObjectBuilder geoJsonQuery(String fieldName, GeoJson geoJson, + String relation) { + return buildGeoShapeQuery(fieldName, geoJson, relation); + } + + private static ObjectBuilder buildGeoShapeQuery(String fieldName, GeoJson geoJson, + String relation) { + return QueryBuilders.geoShape().field(fieldName) // + .shape(gsf -> gsf // + .shape(JsonData.of(GeoConverters.GeoJsonToMapConverter.INSTANCE.convert(geoJson))) // + 
.relation(toRelation(relation))); // + } + + private static GeoShapeRelation toRelation(String relation) { + + for (GeoShapeRelation geoShapeRelation : GeoShapeRelation.values()) { + + if (geoShapeRelation.name().equalsIgnoreCase(relation)) { + return geoShapeRelation; + } + } + throw new IllegalArgumentException("Unknown geo_shape relation: " + relation); + } + + /** + * extract the distance string from a {@link org.springframework.data.geo.Distance} object. + * + * @param distance distance object to extract string from + */ + private static String extractDistanceString(Distance distance) { + + StringBuilder sb = new StringBuilder(); + sb.append((int) distance.getValue()); + switch ((Metrics) distance.getMetric()) { + case KILOMETERS -> sb.append("km"); + case MILES -> sb.append("mi"); + } + + return sb.toString(); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaQueryException.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaQueryException.java new file mode 100644 index 0000000000..cb6cccf973 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaQueryException.java @@ -0,0 +1,28 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import org.springframework.dao.UncategorizedDataAccessException; + +/** + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class CriteriaQueryException extends UncategorizedDataAccessException { + public CriteriaQueryException(String msg) { + super(msg, null); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaQueryProcessor.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaQueryProcessor.java new file mode 100644 index 0000000000..1c9c9ef53a --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaQueryProcessor.java @@ -0,0 +1,460 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import static org.springframework.data.elasticsearch.client.elc.Queries.*; +import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*; +import static org.springframework.util.StringUtils.*; + +import co.elastic.clients.elasticsearch._types.FieldValue; +import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode; +import co.elastic.clients.elasticsearch._types.query_dsl.Operator; +import co.elastic.clients.elasticsearch._types.query_dsl.Query; +import co.elastic.clients.elasticsearch.core.search.InnerHits; +import co.elastic.clients.json.JsonData; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.annotations.FieldType; +import org.springframework.data.elasticsearch.core.query.Criteria; +import org.springframework.data.elasticsearch.core.query.Field; +import org.springframework.data.elasticsearch.core.query.HasChildQuery; +import org.springframework.data.elasticsearch.core.query.HasParentQuery; +import org.springframework.data.elasticsearch.core.query.InnerHitsQuery; +import org.springframework.util.Assert; + +/** + * Class to convert a {@link org.springframework.data.elasticsearch.core.query.CriteriaQuery} into an Elasticsearch + * query. 
+ * + * @author Peter-Josef Meisch + * @author Ezequiel Antúnez Camacho + * @since 4.4 + */ +class CriteriaQueryProcessor extends AbstractQueryProcessor { + + /** + * creates a query from the criteria + * + * @param criteria the {@link Criteria} + * @return the optional query, null if the criteria did not contain filter relevant elements + */ + @Nullable + public static Query createQuery(Criteria criteria) { + + Assert.notNull(criteria, "criteria must not be null"); + + List shouldQueries = new ArrayList<>(); + List mustNotQueries = new ArrayList<>(); + List mustQueries = new ArrayList<>(); + + Query firstQuery = null; + boolean negateFirstQuery = false; + + for (Criteria chainedCriteria : criteria.getCriteriaChain()) { + Query queryFragment = queryForEntries(chainedCriteria); + + if (queryFragment != null) { + + if (firstQuery == null) { + firstQuery = queryFragment; + negateFirstQuery = chainedCriteria.isNegating(); + continue; + } + + if (chainedCriteria.isOr()) { + shouldQueries.add(queryFragment); + } else if (chainedCriteria.isNegating()) { + mustNotQueries.add(queryFragment); + } else { + mustQueries.add(queryFragment); + } + } + } + + for (Criteria subCriteria : criteria.getSubCriteria()) { + Query subQuery = createQuery(subCriteria); + if (subQuery != null) { + if (criteria.isOr()) { + shouldQueries.add(subQuery); + } else if (criteria.isNegating()) { + mustNotQueries.add(subQuery); + } else { + mustQueries.add(subQuery); + } + } + } + + if (firstQuery != null) { + + if (!shouldQueries.isEmpty() && mustNotQueries.isEmpty() && mustQueries.isEmpty()) { + shouldQueries.add(0, firstQuery); + } else { + + if (negateFirstQuery) { + mustNotQueries.add(0, firstQuery); + } else { + mustQueries.add(0, firstQuery); + } + } + } + + var filterQuery = CriteriaFilterProcessor.createQuery(criteria); + if (shouldQueries.isEmpty() && mustNotQueries.isEmpty() && mustQueries.isEmpty()) { + + if (filterQuery.isEmpty()) { + return null; + } + + // we need something to add the 
filter to + mustQueries.add(Query.of(qb -> qb.matchAll(m -> m))); + } + + return new Query.Builder().bool(boolQueryBuilder -> { + + if (!shouldQueries.isEmpty()) { + boolQueryBuilder.should(shouldQueries); + } + + if (!mustNotQueries.isEmpty()) { + boolQueryBuilder.mustNot(mustNotQueries); + } + + if (!mustQueries.isEmpty()) { + boolQueryBuilder.must(mustQueries); + } + + filterQuery.ifPresent(boolQueryBuilder::filter); + + return boolQueryBuilder; + }).build(); + } + + @Nullable + private static Query queryForEntries(Criteria criteria) { + + Field field = criteria.getField(); + + if (field == null || criteria.getQueryCriteriaEntries().isEmpty()) + return null; + + String fieldName = field.getName(); + Assert.notNull(fieldName, "Unknown field " + fieldName); + + Iterator it = criteria.getQueryCriteriaEntries().iterator(); + + Float boost = Float.isNaN(criteria.getBoost()) ? null : criteria.getBoost(); + Query.Builder queryBuilder; + + if (criteria.getQueryCriteriaEntries().size() == 1) { + queryBuilder = queryFor(it.next(), field, boost); + } else { + queryBuilder = new Query.Builder(); + queryBuilder.bool(boolQueryBuilder -> { + while (it.hasNext()) { + Criteria.CriteriaEntry entry = it.next(); + boolQueryBuilder.must(queryFor(entry, field, null).build()); + } + boolQueryBuilder.boost(boost); + return boolQueryBuilder; + }); + + } + + if (hasText(field.getPath())) { + final Query query = queryBuilder.build(); + queryBuilder = new Query.Builder(); + queryBuilder.nested(nqb -> nqb // + .path(field.getPath()) // + .query(query) // + .scoreMode(ChildScoreMode.Avg)); + } + + if (criteria.isNegating() && criteria.isOr()) { + final Query query = queryBuilder.build(); + queryBuilder = new Query.Builder(); + queryBuilder.bool(mnqb -> mnqb.mustNot(query)); + } + + return queryBuilder.build(); + } + + private static Query.Builder queryFor(Criteria.CriteriaEntry entry, Field field, @Nullable Float boost) { + + String fieldName = field.getName(); + boolean isKeywordField = 
FieldType.Keyword == field.getFieldType(); + + Criteria.OperationKey key = entry.getKey(); + Object value = key.hasValue() ? entry.getValue() : null; + String searchText = value != null ? escape(value.toString()) : "UNKNOWN_VALUE"; + + Query.Builder queryBuilder = new Query.Builder(); + switch (key) { + case EXISTS: + queryBuilder // + .exists(eb -> eb // + .field(fieldName) // + .boost(boost)); + break; + case EMPTY: + queryBuilder // + .bool(bb -> bb // + .must(mb -> mb // + .exists(eb -> eb // + .field(fieldName) // + )) // + .mustNot(mnb -> mnb // + .wildcard(wb -> wb // + .field(fieldName) // + .wildcard("*"))) // + .boost(boost)); + break; + case NOT_EMPTY: + queryBuilder // + .wildcard(wb -> wb // + .field(fieldName) // + .wildcard("*") // + .boost(boost)); + break; + case EQUALS: + queryBuilder.queryString(queryStringQuery(fieldName, searchText, Operator.And, boost)); + break; + case CONTAINS: + queryBuilder.queryString(queryStringQuery(fieldName, '*' + searchText + '*', true, boost)); + break; + case STARTS_WITH: + queryBuilder.queryString(queryStringQuery(fieldName, searchText + '*', true, boost)); + break; + case ENDS_WITH: + queryBuilder.queryString(queryStringQuery(fieldName, '*' + searchText, true, boost)); + break; + case EXPRESSION: + queryBuilder.queryString(queryStringQuery(fieldName, Objects.requireNonNull(value).toString(), boost)); + break; + case LESS: + queryBuilder + .range(rb -> rb + .untyped(ut -> ut + .field(fieldName) + .lt(JsonData.of(value)) + .boost(boost))); + break; + case LESS_EQUAL: + queryBuilder + .range(rb -> rb + .untyped(ut -> ut + .field(fieldName) + .lte(JsonData.of(value)) + .boost(boost))); + break; + case GREATER: + queryBuilder + .range(rb -> rb + .untyped(ut -> ut + .field(fieldName) + .gt(JsonData.of(value)) + .boost(boost))); + break; + case GREATER_EQUAL: + queryBuilder + .range(rb -> rb + .untyped(ut -> ut + .field(fieldName) + .gte(JsonData.of(value)) + .boost(boost))); + break; + case BETWEEN: + Object[] ranges = 
(Object[]) value; + Assert.notNull(value, "value for a between condition must not be null"); + queryBuilder + .range(rb -> rb + .untyped(ut -> { + ut.field(fieldName); + if (ranges[0] != null) { + ut.gte(JsonData.of(ranges[0])); + } + + if (ranges[1] != null) { + ut.lte(JsonData.of(ranges[1])); + } + ut.boost(boost); // + return ut; + })); + + break; + case FUZZY: + queryBuilder // + .fuzzy(fb -> fb // + .field(fieldName) // + .value(FieldValue.of(searchText)) // + .boost(boost)); // + break; + case MATCHES: + queryBuilder.match(matchQuery(fieldName, Objects.requireNonNull(value).toString(), Operator.Or, boost)); + break; + case MATCHES_ALL: + queryBuilder.match(matchQuery(fieldName, Objects.requireNonNull(value).toString(), Operator.And, boost)); + + break; + case IN: + if (value instanceof Iterable iterable) { + if (isKeywordField) { + queryBuilder.bool(bb -> bb // + .must(mb -> mb // + .terms(tb -> tb // + .field(fieldName) // + .terms(tsb -> tsb // + .value(toFieldValueList(iterable))))) // + .boost(boost)); // + } else { + queryBuilder // + .queryString(qsb -> qsb // + .fields(fieldName) // + .query(orQueryString(iterable)) // + .boost(boost)); // + } + } else { + throw new CriteriaQueryException("value for " + fieldName + " is not an Iterable"); + } + break; + case NOT_IN: + if (value instanceof Iterable iterable) { + if (isKeywordField) { + queryBuilder.bool(bb -> bb // + .mustNot(mnb -> mnb // + .terms(tb -> tb // + .field(fieldName) // + .terms(tsb -> tsb // + .value(toFieldValueList(iterable))))) // + .boost(boost)); // + } else { + queryBuilder // + .queryString(qsb -> qsb // + .fields(fieldName) // + .query("NOT(" + orQueryString(iterable) + ')') // + .boost(boost)); // + } + } else { + throw new CriteriaQueryException("value for " + fieldName + " is not an Iterable"); + } + break; + case REGEXP: + queryBuilder // + .regexp(rb -> rb // + .field(fieldName) // + .value(Objects.requireNonNull(value).toString()) // + .boost(boost)); // + break; + case 
HAS_CHILD: + if (value instanceof HasChildQuery query) { + queryBuilder.hasChild(hcb -> hcb + .type(query.getType()) + .query(getEsQuery(query.getQuery(), null)) + .innerHits(getInnerHits(query.getInnerHitsQuery())) + .ignoreUnmapped(query.getIgnoreUnmapped()) + .minChildren(query.getMinChildren()) + .maxChildren(query.getMaxChildren()) + .scoreMode(scoreMode(query.getScoreMode()))); + } else { + throw new CriteriaQueryException("value for " + fieldName + " is not a has_child query"); + } + break; + case HAS_PARENT: + if (value instanceof HasParentQuery query) { + queryBuilder.hasParent(hpb -> hpb + .parentType(query.getParentType()) + .query(getEsQuery(query.getQuery(), null)) + .innerHits(getInnerHits(query.getInnerHitsQuery())) + .ignoreUnmapped(query.getIgnoreUnmapped()) + .score(query.getScore())); + } else { + throw new CriteriaQueryException("value for " + fieldName + " is not a has_parent query"); + } + break; + default: + throw new CriteriaQueryException("Could not build query for " + entry); + } + + return queryBuilder; + } + + private static List toFieldValueList(Iterable iterable) { + List list = new ArrayList<>(); + for (Object item : iterable) { + list.add(item != null ? FieldValue.of(item.toString()) : null); + } + return list; + } + + private static String orQueryString(Iterable iterable) { + StringBuilder sb = new StringBuilder(); + + for (Object item : iterable) { + + if (item != null) { + + if (!sb.isEmpty()) { + sb.append(' '); + } + sb.append('"'); + sb.append(escape(item.toString())); + sb.append('"'); + } + } + + return sb.toString(); + } + + /** + * Returns a String where those characters that TextParser expects to be escaped are escaped by a preceding + * \. 
Copied from Apache 2 licensed org.apache.lucene.queryparser.flexible.standard.QueryParserUtil + * class + */ + public static String escape(String s) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + // These characters are part of the query syntax and must be escaped + if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '(' || c == ')' || c == ':' || c == '^' || c == '[' + || c == ']' || c == '\"' || c == '{' || c == '}' || c == '~' || c == '*' || c == '?' || c == '|' || c == '&' + || c == '/') { + sb.append('\\'); + } + sb.append(c); + } + return sb.toString(); + } + + /** + * Convert a spring-data-elasticsearch {@literal inner_hits} to an Elasticsearch {@literal inner_hits} query. + * + * @param query spring-data-elasticsearch {@literal inner_hits}. + * @return an Elasticsearch {@literal inner_hits} query. + */ + @Nullable + private static InnerHits getInnerHits(@Nullable InnerHitsQuery query) { + if (query == null) { + return null; + } + + return InnerHits.of(iqb -> iqb.from(query.getFrom()).size(query.getSize()).name(query.getName())); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java new file mode 100644 index 0000000000..53e8cefa7b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java @@ -0,0 +1,241 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.core.GetResponse; +import co.elastic.clients.elasticsearch.core.MgetResponse; +import co.elastic.clients.elasticsearch.core.explain.ExplanationDetail; +import co.elastic.clients.elasticsearch.core.get.GetResult; +import co.elastic.clients.elasticsearch.core.search.CompletionSuggestOption; +import co.elastic.clients.elasticsearch.core.search.Hit; +import co.elastic.clients.elasticsearch.core.search.NestedIdentity; +import co.elastic.clients.json.JsonData; +import co.elastic.clients.json.JsonpMapper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.core.MultiGetItem; +import org.springframework.data.elasticsearch.core.document.Document; +import org.springframework.data.elasticsearch.core.document.Explanation; +import org.springframework.data.elasticsearch.core.document.NestedMetaData; +import org.springframework.data.elasticsearch.core.document.SearchDocument; +import org.springframework.data.elasticsearch.core.document.SearchDocumentAdapter; +import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse; +import org.springframework.util.Assert; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * Utility class to adapt different Elasticsearch responses 
to a + * {@link org.springframework.data.elasticsearch.core.document.Document} + * + * @author Peter-Josef Meisch + * @author Haibo Liu + * @author Mohamed El Harrougui + * @since 4.4 + */ +final class DocumentAdapters { + + private static final Log LOGGER = LogFactory.getLog(DocumentAdapters.class); + + private DocumentAdapters() { + } + + /** + * Creates a {@link SearchDocument} from a {@link Hit} returned by the Elasticsearch client. + * + * @param hit the hit object + * @param jsonpMapper to map JsonData objects + * @return the created {@link SearchDocument} + */ + public static SearchDocument from(Hit hit, JsonpMapper jsonpMapper) { + + Assert.notNull(hit, "hit must not be null"); + + Map> highlightFields = hit.highlight(); + + Map innerHits = new LinkedHashMap<>(); + hit.innerHits().forEach((name, innerHitsResult) -> { + // noinspection ReturnOfNull + innerHits.put(name, SearchDocumentResponseBuilder.from(innerHitsResult.hits(), null, null, null, 0, null, null, + searchDocument -> null, jsonpMapper)); + }); + + NestedMetaData nestedMetaData = from(hit.nested()); + + Explanation explanation = from(hit.explanation()); + + Map matchedQueries = hit.matchedQueries(); + + Function, EntityAsMap> fromFields = fields -> { + StringBuilder sb = new StringBuilder("{"); + final boolean[] firstField = {true}; + hit.fields().forEach((key, jsonData) -> { + if (!firstField[0]) { + sb.append(','); + } + sb.append('"').append(key).append("\":") // + .append(jsonData.toJson(jsonpMapper).toString()); + firstField[0] = false; + }); + sb.append('}'); + return new EntityAsMap().fromJson(sb.toString()); + }; + + EntityAsMap hitFieldsAsMap = fromFields.apply(hit.fields()); + + Map> documentFields = new LinkedHashMap<>(); + hitFieldsAsMap.forEach((key, value) -> { + if (value instanceof List) { + // noinspection unchecked + documentFields.put(key, (List) value); + } else { + documentFields.put(key, Collections.singletonList(value)); + } + }); + + Document document; + Object source = 
hit.source(); + if (source == null) { + document = Document.from(hitFieldsAsMap); + } else { + if (source instanceof EntityAsMap entityAsMap) { + document = Document.from(entityAsMap); + } else if (source instanceof JsonData jsonData) { + document = Document.from(jsonData.to(EntityAsMap.class)); + } else { + + if (LOGGER.isWarnEnabled()) { + LOGGER.warn(String.format("Cannot map from type " + source.getClass().getName())); + } + document = Document.create(); + } + } + document.setIndex(hit.index()); + document.setId(hit.id()); + + if (hit.version() != null) { + document.setVersion(hit.version()); + } + document.setSeqNo(hit.seqNo() != null && hit.seqNo() >= 0 ? hit.seqNo() : -2); // -2 was the default value in the + // old client + document.setPrimaryTerm(hit.primaryTerm() != null && hit.primaryTerm() > 0 ? hit.primaryTerm() : 0); + + float score = hit.score() != null ? hit.score().floatValue() : Float.NaN; + return new SearchDocumentAdapter(document, score, hit.sort().stream().map(TypeUtils::toObject).toArray(), + documentFields, highlightFields, innerHits, nestedMetaData, explanation, matchedQueries, hit.routing()); + } + + public static SearchDocument from(CompletionSuggestOption completionSuggestOption) { + + Document document = completionSuggestOption.source() != null ? Document.from(completionSuggestOption.source()) + : Document.create(); + document.setIndex(completionSuggestOption.index()); + + if (completionSuggestOption.id() != null) { + document.setId(completionSuggestOption.id()); + } + + float score = completionSuggestOption.score() != null ? 
completionSuggestOption.score().floatValue() : Float.NaN; + return new SearchDocumentAdapter(document, score, new Object[]{}, Collections.emptyMap(), Collections.emptyMap(), + Collections.emptyMap(), null, null, null, completionSuggestOption.routing()); + } + + @Nullable + private static Explanation from(co.elastic.clients.elasticsearch.core.explain.@Nullable Explanation explanation) { + + if (explanation == null) { + return null; + } + List details = explanation.details().stream().map(DocumentAdapters::from).collect(Collectors.toList()); + return new Explanation(true, (double) explanation.value(), explanation.description(), details); + } + + private static Explanation from(ExplanationDetail explanationDetail) { + + List details = explanationDetail.details().stream().map(DocumentAdapters::from) + .collect(Collectors.toList()); + return new Explanation(null, (double) explanationDetail.value(), explanationDetail.description(), details); + } + + @Nullable + private static NestedMetaData from(@Nullable NestedIdentity nestedIdentity) { + + if (nestedIdentity == null) { + return null; + } + + NestedMetaData child = from(nestedIdentity.nested()); + return NestedMetaData.of(nestedIdentity.field(), nestedIdentity.offset(), child); + } + + /** + * Creates a {@link Document} from a {@link GetResponse} where the found document is contained as {@link EntityAsMap}. + * + * @param getResponse the response instance + * @return the Document + */ + @Nullable + public static Document from(GetResult getResponse) { + + Assert.notNull(getResponse, "getResponse must not be null"); + + if (!getResponse.found()) { + return null; + } + + Document document = getResponse.source() != null ? 
Document.from(getResponse.source()) : Document.create(); + document.setIndex(getResponse.index()); + document.setId(getResponse.id()); + + if (getResponse.version() != null) { + document.setVersion(getResponse.version()); + } + + if (getResponse.seqNo() != null) { + document.setSeqNo(getResponse.seqNo()); + } + + if (getResponse.primaryTerm() != null) { + document.setPrimaryTerm(getResponse.primaryTerm()); + } + + return document; + } + + /** + * Creates a list of {@link MultiGetItem}s from a {@link MgetResponse} where the data is contained as + * {@link EntityAsMap} instances. + * + * @param mgetResponse the response instance + * @return list of multiget items + */ + public static List> from(MgetResponse mgetResponse) { + + Assert.notNull(mgetResponse, "mgetResponse must not be null"); + + return mgetResponse.docs().stream() // + .map(itemResponse -> MultiGetItem.of( // + itemResponse.isFailure() ? null : from(itemResponse.result()), // + ResponseConverter.getFailure(itemResponse))) + .collect(Collectors.toList()); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchAggregation.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchAggregation.java new file mode 100644 index 0000000000..828e81bf4b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchAggregation.java @@ -0,0 +1,37 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import org.springframework.data.elasticsearch.core.AggregationContainer; + +/** + * {@link AggregationContainer} for a {@link Aggregation} that holds Elasticsearch data. + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ElasticsearchAggregation implements AggregationContainer { + + private final Aggregation aggregation; + + public ElasticsearchAggregation(Aggregation aggregation) { + this.aggregation = aggregation; + } + + @Override + public Aggregation aggregation() { + return aggregation; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchAggregations.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchAggregations.java new file mode 100644 index 0000000000..95e7788024 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchAggregations.java @@ -0,0 +1,78 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.aggregations.Aggregate; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.core.AggregationsContainer; +import org.springframework.util.Assert; + +/** + * AggregationsContainer implementation for the Elasticsearch aggregations. + * + * @author Peter-Josef Meisch + * @author Sascha Woo + * @since 4.4 + */ +public class ElasticsearchAggregations implements AggregationsContainer> { + + private final List aggregations; + private final Map aggregationsAsMap; + + public ElasticsearchAggregations(Map aggregations) { + + Assert.notNull(aggregations, "aggregations must not be null"); + + aggregationsAsMap = new HashMap<>(); + aggregations.forEach((name, aggregate) -> aggregationsAsMap // + .put(name, new ElasticsearchAggregation(new Aggregation(name, aggregate)))); + + this.aggregations = new ArrayList<>(aggregationsAsMap.values()); + } + + @Override + public List aggregations() { + return aggregations; + } + + /** + * @return the {@link ElasticsearchAggregation}s keyed by aggregation name. + */ + public Map aggregationsAsMap() { + return aggregationsAsMap; + } + + /** + * Returns the aggregation that is associated with the specified name. 
+ * + * @param name the name of the aggregation + * @return the aggregation or {@literal null} if not found + */ + @Nullable + public ElasticsearchAggregation get(String name) { + + Assert.notNull(name, "name must not be null"); + + return aggregationsAsMap.get(name); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClientBeanDefinitionParser.java similarity index 59% rename from src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java rename to src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClientBeanDefinitionParser.java index 909ccfbbcf..dc1e0701d3 100644 --- a/src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClientBeanDefinitionParser.java @@ -1,54 +1,47 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.data.elasticsearch.config; - -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.data.elasticsearch.client.NodeClientFactoryBean; -import org.w3c.dom.Element; - -/** - * NodeClientBeanDefinitionParser - * - * @author Rizwan Idrees - * @author Mohsin Husen - */ - -public class NodeClientBeanDefinitionParser extends AbstractBeanDefinitionParser { - - @Override - protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(NodeClientFactoryBean.class); - setLocalSettings(element, builder); - return getSourcedBeanDefinition(builder, element, parserContext); - } - - private void setLocalSettings(Element element, BeanDefinitionBuilder builder) { - builder.addPropertyValue("local", Boolean.valueOf(element.getAttribute("local"))); - builder.addPropertyValue("clusterName", element.getAttribute("cluster-name")); - builder.addPropertyValue("enableHttp", Boolean.valueOf(element.getAttribute("http-enabled"))); - } - - private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source, - ParserContext context) { - AbstractBeanDefinition definition = builder.getBeanDefinition(); - definition.setSource(context.extractSource(source)); - return definition; - } -} +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; +import org.springframework.beans.factory.xml.ParserContext; +import org.w3c.dom.Element; + +/** + * @author Peter-Josef Meisch + * @since 5.0 + */ +public class ElasticsearchClientBeanDefinitionParser extends AbstractBeanDefinitionParser { + + @Override + protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { + BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ElasticsearchClientFactoryBean.class); + setConfigurations(element, builder); + return getSourcedBeanDefinition(builder, element, parserContext); + } + + private void setConfigurations(Element element, BeanDefinitionBuilder builder) { + builder.addPropertyValue("hosts", element.getAttribute("hosts")); + } + + private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source, + ParserContext context) { + AbstractBeanDefinition definition = builder.getBeanDefinition(); + definition.setSource(context.extractSource(source)); + return definition; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClientFactoryBean.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClientFactoryBean.java new file mode 100644 index 0000000000..2ca7fcbb82 
--- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClientFactoryBean.java @@ -0,0 +1,97 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.ElasticsearchClient; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.FactoryBeanNotInitializedException; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.elasticsearch.client.ClientConfiguration; +import org.springframework.util.Assert; + +/** + * ElasticsearchClientFactoryBean + * + * @author Peter-Josef Meisch + * @since 5.0 + */ +public class ElasticsearchClientFactoryBean + implements FactoryBean, InitializingBean, DisposableBean { + + private static final Log LOGGER = LogFactory.getLog(ElasticsearchClientFactoryBean.class); + + private @Nullable AutoCloseableElasticsearchClient client; + private String hosts = "http://localhost:9200/"; + static final String COMMA = ","; + + @Override + public void destroy() { + try { + LOGGER.info("Closing elasticSearch client"); + if (client != null) { + client.close(); + } + } catch 
(final Exception e) { + LOGGER.error("Error closing ElasticSearch client: ", e); + } + } + + @Override + public void afterPropertiesSet() throws Exception { + buildClient(); + } + + @Override + public ElasticsearchClient getObject() { + + if (client == null) { + throw new FactoryBeanNotInitializedException(); + } + + return client; + } + + @Override + public Class getObjectType() { + return ElasticsearchClient.class; + } + + @Override + public boolean isSingleton() { + return false; + } + + protected void buildClient() throws Exception { + + Assert.hasText(hosts, "[Assertion Failed] At least one host must be set."); + + var clientConfiguration = ClientConfiguration.builder().connectedTo(hosts).build(); + client = (AutoCloseableElasticsearchClient) ElasticsearchClients.createImperative(clientConfiguration); + } + + public void setHosts(String hosts) { + this.hosts = hosts; + } + + public String getHosts() { + return this.hosts; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClients.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClients.java new file mode 100644 index 0000000000..c4f6452cc0 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchClients.java @@ -0,0 +1,422 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.ElasticsearchClient; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.json.jackson.JacksonJsonpMapper; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.transport.TransportUtils; +import co.elastic.clients.transport.Version; +import co.elastic.clients.transport.rest_client.RestClientOptions; +import co.elastic.clients.transport.rest_client.RestClientTransport; + +import java.net.InetSocketAddress; +import java.time.Duration; +import java.util.Arrays; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.apache.http.HttpHost; +import org.apache.http.HttpRequest; +import org.apache.http.HttpRequestInterceptor; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.entity.ContentType; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.message.BasicHeader; +import org.apache.http.message.BasicNameValuePair; +import org.apache.http.protocol.HttpContext; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.client.ClientConfiguration; +import org.springframework.data.elasticsearch.support.HttpHeaders; +import org.springframework.util.Assert; + +/** + * Utility class to create the different Elasticsearch clients + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +@SuppressWarnings("unused") +public final class ElasticsearchClients { + /** + * Name of the header whose value can be used to correlate log messages for this request. 
+ */ + private static final String X_SPRING_DATA_ELASTICSEARCH_CLIENT = "X-SpringDataElasticsearch-Client"; + public static final String IMPERATIVE_CLIENT = "imperative"; + public static final String REACTIVE_CLIENT = "reactive"; + + private static final JsonpMapper DEFAULT_JSONP_MAPPER = new JacksonJsonpMapper(); + + // region reactive client + /** + * Creates a new {@link ReactiveElasticsearchClient} + * + * @param clientConfiguration configuration options, must not be {@literal null}. + * @return the {@link ReactiveElasticsearchClient} + */ + public static ReactiveElasticsearchClient createReactive(ClientConfiguration clientConfiguration) { + + Assert.notNull(clientConfiguration, "clientConfiguration must not be null"); + + return createReactive(getRestClient(clientConfiguration), null, DEFAULT_JSONP_MAPPER); + } + + /** + * Creates a new {@link ReactiveElasticsearchClient} + * + * @param clientConfiguration configuration options, must not be {@literal null}. + * @param transportOptions options to be added to each request. + * @return the {@link ReactiveElasticsearchClient} + */ + public static ReactiveElasticsearchClient createReactive(ClientConfiguration clientConfiguration, + @Nullable TransportOptions transportOptions) { + + Assert.notNull(clientConfiguration, "ClientConfiguration must not be null!"); + + return createReactive(getRestClient(clientConfiguration), transportOptions, DEFAULT_JSONP_MAPPER); + } + + /** + * Creates a new {@link ReactiveElasticsearchClient} + * + * @param clientConfiguration configuration options, must not be {@literal null}. + * @param transportOptions options to be added to each request. 
+ * @param jsonpMapper the JsonpMapper to use + * @return the {@link ReactiveElasticsearchClient} + */ + public static ReactiveElasticsearchClient createReactive(ClientConfiguration clientConfiguration, + @Nullable TransportOptions transportOptions, JsonpMapper jsonpMapper) { + + Assert.notNull(clientConfiguration, "ClientConfiguration must not be null!"); + Assert.notNull(jsonpMapper, "jsonpMapper must not be null"); + + return createReactive(getRestClient(clientConfiguration), transportOptions, jsonpMapper); + } + + /** + * Creates a new {@link ReactiveElasticsearchClient}. + * + * @param restClient the underlying {@link RestClient} + * @return the {@link ReactiveElasticsearchClient} + */ + public static ReactiveElasticsearchClient createReactive(RestClient restClient) { + return createReactive(restClient, null, DEFAULT_JSONP_MAPPER); + } + + /** + * Creates a new {@link ReactiveElasticsearchClient}. + * + * @param restClient the underlying {@link RestClient} + * @param transportOptions options to be added to each request. + * @return the {@link ReactiveElasticsearchClient} + */ + public static ReactiveElasticsearchClient createReactive(RestClient restClient, + @Nullable TransportOptions transportOptions, JsonpMapper jsonpMapper) { + + Assert.notNull(restClient, "restClient must not be null"); + + var transport = getElasticsearchTransport(restClient, REACTIVE_CLIENT, transportOptions, jsonpMapper); + return createReactive(transport); + } + + /** + * Creates a new {@link ReactiveElasticsearchClient} that uses the given {@link ElasticsearchTransport}. 
+ * + * @param transport the transport to use + * @return the {@link ElasticsearchClient} + */ + public static ReactiveElasticsearchClient createReactive(ElasticsearchTransport transport) { + + Assert.notNull(transport, "transport must not be null"); + + return new ReactiveElasticsearchClient(transport); + } + // endregion + + // region imperative client + /** + * Creates a new imperative {@link ElasticsearchClient} + * + * @param clientConfiguration configuration options, must not be {@literal null}. + * @return the {@link ElasticsearchClient} + */ + public static ElasticsearchClient createImperative(ClientConfiguration clientConfiguration) { + return createImperative(getRestClient(clientConfiguration), null, DEFAULT_JSONP_MAPPER); + } + + /** + * Creates a new imperative {@link ElasticsearchClient} + * + * @param clientConfiguration configuration options, must not be {@literal null}. + * @param transportOptions options to be added to each request. + * @return the {@link ElasticsearchClient} + */ + public static ElasticsearchClient createImperative(ClientConfiguration clientConfiguration, + TransportOptions transportOptions) { + return createImperative(getRestClient(clientConfiguration), transportOptions, DEFAULT_JSONP_MAPPER); + } + + /** + * Creates a new imperative {@link ElasticsearchClient} + * + * @param restClient the RestClient to use + * @return the {@link ElasticsearchClient} + */ + public static ElasticsearchClient createImperative(RestClient restClient) { + return createImperative(restClient, null, DEFAULT_JSONP_MAPPER); + } + + /** + * Creates a new imperative {@link ElasticsearchClient} + * + * @param restClient the RestClient to use + * @param transportOptions options to be added to each request. 
+ * @param jsonpMapper the mapper for the transport to use + * @return the {@link ElasticsearchClient} + */ + public static ElasticsearchClient createImperative(RestClient restClient, @Nullable TransportOptions transportOptions, + JsonpMapper jsonpMapper) { + + Assert.notNull(restClient, "restClient must not be null"); + + ElasticsearchTransport transport = getElasticsearchTransport(restClient, IMPERATIVE_CLIENT, transportOptions, + jsonpMapper); + + return createImperative(transport); + } + + /** + * Creates a new {@link ElasticsearchClient} that uses the given {@link ElasticsearchTransport}. + * + * @param transport the transport to use + * @return the {@link ElasticsearchClient} + */ + public static AutoCloseableElasticsearchClient createImperative(ElasticsearchTransport transport) { + + Assert.notNull(transport, "transport must not be null"); + + return new AutoCloseableElasticsearchClient(transport); + } + // endregion + + // region low level RestClient + private static RestClientOptions.Builder getRestClientOptionsBuilder(@Nullable TransportOptions transportOptions) { + + if (transportOptions instanceof RestClientOptions restClientOptions) { + return restClientOptions.toBuilder(); + } + + var builder = new RestClientOptions.Builder(RequestOptions.DEFAULT.toBuilder()); + + if (transportOptions != null) { + transportOptions.headers().forEach(header -> builder.addHeader(header.getKey(), header.getValue())); + transportOptions.queryParameters().forEach(builder::setParameter); + builder.onWarnings(transportOptions.onWarnings()); + } + + return builder; + } + + /** + * Creates a low level {@link RestClient} for the given configuration. 
+ * + * @param clientConfiguration must not be {@literal null} + * @return the {@link RestClient} + */ + public static RestClient getRestClient(ClientConfiguration clientConfiguration) { + return getRestClientBuilder(clientConfiguration).build(); + } + + private static RestClientBuilder getRestClientBuilder(ClientConfiguration clientConfiguration) { + HttpHost[] httpHosts = formattedHosts(clientConfiguration.getEndpoints(), clientConfiguration.useSsl()).stream() + .map(HttpHost::create).toArray(HttpHost[]::new); + RestClientBuilder builder = RestClient.builder(httpHosts); + + if (clientConfiguration.getPathPrefix() != null) { + builder.setPathPrefix(clientConfiguration.getPathPrefix()); + } + + HttpHeaders headers = clientConfiguration.getDefaultHeaders(); + + if (!headers.isEmpty()) { + builder.setDefaultHeaders(toHeaderArray(headers)); + } + + builder.setHttpClientConfigCallback(clientBuilder -> { + if (clientConfiguration.getCaFingerprint().isPresent()) { + clientBuilder + .setSSLContext(TransportUtils.sslContextFromCaFingerprint(clientConfiguration.getCaFingerprint().get())); + } + clientConfiguration.getSslContext().ifPresent(clientBuilder::setSSLContext); + clientConfiguration.getHostNameVerifier().ifPresent(clientBuilder::setSSLHostnameVerifier); + clientBuilder.addInterceptorLast(new CustomHeaderInjector(clientConfiguration.getHeadersSupplier())); + + RequestConfig.Builder requestConfigBuilder = RequestConfig.custom(); + Duration connectTimeout = clientConfiguration.getConnectTimeout(); + + if (!connectTimeout.isNegative()) { + requestConfigBuilder.setConnectTimeout(Math.toIntExact(connectTimeout.toMillis())); + } + + Duration socketTimeout = clientConfiguration.getSocketTimeout(); + + if (!socketTimeout.isNegative()) { + requestConfigBuilder.setSocketTimeout(Math.toIntExact(socketTimeout.toMillis())); + requestConfigBuilder.setConnectionRequestTimeout(Math.toIntExact(socketTimeout.toMillis())); + } + + 
clientBuilder.setDefaultRequestConfig(requestConfigBuilder.build()); + + clientConfiguration.getProxy().map(HttpHost::create).ifPresent(clientBuilder::setProxy); + + for (ClientConfiguration.ClientConfigurationCallback clientConfigurer : clientConfiguration + .getClientConfigurers()) { + if (clientConfigurer instanceof ElasticsearchHttpClientConfigurationCallback restClientConfigurationCallback) { + clientBuilder = restClientConfigurationCallback.configure(clientBuilder); + } + } + + return clientBuilder; + }); + + for (ClientConfiguration.ClientConfigurationCallback clientConfigurationCallback : clientConfiguration + .getClientConfigurers()) { + if (clientConfigurationCallback instanceof ElasticsearchRestClientConfigurationCallback configurationCallback) { + builder = configurationCallback.configure(builder); + } + } + return builder; + } + // endregion + + // region Elasticsearch transport + /** + * Creates an {@link ElasticsearchTransport} that will use the given client that additionally is customized with a + * header to contain the clientType + * + * @param restClient the client to use + * @param clientType the client type to pass in each request as header + * @param transportOptions options for the transport + * @param jsonpMapper mapper for the transport + * @return ElasticsearchTransport + */ + public static ElasticsearchTransport getElasticsearchTransport(RestClient restClient, String clientType, + @Nullable TransportOptions transportOptions, JsonpMapper jsonpMapper) { + + Assert.notNull(restClient, "restClient must not be null"); + Assert.notNull(clientType, "clientType must not be null"); + Assert.notNull(jsonpMapper, "jsonpMapper must not be null"); + + TransportOptions.Builder transportOptionsBuilder = transportOptions != null ? 
transportOptions.toBuilder() + : new RestClientOptions(RequestOptions.DEFAULT, false).toBuilder(); + + RestClientOptions.Builder restClientOptionsBuilder = getRestClientOptionsBuilder(transportOptions); + + ContentType jsonContentType = Version.VERSION == null ? ContentType.APPLICATION_JSON + : ContentType.create("application/vnd.elasticsearch+json", + new BasicNameValuePair("compatible-with", String.valueOf(Version.VERSION.major()))); + + Consumer setHeaderIfNotPresent = header -> { + if (restClientOptionsBuilder.build().headers().stream() // + .noneMatch((h) -> h.getKey().equalsIgnoreCase(header))) { + // need to add the compatibility header, this is only done automatically when not passing in custom options. + // code copied from RestClientTransport as it is not available outside the package + restClientOptionsBuilder.addHeader(header, jsonContentType.toString()); + } + }; + + setHeaderIfNotPresent.accept("Content-Type"); + setHeaderIfNotPresent.accept("Accept"); + + restClientOptionsBuilder.addHeader(X_SPRING_DATA_ELASTICSEARCH_CLIENT, clientType); + + return new RestClientTransport(restClient, jsonpMapper, restClientOptionsBuilder.build()); + } + // endregion + + private static List formattedHosts(List hosts, boolean useSsl) { + return hosts.stream().map(it -> (useSsl ? "https" : "http") + "://" + it.getHostString() + ':' + it.getPort()) + .collect(Collectors.toList()); + } + + private static org.apache.http.Header[] toHeaderArray(HttpHeaders headers) { + return headers.entrySet().stream() // + .flatMap(entry -> entry.getValue().stream() // + .map(value -> new BasicHeader(entry.getKey(), value))) // + .toArray(org.apache.http.Header[]::new); + } + + /** + * Interceptor to inject custom supplied headers. 
+ * + * @since 4.4 + */ + private record CustomHeaderInjector(Supplier headersSupplier) implements HttpRequestInterceptor { + + @Override + public void process(HttpRequest request, HttpContext context) { + HttpHeaders httpHeaders = headersSupplier.get(); + + if (httpHeaders != null && !httpHeaders.isEmpty()) { + Arrays.stream(toHeaderArray(httpHeaders)).forEach(request::addHeader); + } + } + } + + /** + * {@link org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationCallback} to configure + * the Elasticsearch RestClient's Http client with a {@link HttpAsyncClientBuilder} + * + * @since 4.4 + */ + public interface ElasticsearchHttpClientConfigurationCallback + extends ClientConfiguration.ClientConfigurationCallback { + + static ElasticsearchHttpClientConfigurationCallback from( + Function httpClientBuilderCallback) { + + Assert.notNull(httpClientBuilderCallback, "httpClientBuilderCallback must not be null"); + + return httpClientBuilderCallback::apply; + } + } + + /** + * {@link org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationCallback} to configure + * the RestClient client with a {@link RestClientBuilder} + * + * @since 5.0 + */ + public interface ElasticsearchRestClientConfigurationCallback + extends ClientConfiguration.ClientConfigurationCallback { + + static ElasticsearchRestClientConfigurationCallback from( + Function restClientBuilderCallback) { + + Assert.notNull(restClientBuilderCallback, "restClientBuilderCallback must not be null"); + + return restClientBuilderCallback::apply; + } + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchConfiguration.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchConfiguration.java new file mode 100644 index 0000000000..93d26101ab --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchConfiguration.java @@ -0,0 +1,140 @@ +/* + * Copyright 
2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.ElasticsearchClient; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.json.jackson.JacksonJsonpMapper; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.transport.rest_client.RestClientOptions; + +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestClient; +import org.springframework.context.annotation.Bean; +import org.springframework.data.elasticsearch.client.ClientConfiguration; +import org.springframework.data.elasticsearch.config.ElasticsearchConfigurationSupport; +import org.springframework.data.elasticsearch.core.ElasticsearchOperations; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.util.Assert; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + +/** + * Base class for a @{@link org.springframework.context.annotation.Configuration} class to set up the Elasticsearch + * connection using the Elasticsearch Client. This class exposes different parts of the setup as Spring beans. 
Deriving + * classes must provide the {@link ClientConfiguration} to use. + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public abstract class ElasticsearchConfiguration extends ElasticsearchConfigurationSupport { + + /** + * Must be implemented by deriving classes to provide the {@link ClientConfiguration}. + * + * @return configuration, must not be {@literal null} + */ + @Bean(name = "elasticsearchClientConfiguration") + public abstract ClientConfiguration clientConfiguration(); + + /** + * Provides the underlying low level Elasticsearch RestClient. + * + * @param clientConfiguration configuration for the client, must not be {@literal null} + * @return RestClient + */ + @Bean + public RestClient elasticsearchRestClient(ClientConfiguration clientConfiguration) { + + Assert.notNull(clientConfiguration, "clientConfiguration must not be null"); + + return ElasticsearchClients.getRestClient(clientConfiguration); + } + + /** + * Provides the Elasticsearch transport to be used. The default implementation uses the {@link RestClient} bean and + * the {@link JsonpMapper} bean provided in this class. + * + * @return the {@link ElasticsearchTransport} + * @since 5.2 + */ + @Bean + public ElasticsearchTransport elasticsearchTransport(RestClient restClient, JsonpMapper jsonpMapper) { + + Assert.notNull(restClient, "restClient must not be null"); + Assert.notNull(jsonpMapper, "jsonpMapper must not be null"); + + return ElasticsearchClients.getElasticsearchTransport(restClient, ElasticsearchClients.IMPERATIVE_CLIENT, + transportOptions(), jsonpMapper); + } + + /** + * Provides the {@link ElasticsearchClient} to be used. 
+ * + * @param transport the {@link ElasticsearchTransport} to use + * @return ElasticsearchClient instance + */ + @Bean + public ElasticsearchClient elasticsearchClient(ElasticsearchTransport transport) { + + Assert.notNull(transport, "transport must not be null"); + + return ElasticsearchClients.createImperative(transport); + } + + /** + * Creates a {@link ElasticsearchOperations} implementation using an + * {@link co.elastic.clients.elasticsearch.ElasticsearchClient}. + * + * @return never {@literal null}. + */ + @Bean(name = { "elasticsearchOperations", "elasticsearchTemplate" }) + public ElasticsearchOperations elasticsearchOperations(ElasticsearchConverter elasticsearchConverter, + ElasticsearchClient elasticsearchClient) { + + ElasticsearchTemplate template = new ElasticsearchTemplate(elasticsearchClient, elasticsearchConverter); + template.setRefreshPolicy(refreshPolicy()); + + return template; + } + + /** + * Provides the JsonpMapper bean that is used in the {@link #elasticsearchTransport(RestClient, JsonpMapper)} method. + * + * @return the {@link JsonpMapper} to use + * @since 5.2 + */ + @Bean + public JsonpMapper jsonpMapper() { + // we need to create our own objectMapper that keeps null values in order to provide the storeNullValue + // functionality. The one Elasticsearch would provide removes the nulls. We remove unwanted nulls before they get + // into this mapper, so we can safely keep them here. + var objectMapper = (new ObjectMapper()) + .configure(SerializationFeature.INDENT_OUTPUT, false) + .setSerializationInclusion(JsonInclude.Include.ALWAYS); + return new JacksonJsonpMapper(objectMapper); + } + + /** + * @return the options that should be added to every request. 
Must not be {@literal null} + */ + public TransportOptions transportOptions() { + return new RestClientOptions(RequestOptions.DEFAULT, false); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java new file mode 100644 index 0000000000..224e9c671f --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java @@ -0,0 +1,138 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.ElasticsearchException; +import co.elastic.clients.elasticsearch._types.ErrorResponse; +import co.elastic.clients.json.JsonpMapper; + +import java.io.IOException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.elasticsearch.client.ResponseException; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.elasticsearch.NoSuchIndexException; +import org.springframework.data.elasticsearch.ResourceNotFoundException; +import org.springframework.data.elasticsearch.UncategorizedElasticsearchException; +import org.springframework.data.elasticsearch.VersionConflictException; + +/** + * Simple {@link PersistenceExceptionTranslator} for Elasticsearch. Convert the given runtime exception to an + * appropriate exception from the {@code org.springframework.dao} hierarchy. Return {@literal null} if no translation is + * appropriate: any other exception may have resulted from user code, and should not be translated. + * + * @author Peter-Josef Meisch + * @author Junghoon Ban + * @since 4.4 + */ +public class ElasticsearchExceptionTranslator implements PersistenceExceptionTranslator { + + private final JsonpMapper jsonpMapper; + + public ElasticsearchExceptionTranslator(JsonpMapper jsonpMapper) { + this.jsonpMapper = jsonpMapper; + } + + /** + * translates an Exception if possible. Exceptions that are no {@link RuntimeException}s are wrapped in a + * RuntimeException + * + * @param throwable the Exception to map + * @return the potentially translated RuntimeException. 
+ */ + public RuntimeException translateException(Throwable throwable) { + + RuntimeException runtimeException = throwable instanceof RuntimeException ex ? ex + : new RuntimeException(throwable.getMessage(), throwable); + RuntimeException potentiallyTranslatedException = translateExceptionIfPossible(runtimeException); + + return potentiallyTranslatedException != null ? potentiallyTranslatedException : runtimeException; + } + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + + checkForConflictException(ex); + + if (ex instanceof ElasticsearchException elasticsearchException) { + + ErrorResponse response = elasticsearchException.response(); + var errorType = response.error().type(); + var errorReason = response.error().reason() != null ? response.error().reason() : "undefined reason"; + + if (response.status() == 404) { + + if ("index_not_found_exception".equals(errorType)) { + // noinspection RegExpRedundantEscape + Pattern pattern = Pattern.compile(".*no such index \\[(.*)\\]"); + String index = ""; + Matcher matcher = pattern.matcher(errorReason); + if (matcher.matches()) { + index = matcher.group(1); + } + return new NoSuchIndexException(index); + } + + return new ResourceNotFoundException(errorReason); + } + + if (response.status() == 409) { + + } + String body = JsonUtils.toJson(response, jsonpMapper); + + if (errorType != null && errorType.contains("validation_exception")) { + return new DataIntegrityViolationException(errorReason); + } + + return new UncategorizedElasticsearchException(ex.getMessage(), response.status(), body, ex); + } + + Throwable cause = ex.getCause(); + if (cause instanceof IOException) { + return new DataAccessResourceFailureException(ex.getMessage(), ex); + } + + return null; + } + + private void checkForConflictException(Throwable exception) { + Integer status = null; + String message = null; + + if (exception instanceof ResponseException responseException) { + status = 
responseException.getResponse().getStatusLine().getStatusCode(); + message = responseException.getMessage(); + } else if (exception.getCause() != null) { + checkForConflictException(exception.getCause()); + } + + if (status != null && message != null) { + if (status == 409 && message.contains("type\":\"version_conflict_engine_exception")) + if (message.contains("version conflict, required seqNo")) { + throw new OptimisticLockingFailureException("Cannot index a document due to seq_no+primary_term conflict", + exception); + } else if (message.contains("version conflict, current version [")) { + throw new VersionConflictException("Version conflict", exception); + } + } + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchTemplate.java new file mode 100644 index 0000000000..3e0d6235d9 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchTemplate.java @@ -0,0 +1,741 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*; + +import co.elastic.clients.elasticsearch.ElasticsearchClient; +import co.elastic.clients.elasticsearch._types.Time; +import co.elastic.clients.elasticsearch.core.*; +import co.elastic.clients.elasticsearch.core.bulk.BulkResponseItem; +import co.elastic.clients.elasticsearch.core.msearch.MultiSearchResponseItem; +import co.elastic.clients.elasticsearch.core.search.ResponseBody; +import co.elastic.clients.elasticsearch.sql.ElasticsearchSqlClient; +import co.elastic.clients.elasticsearch.sql.QueryResponse; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.transport.Version; + +import java.io.IOException; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.BulkFailureException; +import org.springframework.data.elasticsearch.client.UnsupportedBackendOperation; +import org.springframework.data.elasticsearch.core.AbstractElasticsearchTemplate; +import org.springframework.data.elasticsearch.core.IndexOperations; +import org.springframework.data.elasticsearch.core.IndexedObjectInformation; +import org.springframework.data.elasticsearch.core.MultiGetItem; +import org.springframework.data.elasticsearch.core.SearchHits; +import org.springframework.data.elasticsearch.core.SearchScrollHits; +import org.springframework.data.elasticsearch.core.cluster.ClusterOperations; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.data.elasticsearch.core.document.Document; +import 
org.springframework.data.elasticsearch.core.document.SearchDocumentResponse; +import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates; +import org.springframework.data.elasticsearch.core.query.*; +import org.springframework.data.elasticsearch.core.query.UpdateResponse; +import org.springframework.data.elasticsearch.core.reindex.ReindexRequest; +import org.springframework.data.elasticsearch.core.reindex.ReindexResponse; +import org.springframework.data.elasticsearch.core.script.Script; +import org.springframework.data.elasticsearch.core.sql.SqlResponse; +import org.springframework.util.Assert; + +/** + * Implementation of {@link org.springframework.data.elasticsearch.core.ElasticsearchOperations} using the new + * Elasticsearch client. + * + * @author Peter-Josef Meisch + * @author Hamid Rahimi + * @author Illia Ulianov + * @author Haibo Liu + * @since 4.4 + */ +public class ElasticsearchTemplate extends AbstractElasticsearchTemplate { + + private static final Log LOGGER = LogFactory.getLog(ElasticsearchTemplate.class); + + private final ElasticsearchClient client; + private final ElasticsearchSqlClient sqlClient; + private final RequestConverter requestConverter; + private final ResponseConverter responseConverter; + private final JsonpMapper jsonpMapper; + private final ElasticsearchExceptionTranslator exceptionTranslator; + + // region _initialization + public ElasticsearchTemplate(ElasticsearchClient client) { + + Assert.notNull(client, "client must not be null"); + + this.client = client; + this.sqlClient = client.sql(); + this.jsonpMapper = client._transport().jsonpMapper(); + requestConverter = new RequestConverter(elasticsearchConverter, jsonpMapper); + responseConverter = new ResponseConverter(jsonpMapper); + exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); + } + + public ElasticsearchTemplate(ElasticsearchClient client, ElasticsearchConverter elasticsearchConverter) { + super(elasticsearchConverter); + + 
Assert.notNull(client, "client must not be null"); + + this.client = client; + this.sqlClient = client.sql(); + this.jsonpMapper = client._transport().jsonpMapper(); + requestConverter = new RequestConverter(elasticsearchConverter, jsonpMapper); + responseConverter = new ResponseConverter(jsonpMapper); + exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); + } + + @Override + protected AbstractElasticsearchTemplate doCopy() { + return new ElasticsearchTemplate(client, elasticsearchConverter); + } + // endregion + + // region child templates + @Override + public IndexOperations indexOps(Class clazz) { + return new IndicesTemplate(client.indices(), getClusterTemplate(), elasticsearchConverter, clazz); + } + + @Override + public IndexOperations indexOps(IndexCoordinates index) { + return new IndicesTemplate(client.indices(), getClusterTemplate(), elasticsearchConverter, index); + } + + @Override + public ClusterOperations cluster() { + return getClusterTemplate(); + } + + private ClusterTemplate getClusterTemplate() { + return new ClusterTemplate(client.cluster(), elasticsearchConverter); + } + // endregion + + // region document operations + @Override + @Nullable + public T get(String id, Class clazz, IndexCoordinates index) { + + GetRequest getRequest = requestConverter.documentGetRequest(elasticsearchConverter.convertId(id), + routingResolver.getRouting(), index); + GetResponse getResponse = execute(client -> client.get(getRequest, EntityAsMap.class)); + + ReadDocumentCallback callback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, index); + return callback.doWith(DocumentAdapters.from(getResponse)); + } + + @Override + public List> multiGet(Query query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + + MgetRequest request = requestConverter.documentMgetRequest(query, clazz, index); + MgetResponse result = execute(client -> 
client.mget(request, EntityAsMap.class)); + + ReadDocumentCallback callback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, index); + + return DocumentAdapters.from(result).stream() // + .map(multiGetItem -> MultiGetItem.of( // + multiGetItem.isFailed() ? null : callback.doWith(multiGetItem.getItem()), multiGetItem.getFailure())) // + .collect(Collectors.toList()); + } + + @Override + public void bulkUpdate(List queries, BulkOptions bulkOptions, IndexCoordinates index) { + + Assert.notNull(queries, "queries must not be null"); + Assert.notNull(bulkOptions, "bulkOptions must not be null"); + Assert.notNull(index, "index must not be null"); + + doBulkOperation(queries, bulkOptions, index); + } + + @Override + public ByQueryResponse delete(DeleteQuery query, Class clazz) { + return delete(query, clazz, getIndexCoordinatesFor(clazz)); + } + + @Override + public ByQueryResponse delete(DeleteQuery query, Class clazz, IndexCoordinates index) { + Assert.notNull(query, "query must not be null"); + + DeleteByQueryRequest request = requestConverter.documentDeleteByQueryRequest(query, routingResolver.getRouting(), + clazz, index, getRefreshPolicy()); + + DeleteByQueryResponse response = execute(client -> client.deleteByQuery(request)); + + return responseConverter.byQueryResponse(response); + } + + @Override + public UpdateResponse update(UpdateQuery updateQuery, IndexCoordinates index) { + + UpdateRequest request = requestConverter.documentUpdateRequest(updateQuery, index, getRefreshPolicy(), + routingResolver.getRouting()); + co.elastic.clients.elasticsearch.core.UpdateResponse response = execute( + client -> client.update(request, Document.class)); + return UpdateResponse.of(result(response.result())); + } + + @Override + public ByQueryResponse updateByQuery(UpdateQuery updateQuery, IndexCoordinates index) { + + Assert.notNull(updateQuery, "updateQuery must not be null"); + Assert.notNull(index, "index must not be null"); + + UpdateByQueryRequest request = 
requestConverter.documentUpdateByQueryRequest(updateQuery, index, + getRefreshPolicy()); + + UpdateByQueryResponse byQueryResponse = execute(client -> client.updateByQuery(request)); + return responseConverter.byQueryResponse(byQueryResponse); + } + + @Override + public String doIndex(IndexQuery query, IndexCoordinates indexCoordinates) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(indexCoordinates, "indexCoordinates must not be null"); + + IndexRequest indexRequest = requestConverter.documentIndexRequest(query, indexCoordinates, refreshPolicy); + + IndexResponse indexResponse = execute(client -> client.index(indexRequest)); + + Object queryObject = query.getObject(); + + if (queryObject != null) { + query.setObject(entityOperations.updateIndexedObject( + queryObject, + new IndexedObjectInformation( + indexResponse.id(), + indexResponse.index(), + indexResponse.seqNo(), + indexResponse.primaryTerm(), + indexResponse.version()), + elasticsearchConverter, + routingResolver)); + } + + return indexResponse.id(); + } + + @Override + protected boolean doExists(String id, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(index, "index must not be null"); + + ExistsRequest request = requestConverter.documentExistsRequest(id, routingResolver.getRouting(), index); + + return execute(client -> client.exists(request)).value(); + } + + @Override + protected String doDelete(String id, @Nullable String routing, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(index, "index must not be null"); + + DeleteRequest request = requestConverter.documentDeleteRequest(elasticsearchConverter.convertId(id), routing, index, + getRefreshPolicy()); + return execute(client -> client.delete(request)).id(); + } + + @Override + public ReindexResponse reindex(ReindexRequest reindexRequest) { + + Assert.notNull(reindexRequest, "reindexRequest must not be null"); + + 
co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, + true); + co.elastic.clients.elasticsearch.core.ReindexResponse reindexResponse = execute( + client -> client.reindex(reindexRequestES)); + return responseConverter.reindexResponse(reindexResponse); + } + + @Override + public String submitReindex(ReindexRequest reindexRequest) { + + co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, + false); + co.elastic.clients.elasticsearch.core.ReindexResponse reindexResponse = execute( + client -> client.reindex(reindexRequestES)); + + if (reindexResponse.task() == null) { + throw new UnsupportedBackendOperation("ElasticsearchClient did not return a task id on submit request"); + } + + return reindexResponse.task(); + } + + @Override + public List doBulkOperation(List queries, BulkOptions bulkOptions, + IndexCoordinates index) { + + BulkRequest bulkRequest = requestConverter.documentBulkRequest(queries, bulkOptions, index, refreshPolicy); + BulkResponse bulkResponse = execute(client -> client.bulk(bulkRequest)); + List indexedObjectInformationList = checkForBulkOperationFailure(bulkResponse); + updateIndexedObjectsWithQueries(queries, indexedObjectInformationList); + return indexedObjectInformationList; + } + + // endregion + + @Override + public String getClusterVersion() { + return execute(client -> client.info().version().number()); + } + + @Override + public String getVendor() { + return "Elasticsearch"; + } + + @Override + public String getRuntimeLibraryVersion() { + return Version.VERSION != null ? 
Version.VERSION.toString() : "0.0.0.?"; + } + + // region search operations + @Override + public long count(Query query, @Nullable Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(index, "index must not be null"); + + SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, + true); + + SearchResponse searchResponse = execute(client -> client.search(searchRequest, EntityAsMap.class)); + + return searchResponse.hits().total().value(); + } + + @Override + public SearchHits search(Query query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + Assert.notNull(index, "index must not be null"); + + if (query instanceof SearchTemplateQuery searchTemplateQuery) { + return doSearch(searchTemplateQuery, clazz, index); + } else { + return doSearch(query, clazz, index); + } + } + + protected SearchHits doSearch(Query query, Class clazz, IndexCoordinates index) { + SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, + false); + SearchResponse searchResponse = execute(client -> client.search(searchRequest, EntityAsMap.class)); + + // noinspection DuplicatedCode + ReadDocumentCallback readDocumentCallback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, index); + SearchDocumentResponse.EntityCreator entityCreator = getEntityCreator(readDocumentCallback); + SearchDocumentResponseCallback> callback = new ReadSearchDocumentResponseCallback<>(clazz, index); + + return callback.doWith(SearchDocumentResponseBuilder.from(searchResponse, entityCreator, jsonpMapper)); + } + + protected SearchHits doSearch(SearchTemplateQuery query, Class clazz, IndexCoordinates index) { + var searchTemplateRequest = requestConverter.searchTemplate(query, routingResolver.getRouting(), index); + var searchTemplateResponse = 
execute(client -> client.searchTemplate(searchTemplateRequest, EntityAsMap.class)); + + // noinspection DuplicatedCode + ReadDocumentCallback readDocumentCallback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, index); + SearchDocumentResponse.EntityCreator entityCreator = getEntityCreator(readDocumentCallback); + SearchDocumentResponseCallback> callback = new ReadSearchDocumentResponseCallback<>(clazz, index); + + return callback.doWith(SearchDocumentResponseBuilder.from(searchTemplateResponse, entityCreator, jsonpMapper)); + } + + @Override + protected SearchHits doSearch(MoreLikeThisQuery query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + Assert.notNull(index, "index must not be null"); + + return search(NativeQuery.builder() // + .withQuery(q -> q.moreLikeThis(requestConverter.moreLikeThisQuery(query, index)))// + .withPageable(query.getPageable()) // + .build(), clazz, index); + } + + @Override + public SearchScrollHits searchScrollStart(long scrollTimeInMillis, Query query, Class clazz, + IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(query.getPageable(), "pageable of query must not be null."); + + SearchRequest request = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, false, + scrollTimeInMillis); + SearchResponse response = execute(client -> client.search(request, EntityAsMap.class)); + + return getSearchScrollHits(clazz, index, response); + } + + @Override + public SearchScrollHits searchScrollContinue(String scrollId, long scrollTimeInMillis, Class clazz, + IndexCoordinates index) { + + Assert.notNull(scrollId, "scrollId must not be null"); + + ScrollRequest request = ScrollRequest + .of(sr -> sr.scrollId(scrollId).scroll(Time.of(t -> t.time(scrollTimeInMillis + "ms")))); + ScrollResponse response = execute(client -> client.scroll(request, EntityAsMap.class)); + 
+ return getSearchScrollHits(clazz, index, response); + } + + private SearchScrollHits getSearchScrollHits(Class clazz, IndexCoordinates index, + ResponseBody response) { + ReadDocumentCallback documentCallback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, index); + SearchDocumentResponseCallback> callback = new ReadSearchScrollDocumentResponseCallback<>(clazz, + index); + + return callback + .doWith(SearchDocumentResponseBuilder.from(response, getEntityCreator(documentCallback), jsonpMapper)); + } + + @Override + public void searchScrollClear(List scrollIds) { + + Assert.notNull(scrollIds, "scrollIds must not be null"); + + if (!scrollIds.isEmpty()) { + ClearScrollRequest request = ClearScrollRequest.of(csr -> csr.scrollId(scrollIds)); + execute(client -> client.clearScroll(request)); + } + } + + @Override + public List> multiSearch(List queries, Class clazz, IndexCoordinates index) { + + Assert.notNull(queries, "queries must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + + int size = queries.size(); + // noinspection unchecked + return multiSearch(queries, Collections.nCopies(size, clazz), Collections.nCopies(size, index)) + .stream().map(searchHits -> (SearchHits) searchHits) + .collect(Collectors.toList()); + } + + @Override + public List> multiSearch(List queries, List> classes) { + + Assert.notNull(queries, "queries must not be null"); + Assert.notNull(classes, "classes must not be null"); + Assert.isTrue(queries.size() == classes.size(), "queries and classes must have the same size"); + + return multiSearch(queries, classes, classes.stream().map(this::getIndexCoordinatesFor).toList()); + } + + @Override + public List> multiSearch(List queries, List> classes, + IndexCoordinates index) { + + Assert.notNull(queries, "queries must not be null"); + Assert.notNull(classes, "classes must not be null"); + Assert.notNull(index, "index must not be null"); + Assert.isTrue(queries.size() == classes.size(), "queries and classes must 
have the same size"); + + return multiSearch(queries, classes, Collections.nCopies(queries.size(), index)); + } + + @Override + public List> multiSearch(List queries, List> classes, + List indexes) { + + Assert.notNull(queries, "queries must not be null"); + Assert.notNull(classes, "classes must not be null"); + Assert.notNull(indexes, "indexes must not be null"); + Assert.isTrue(queries.size() == classes.size() && queries.size() == indexes.size(), + "queries, classes and indexes must have the same size"); + + List multiSearchQueryParameters = new ArrayList<>(queries.size()); + Iterator> it = classes.iterator(); + Iterator indexesIt = indexes.iterator(); + + Assert.isTrue(!queries.isEmpty(), "queries should have at least 1 query"); + boolean isSearchTemplateQuery = queries.get(0) instanceof SearchTemplateQuery; + + for (Query query : queries) { + Assert.isTrue((query instanceof SearchTemplateQuery) == isSearchTemplateQuery, + "SearchTemplateQuery can't be mixed with other types of query in multiple search"); + + Class clazz = it.next(); + IndexCoordinates index = indexesIt.next(); + multiSearchQueryParameters.add(new MultiSearchQueryParameter(query, clazz, index)); + } + + return multiSearch(multiSearchQueryParameters, isSearchTemplateQuery); + } + + private List> multiSearch(List multiSearchQueryParameters, + boolean isSearchTemplateQuery) { + return isSearchTemplateQuery ? 
doMultiTemplateSearch(multiSearchQueryParameters.stream() + .map(p -> new MultiSearchTemplateQueryParameter((SearchTemplateQuery) p.query, p.clazz, p.index)) + .toList()) + : doMultiSearch(multiSearchQueryParameters); + } + + private List> doMultiTemplateSearch( + List mSearchTemplateQueryParameters) { + MsearchTemplateRequest request = requestConverter.searchMsearchTemplateRequest(mSearchTemplateQueryParameters, + routingResolver.getRouting()); + + MsearchTemplateResponse response = execute( + client -> client.msearchTemplate(request, EntityAsMap.class)); + List> responseItems = response.responses(); + + Assert.isTrue(mSearchTemplateQueryParameters.size() == responseItems.size(), + "number of response items does not match number of requests"); + + int size = mSearchTemplateQueryParameters.size(); + List> classes = mSearchTemplateQueryParameters + .stream().map(MultiSearchTemplateQueryParameter::clazz).collect(Collectors.toList()); + List indices = mSearchTemplateQueryParameters + .stream().map(MultiSearchTemplateQueryParameter::index).collect(Collectors.toList()); + + return getSearchHitsFromMsearchResponse(size, classes, indices, responseItems); + } + + private List> doMultiSearch(List multiSearchQueryParameters) { + + MsearchRequest request = requestConverter.searchMsearchRequest(multiSearchQueryParameters, + routingResolver.getRouting()); + + MsearchResponse msearchResponse = execute(client -> client.msearch(request, EntityAsMap.class)); + List> responseItems = msearchResponse.responses(); + + Assert.isTrue(multiSearchQueryParameters.size() == responseItems.size(), + "number of response items does not match number of requests"); + + int size = multiSearchQueryParameters.size(); + List> classes = multiSearchQueryParameters + .stream().map(MultiSearchQueryParameter::clazz).collect(Collectors.toList()); + List indices = multiSearchQueryParameters + .stream().map(MultiSearchQueryParameter::index).collect(Collectors.toList()); + + return 
getSearchHitsFromMsearchResponse(size, classes, indices, responseItems); + } + + /** + * {@link MsearchResponse} and {@link MsearchTemplateResponse} share the same {@link MultiSearchResponseItem} + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + private List> getSearchHitsFromMsearchResponse(int size, List> classes, + List indices, List> responseItems) { + List> searchHitsList = new ArrayList<>(size); + Iterator> clazzIter = classes.iterator(); + Iterator indexIter = indices.iterator(); + Iterator> responseIterator = responseItems.iterator(); + + while (clazzIter.hasNext() && indexIter.hasNext()) { + MultiSearchResponseItem responseItem = responseIterator.next(); + + if (responseItem.isResult()) { + + Class clazz = clazzIter.next(); + IndexCoordinates index = indexIter.next(); + ReadDocumentCallback documentCallback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, + index); + SearchDocumentResponseCallback> callback = new ReadSearchDocumentResponseCallback<>(clazz, + index); + + SearchHits searchHits = callback.doWith( + SearchDocumentResponseBuilder.from(responseItem.result(), getEntityCreator(documentCallback), jsonpMapper)); + + searchHitsList.add(searchHits); + } else { + if (LOGGER.isWarnEnabled()) { + LOGGER.warn(String.format("multisearch response contains failure: %s", + responseItem.failure().error().reason())); + } + } + } + + return searchHitsList; + } + + /** + * value class combining the information needed for a single query in a multisearch request. + */ + record MultiSearchQueryParameter(Query query, Class clazz, IndexCoordinates index) { + } + + /** + * value class combining the information needed for a single query in a template multisearch request. 
+ */ + record MultiSearchTemplateQueryParameter(SearchTemplateQuery query, Class clazz, IndexCoordinates index) { + } + + @Override + public String openPointInTime(IndexCoordinates index, Duration keepAlive, Boolean ignoreUnavailable) { + + Assert.notNull(index, "index must not be null"); + Assert.notNull(keepAlive, "keepAlive must not be null"); + Assert.notNull(ignoreUnavailable, "ignoreUnavailable must not be null"); + + var request = requestConverter.searchOpenPointInTimeRequest(index, keepAlive, ignoreUnavailable); + return execute(client -> client.openPointInTime(request)).id(); + } + + @Override + public Boolean closePointInTime(String pit) { + + Assert.notNull(pit, "pit must not be null"); + + ClosePointInTimeRequest request = requestConverter.searchClosePointInTime(pit); + var response = execute(client -> client.closePointInTime(request)); + return response.succeeded(); + } + + // endregion + + // region script methods + @Override + public boolean putScript(Script script) { + + Assert.notNull(script, "script must not be null"); + + var request = requestConverter.scriptPut(script); + return execute(client -> client.putScript(request)).acknowledged(); + } + + @Nullable + @Override + public Script getScript(String name) { + + Assert.notNull(name, "name must not be null"); + + var request = requestConverter.scriptGet(name); + return responseConverter.scriptResponse(execute(client -> client.getScript(request))); + } + + public boolean deleteScript(String name) { + + Assert.notNull(name, "name must not be null"); + + DeleteScriptRequest request = requestConverter.scriptDelete(name); + return execute(client -> client.deleteScript(request)).acknowledged(); + } + + @Override + public SqlResponse search(SqlQuery query) { + Assert.notNull(query, "Query must not be null."); + + try { + QueryResponse response = sqlClient.query(requestConverter.sqlQueryRequest(query)); + + return responseConverter.sqlResponse(response); + } catch (IOException e) { + throw 
exceptionTranslator.translateException(e); + } + } + // endregion + + // region client callback + /** + * Callback interface to be used with {@link #execute(ElasticsearchTemplate.ClientCallback)} for operating directly on + * the {@link ElasticsearchClient}. + */ + @FunctionalInterface + public interface ClientCallback { + T doWithClient(ElasticsearchClient client) throws IOException; + } + + /** + * Execute a callback with the {@link ElasticsearchClient} and provide exception translation. + * + * @param callback the callback to execute, must not be {@literal null} + * @param the type returned from the callback + * @return the callback result + */ + public T execute(ElasticsearchTemplate.ClientCallback callback) { + + Assert.notNull(callback, "callback must not be null"); + + try { + return callback.doWithClient(client); + } catch (IOException | RuntimeException e) { + throw exceptionTranslator.translateException(e); + } + } + // endregion + + // region helper methods + @Override + public Query matchAllQuery() { + return NativeQuery.builder().withQuery(qb -> qb.matchAll(mab -> mab)).build(); + } + + @Override + public Query idsQuery(List ids) { + return NativeQuery.builder().withQuery(qb -> qb.ids(iq -> iq.values(ids))).build(); + } + + @Override + public BaseQueryBuilder queryBuilderWithIds(List ids) { + return NativeQuery.builder().withIds(ids); + } + + /** + * extract the list of {@link IndexedObjectInformation} from a {@link BulkResponse}. 
+ * + * @param bulkResponse the response to evaluate + * @return the list of the {@link IndexedObjectInformation}s + */ + protected List checkForBulkOperationFailure(BulkResponse bulkResponse) { + + if (bulkResponse.errors()) { + Map failedDocuments = new HashMap<>(); + for (BulkResponseItem item : bulkResponse.items()) { + + if (item.error() != null) { + failedDocuments.put(item.id(), new BulkFailureException.FailureDetails(item.status(), item.error().reason())); + } + } + throw new BulkFailureException( + "Bulk operation has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages [" + + failedDocuments + ']', + failedDocuments); + } + + return bulkResponse.items().stream().map( + item -> new IndexedObjectInformation(item.id(), item.index(), item.seqNo(), item.primaryTerm(), item.version())) + .collect(Collectors.toList()); + + } + // endregion + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/EntityAsMap.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/EntityAsMap.java new file mode 100644 index 0000000000..e54d13ea74 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/EntityAsMap.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import org.springframework.data.elasticsearch.support.DefaultStringObjectMap; + +/** + * A Map<String,Object> to represent any entity as it's returned from Elasticsearch and before it is converted to a + * {@link org.springframework.data.elasticsearch.core.document.Document}. + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class EntityAsMap extends DefaultStringObjectMap {} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java new file mode 100644 index 0000000000..dfe850e4d8 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java @@ -0,0 +1,240 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*; + +import java.util.Arrays; +import java.util.stream.Collectors; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity; +import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentProperty; +import org.springframework.data.elasticsearch.core.query.highlight.Highlight; +import org.springframework.data.elasticsearch.core.query.highlight.HighlightField; +import org.springframework.data.elasticsearch.core.query.highlight.HighlightFieldParameters; +import org.springframework.data.elasticsearch.core.query.highlight.HighlightParameters; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.util.StringUtils; + +/** + * Converts the {@link Highlight} annotation from a method to an ElasticsearchClient + * {@link co.elastic.clients.elasticsearch.core.search.Highlight}. 
+ * + * @author Peter-Josef Meisch + * @author Haibo Liu + * @since 4.4 + */ +class HighlightQueryBuilder { + private final MappingContext, ElasticsearchPersistentProperty> mappingContext; + private final RequestConverter requestConverter; + + HighlightQueryBuilder( + MappingContext, ElasticsearchPersistentProperty> mappingContext, + RequestConverter requestConverter) { + this.mappingContext = mappingContext; + this.requestConverter = requestConverter; + } + + public co.elastic.clients.elasticsearch.core.search.Highlight getHighlight(Highlight highlight, + @Nullable Class type) { + + co.elastic.clients.elasticsearch.core.search.Highlight.Builder highlightBuilder = new co.elastic.clients.elasticsearch.core.search.Highlight.Builder(); + + // in the old implementation we could use one addParameters method, but in the new Elasticsearch client + // the builder for highlight and highlightfield share no code + addParameters(highlight.getParameters(), highlightBuilder, type); + + for (HighlightField highlightField : highlight.getFields()) { + String mappedName = mapFieldName(highlightField.getName(), type); + highlightBuilder.fields(mappedName, hf -> { + addParameters(highlightField.getParameters(), hf, type); + return hf; + }); + } + + return highlightBuilder.build(); + } + + /* + * the builder for highlight and highlight fields don't share code, so we have these two methods here that basically are almost copies + */ + private void addParameters(HighlightParameters parameters, + co.elastic.clients.elasticsearch.core.search.Highlight.Builder builder, @Nullable Class type) { + + if (StringUtils.hasLength(parameters.getBoundaryChars())) { + builder.boundaryChars(parameters.getBoundaryChars()); + } + + if (parameters.getBoundaryMaxScan() > -1) { + builder.boundaryMaxScan(parameters.getBoundaryMaxScan()); + } + + if (StringUtils.hasLength(parameters.getBoundaryScanner())) { + builder.boundaryScanner(boundaryScanner(parameters.getBoundaryScanner())); + } + + if 
(StringUtils.hasLength(parameters.getBoundaryScannerLocale())) { + builder.boundaryScannerLocale(parameters.getBoundaryScannerLocale()); + } + + if (StringUtils.hasLength(parameters.getFragmenter())) { + builder.fragmenter(highlighterFragmenter(parameters.getFragmenter())); + } + + if (parameters.getFragmentSize() > -1) { + builder.fragmentSize(parameters.getFragmentSize()); + } + + if (parameters.getNoMatchSize() > -1) { + builder.noMatchSize(parameters.getNoMatchSize()); + } + + if (parameters.getNumberOfFragments() > -1) { + builder.numberOfFragments(parameters.getNumberOfFragments()); + } + + if (parameters.getHighlightQuery() != null) { + builder.highlightQuery(requestConverter.getQuery(parameters.getHighlightQuery(), type)); + } + + if (StringUtils.hasLength(parameters.getOrder())) { + builder.order(highlighterOrder(parameters.getOrder())); + } + + if (parameters.getPreTags().length > 0) { + builder.preTags(Arrays.asList(parameters.getPreTags())); + } + + if (parameters.getPostTags().length > 0) { + builder.postTags(Arrays.asList(parameters.getPostTags())); + } + + if (!parameters.getRequireFieldMatch()) { // default is true + builder.requireFieldMatch(false); + } + + if (StringUtils.hasLength(parameters.getType())) { + builder.type(highlighterType(parameters.getType())); + } + + if (StringUtils.hasLength(parameters.getEncoder())) { + builder.encoder(highlighterEncoder(parameters.getEncoder())); + } + + if (StringUtils.hasLength(parameters.getTagsSchema())) { + builder.tagsSchema(highlighterTagsSchema(parameters.getTagsSchema())); + } + } + + /* + * the builder for highlight and highlight fields don't share code, so we have these two methods here that basically are almost copies + */ + private void addParameters(HighlightFieldParameters parameters, + co.elastic.clients.elasticsearch.core.search.HighlightField.Builder builder, Class type) { + + if (StringUtils.hasLength(parameters.getBoundaryChars())) { + builder.boundaryChars(parameters.getBoundaryChars()); + 
} + + if (parameters.getBoundaryMaxScan() > -1) { + builder.boundaryMaxScan(parameters.getBoundaryMaxScan()); + } + + if (StringUtils.hasLength(parameters.getBoundaryScanner())) { + builder.boundaryScanner(boundaryScanner(parameters.getBoundaryScanner())); + } + + if (StringUtils.hasLength(parameters.getBoundaryScannerLocale())) { + builder.boundaryScannerLocale(parameters.getBoundaryScannerLocale()); + } + + if (parameters.getForceSource()) { // default is false + builder.forceSource(parameters.getForceSource()); + } + + if (StringUtils.hasLength(parameters.getFragmenter())) { + builder.fragmenter(highlighterFragmenter(parameters.getFragmenter())); + } + + if (parameters.getFragmentSize() > -1) { + builder.fragmentSize(parameters.getFragmentSize()); + } + + if (parameters.getNoMatchSize() > -1) { + builder.noMatchSize(parameters.getNoMatchSize()); + } + + if (parameters.getNumberOfFragments() > -1) { + builder.numberOfFragments(parameters.getNumberOfFragments()); + } + + if (parameters.getHighlightQuery() != null) { + builder.highlightQuery(requestConverter.getQuery(parameters.getHighlightQuery(), type)); + } + + if (StringUtils.hasLength(parameters.getOrder())) { + builder.order(highlighterOrder(parameters.getOrder())); + } + + if (parameters.getPhraseLimit() > -1) { + builder.phraseLimit(parameters.getPhraseLimit()); + } + + if (parameters.getPreTags().length > 0) { + builder.preTags(Arrays.asList(parameters.getPreTags())); + } + + if (parameters.getPostTags().length > 0) { + builder.postTags(Arrays.asList(parameters.getPostTags())); + } + + if (!parameters.getRequireFieldMatch()) { // default is true + builder.requireFieldMatch(false); + } + + if (StringUtils.hasLength(parameters.getType())) { + builder.type(highlighterType(parameters.getType())); + } + + if ((parameters).getFragmentOffset() > -1) { + builder.fragmentOffset(parameters.getFragmentOffset()); + } + + if (parameters.getMatchedFields().length > 0) { + 
builder.matchedFields(Arrays.stream(parameters.getMatchedFields()).map(fieldName -> mapFieldName(fieldName, type)) // + .collect(Collectors.toList())); + } + } + + private String mapFieldName(String fieldName, @Nullable Class type) { + + if (type != null) { + ElasticsearchPersistentEntity persistentEntity = mappingContext.getPersistentEntity(type); + + if (persistentEntity != null) { + ElasticsearchPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(fieldName); + + if (persistentProperty != null) { + return persistentProperty.getFieldName(); + } + } + } + + return fieldName; + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/IndicesTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/IndicesTemplate.java new file mode 100644 index 0000000000..5a735a7240 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/IndicesTemplate.java @@ -0,0 +1,463 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import static org.springframework.util.StringUtils.*; + +import co.elastic.clients.elasticsearch.indices.*; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.endpoints.BooleanResponse; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import org.jspecify.annotations.Nullable; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.elasticsearch.UncategorizedElasticsearchException; +import org.springframework.data.elasticsearch.annotations.Mapping; +import org.springframework.data.elasticsearch.core.IndexInformation; +import org.springframework.data.elasticsearch.core.IndexOperations; +import org.springframework.data.elasticsearch.core.ResourceUtil; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.data.elasticsearch.core.document.Document; +import org.springframework.data.elasticsearch.core.index.*; +import org.springframework.data.elasticsearch.core.index.DeleteIndexTemplateRequest; +import org.springframework.data.elasticsearch.core.index.DeleteTemplateRequest; +import org.springframework.data.elasticsearch.core.index.ExistsIndexTemplateRequest; +import org.springframework.data.elasticsearch.core.index.ExistsTemplateRequest; +import org.springframework.data.elasticsearch.core.index.GetIndexTemplateRequest; +import org.springframework.data.elasticsearch.core.index.GetTemplateRequest; +import org.springframework.data.elasticsearch.core.index.PutIndexTemplateRequest; +import org.springframework.data.elasticsearch.core.index.PutTemplateRequest; +import org.springframework.data.elasticsearch.core.mapping.Alias; +import org.springframework.data.elasticsearch.core.mapping.CreateIndexSettings; +import 
org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity; +import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates; +import org.springframework.util.Assert; + +/** + * Implementation of the {@link IndexOperations} interface using en {@link ElasticsearchIndicesClient}. + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class IndicesTemplate extends ChildTemplate + implements IndexOperations { + + // we need a cluster client as well because ES has put some methods from the indices API into the cluster client + // (component templates) + private final ClusterTemplate clusterTemplate; + protected final ElasticsearchConverter elasticsearchConverter; + @Nullable protected final Class boundClass; + @Nullable protected final IndexCoordinates boundIndex; + + public IndicesTemplate(ElasticsearchIndicesClient client, ClusterTemplate clusterTemplate, + ElasticsearchConverter elasticsearchConverter, Class boundClass) { + super(client, elasticsearchConverter); + + Assert.notNull(clusterTemplate, "cluster must not be null"); + Assert.notNull(elasticsearchConverter, "elasticsearchConverter must not be null"); + Assert.notNull(boundClass, "boundClass may not be null"); + + this.clusterTemplate = clusterTemplate; + this.elasticsearchConverter = elasticsearchConverter; + this.boundClass = boundClass; + this.boundIndex = null; + + } + + public IndicesTemplate(ElasticsearchIndicesClient client, ClusterTemplate clusterTemplate, + ElasticsearchConverter elasticsearchConverter, IndexCoordinates boundIndex) { + super(client, elasticsearchConverter); + + Assert.notNull(clusterTemplate, "cluster must not be null"); + Assert.notNull(elasticsearchConverter, "elasticsearchConverter must not be null"); + Assert.notNull(boundIndex, "boundIndex must not be null"); + + this.clusterTemplate = clusterTemplate; + this.elasticsearchConverter = elasticsearchConverter; + this.boundClass = null; + this.boundIndex = boundIndex; + + } + + protected 
Class checkForBoundClass() { + if (boundClass == null) { + throw new InvalidDataAccessApiUsageException("IndexOperations are not bound"); + } + return boundClass; + } + + @Override + public boolean create() { + + Settings settings = boundClass != null ? createSettings(boundClass) : new Settings(); + return doCreate(getIndexCoordinates(), settings, null); + } + + @Override + public boolean create(Map settings) { + + Assert.notNull(settings, "settings must not be null"); + + return doCreate(getIndexCoordinates(), settings, null); + } + + @Override + public boolean create(Map settings, Document mapping) { + + Assert.notNull(settings, "settings must not be null"); + Assert.notNull(mapping, "mapping must not be null"); + + return doCreate(getIndexCoordinates(), settings, mapping); + } + + @Override + public boolean createWithMapping() { + return doCreate(getIndexCoordinates(), createSettings(), createMapping()); + } + + protected boolean doCreate(IndexCoordinates indexCoordinates, Map settings, + @Nullable Document mapping) { + Set aliases = (boundClass != null) ? 
getAliasesFor(boundClass) : new HashSet<>(); + CreateIndexSettings indexSettings = CreateIndexSettings.builder(indexCoordinates) + .withAliases(aliases) + .withSettings(settings) + .withMapping(mapping) + .build(); + + CreateIndexRequest createIndexRequest = requestConverter.indicesCreateRequest(indexSettings); + CreateIndexResponse createIndexResponse = execute(client -> client.create(createIndexRequest)); + return Boolean.TRUE.equals(createIndexResponse.acknowledged()); + } + + @Override + public boolean delete() { + return doDelete(getIndexCoordinates()); + } + + private boolean doDelete(IndexCoordinates indexCoordinates) { + + Assert.notNull(indexCoordinates, "indexCoordinates must not be null"); + + if (doExists(indexCoordinates)) { + DeleteIndexRequest deleteIndexRequest = requestConverter.indicesDeleteRequest(indexCoordinates); + DeleteIndexResponse deleteIndexResponse = execute(client -> client.delete(deleteIndexRequest)); + return deleteIndexResponse.acknowledged(); + } + + return false; + } + + @Override + public boolean exists() { + return doExists(getIndexCoordinates()); + } + + private boolean doExists(IndexCoordinates indexCoordinates) { + + Assert.notNull(indexCoordinates, "indexCoordinates must not be null"); + + ExistsRequest existsRequest = requestConverter.indicesExistsRequest(indexCoordinates); + BooleanResponse existsResponse = execute(client -> client.exists(existsRequest)); + return existsResponse.value(); + } + + @Override + public void refresh() { + + RefreshRequest refreshRequest = requestConverter.indicesRefreshRequest(getIndexCoordinates()); + execute(client -> client.refresh(refreshRequest)); + } + + @Override + public Document createMapping() { + return createMapping(checkForBoundClass()); + } + + @Override + public Document createMapping(Class clazz) { + + Assert.notNull(clazz, "clazz must not be null"); + + // load mapping specified in Mapping annotation if present + Mapping mappingAnnotation = 
AnnotatedElementUtils.findMergedAnnotation(clazz, Mapping.class); + + if (mappingAnnotation != null) { + String mappingPath = mappingAnnotation.mappingPath(); + + if (hasText(mappingPath)) { + String mappings = ResourceUtil.readFileFromClasspath(mappingPath); + + if (hasText(mappings)) { + return Document.parse(mappings); + } + } + } + + // build mapping from field annotations + try { + String mapping = new MappingBuilder(elasticsearchConverter).buildPropertyMapping(clazz); + return Document.parse(mapping); + } catch (Exception e) { + throw new UncategorizedElasticsearchException("Failed to build mapping for " + clazz.getSimpleName(), e); + } + } + + @Override + public boolean putMapping(Document mapping) { + + Assert.notNull(mapping, "mapping must not be null"); + + PutMappingRequest putMappingRequest = requestConverter.indicesPutMappingRequest(getIndexCoordinates(), mapping); + PutMappingResponse putMappingResponse = execute(client -> client.putMapping(putMappingRequest)); + return putMappingResponse.acknowledged(); + } + + @Override + public Map getMapping() { + + IndexCoordinates indexCoordinates = getIndexCoordinates(); + GetMappingRequest getMappingRequest = requestConverter.indicesGetMappingRequest(indexCoordinates); + GetMappingResponse getMappingResponse = execute(client -> client.getMapping(getMappingRequest)); + + return responseConverter.indicesGetMapping(getMappingResponse, indexCoordinates); + } + + @Override + public Settings createSettings() { + return createSettings(checkForBoundClass()); + } + + @Override + public Settings createSettings(Class clazz) { + + Assert.notNull(clazz, "clazz must not be null"); + + ElasticsearchPersistentEntity persistentEntity = getRequiredPersistentEntity(clazz); + String settingPath = persistentEntity.settingPath(); + return hasText(settingPath) // + ? 
Settings.parse(ResourceUtil.readFileFromClasspath(settingPath)) // + : persistentEntity.getDefaultSettings(); + + } + + @Override + public Settings getSettings() { + return getSettings(false); + } + + @Override + public Settings getSettings(boolean includeDefaults) { + + GetIndicesSettingsRequest getIndicesSettingsRequest = requestConverter + .indicesGetSettingsRequest(getIndexCoordinates(), includeDefaults); + GetIndicesSettingsResponse getIndicesSettingsResponse = execute( + client -> client.getSettings(getIndicesSettingsRequest)); + return responseConverter.indicesGetSettings(getIndicesSettingsResponse, getIndexCoordinates().getIndexName()); + } + + @Override + public boolean alias(AliasActions aliasActions) { + + Assert.notNull(aliasActions, "aliasActions must not be null"); + + UpdateAliasesRequest updateAliasesRequest = requestConverter.indicesUpdateAliasesRequest(aliasActions); + UpdateAliasesResponse updateAliasesResponse = execute(client -> client.updateAliases(updateAliasesRequest)); + return updateAliasesResponse.acknowledged(); + } + + @Override + public Map> getAliases(String... aliasNames) { + + Assert.notNull(aliasNames, "aliasNames must not be null"); + + GetAliasRequest getAliasRequest = requestConverter.indicesGetAliasRequest(aliasNames, null); + var getAliasResponse = execute(client -> client.getAlias(getAliasRequest)); + return responseConverter.indicesGetAliasData(getAliasResponse); + } + + @Override + public Map> getAliasesForIndex(String... 
indexNames) { + + Assert.notNull(indexNames, "indexNames must not be null"); + + GetAliasRequest getAliasRequest = requestConverter.indicesGetAliasRequest(null, indexNames); + var getAliasResponse = execute(client -> client.getAlias(getAliasRequest)); + return responseConverter.indicesGetAliasData(getAliasResponse); + } + + @Override + public boolean putTemplate(PutTemplateRequest putTemplateRequest) { + + Assert.notNull(putTemplateRequest, "putTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.PutTemplateRequest putTemplateRequestES = requestConverter + .indicesPutTemplateRequest(putTemplateRequest); + return execute(client -> client.putTemplate(putTemplateRequestES)).acknowledged(); + } + + @Override + public TemplateData getTemplate(GetTemplateRequest getTemplateRequest) { + + Assert.notNull(getTemplateRequest, "getTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.GetTemplateRequest getTemplateRequestES = requestConverter + .indicesGetTemplateRequest(getTemplateRequest); + GetTemplateResponse getTemplateResponse = execute(client -> client.getTemplate(getTemplateRequestES)); + + return responseConverter.indicesGetTemplateData(getTemplateResponse, getTemplateRequest.getTemplateName()); + } + + @Override + public boolean existsTemplate(ExistsTemplateRequest existsTemplateRequest) { + + Assert.notNull(existsTemplateRequest, "existsTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.ExistsTemplateRequest existsTemplateRequestSO = requestConverter + .indicesExistsTemplateRequest(existsTemplateRequest); + return execute(client -> client.existsTemplate(existsTemplateRequestSO)).value(); + } + + @Override + public boolean deleteTemplate(DeleteTemplateRequest deleteTemplateRequest) { + + Assert.notNull(deleteTemplateRequest, "deleteTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.DeleteTemplateRequest deleteTemplateRequestES = requestConverter + 
.indicesDeleteTemplateRequest(deleteTemplateRequest); + return execute(client -> client.deleteTemplate(deleteTemplateRequestES)).acknowledged(); + } + + @Override + public boolean putIndexTemplate(PutIndexTemplateRequest putIndexTemplateRequest) { + + co.elastic.clients.elasticsearch.indices.PutIndexTemplateRequest putIndexTemplateRequestES = requestConverter + .indicesPutIndexTemplateRequest(putIndexTemplateRequest); + + return execute(client -> client.putIndexTemplate(putIndexTemplateRequestES)).acknowledged(); + } + + @Override + public boolean existsIndexTemplate(ExistsIndexTemplateRequest existsIndexTemplateRequest) { + + Assert.notNull(existsIndexTemplateRequest, "existsIndexTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.ExistsIndexTemplateRequest existsTemplateRequestES = requestConverter + .indicesExistsIndexTemplateRequest(existsIndexTemplateRequest); + return execute(client -> client.existsIndexTemplate(existsTemplateRequestES)).value(); + } + + @Override + public List getIndexTemplate(GetIndexTemplateRequest getIndexTemplateRequest) { + + Assert.notNull(getIndexTemplateRequest, "getIndexTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.GetIndexTemplateRequest getIndexTemplateRequestES = requestConverter + .indicesGetIndexTemplateRequest(getIndexTemplateRequest); + var getIndexTemplateResponse = execute(client -> client.getIndexTemplate(getIndexTemplateRequestES)); + return responseConverter.getIndexTemplates(getIndexTemplateResponse); + } + + @Override + public boolean deleteIndexTemplate(DeleteIndexTemplateRequest deleteIndexTemplateRequest) { + + Assert.notNull(deleteIndexTemplateRequest, "deleteIndexTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.indices.DeleteIndexTemplateRequest deleteIndexTemplateRequestES = requestConverter + .indicesDeleteIndexTemplateRequest(deleteIndexTemplateRequest); + return execute(client -> 
client.deleteIndexTemplate(deleteIndexTemplateRequestES)).acknowledged(); + } + + @Override + public boolean putComponentTemplate(PutComponentTemplateRequest putComponentTemplateRequest) { + + Assert.notNull(putComponentTemplateRequest, "putComponentTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.cluster.PutComponentTemplateRequest putComponentTemplateRequestES = requestConverter + .clusterPutComponentTemplateRequest(putComponentTemplateRequest); + // the new Elasticsearch client has this call in the cluster index + return clusterTemplate.execute(client -> client.putComponentTemplate(putComponentTemplateRequestES)).acknowledged(); + } + + @Override + public boolean existsComponentTemplate(ExistsComponentTemplateRequest existsComponentTemplateRequest) { + + Assert.notNull(existsComponentTemplateRequest, "existsComponentTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.cluster.ExistsComponentTemplateRequest existsComponentTemplateRequestES = requestConverter + .clusterExistsComponentTemplateRequest(existsComponentTemplateRequest); + return clusterTemplate.execute(client -> client.existsComponentTemplate(existsComponentTemplateRequestES)).value(); + } + + @Override + public List getComponentTemplate(GetComponentTemplateRequest getComponentTemplateRequest) { + + co.elastic.clients.elasticsearch.cluster.GetComponentTemplateRequest getComponentTemplateRequestES = requestConverter + .clusterGetComponentTemplateRequest(getComponentTemplateRequest); + var response = clusterTemplate.execute(client -> client.getComponentTemplate(getComponentTemplateRequestES)); + return responseConverter.clusterGetComponentTemplates(response); + } + + @Override + public boolean deleteComponentTemplate(DeleteComponentTemplateRequest deleteComponentTemplateRequest) { + + Assert.notNull(deleteComponentTemplateRequest, "deleteComponentTemplateRequest must not be null"); + + co.elastic.clients.elasticsearch.cluster.DeleteComponentTemplateRequest 
deleteComponentTemplateRequestES = requestConverter + .clusterDeleteComponentTemplateRequest(deleteComponentTemplateRequest); + return clusterTemplate.execute(client -> client.deleteComponentTemplate(deleteComponentTemplateRequestES)) + .acknowledged(); + } + + @Override + public List getInformation(IndexCoordinates indexCoordinates) { + + Assert.notNull(indexCoordinates, "indexCoordinates must not be null"); + + GetIndexRequest getIndexRequest = requestConverter.indicesGetIndexRequest(indexCoordinates); + GetIndexResponse getIndexResponse = execute(client -> client.get(getIndexRequest)); + return responseConverter.indicesGetIndexInformations(getIndexResponse); + } + + // region Helper functions + ElasticsearchPersistentEntity getRequiredPersistentEntity(Class clazz) { + return elasticsearchConverter.getMappingContext().getRequiredPersistentEntity(clazz); + } + + @Override + public IndexCoordinates getIndexCoordinates() { + return (boundClass != null) ? getIndexCoordinatesFor(boundClass) : Objects.requireNonNull(boundIndex); + } + + public IndexCoordinates getIndexCoordinatesFor(Class clazz) { + return getRequiredPersistentEntity(clazz).getIndexCoordinates(); + } + + /** + * Get the {@link Alias} of the provided class. + * + * @param clazz provided class that can be used to extract aliases. + */ + public Set getAliasesFor(Class clazz) { + return getRequiredPersistentEntity(clazz).getAliases(); + } + // endregion +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/JsonUtils.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/JsonUtils.java new file mode 100644 index 0000000000..5a927774f9 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/JsonUtils.java @@ -0,0 +1,65 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.json.JsonpMapper; +import jakarta.json.stream.JsonGenerator; + +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +/** + * @author Peter-Josef Meisch + * @since 4.4 + */ +final class JsonUtils { + + private static final Log LOGGER = LogFactory.getLog(JsonUtils.class); + + private JsonUtils() {} + + public static String toJson(Object object, JsonpMapper mapper) { + + // noinspection SpellCheckingInspection + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + JsonGenerator generator = mapper.jsonProvider().createGenerator(baos); + mapper.serialize(object, generator); + generator.close(); + String json = "{}"; + json = baos.toString(StandardCharsets.UTF_8); + return json; + } + + @Nullable + public static String queryToJson(co.elastic.clients.elasticsearch._types.query_dsl.@Nullable Query query, + JsonpMapper mapper) { + + if (query == null) { + return null; + } + + var baos = new ByteArrayOutputStream(); + var generator = mapper.jsonProvider().createGenerator(baos); + query.serialize(generator, mapper); + generator.close(); + return baos.toString(StandardCharsets.UTF_8); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/NativeQuery.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/NativeQuery.java new file mode 100644 index 
0000000000..d8d2d21aec --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/NativeQuery.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.KnnSearch; +import co.elastic.clients.elasticsearch._types.SortOptions; +import co.elastic.clients.elasticsearch._types.aggregations.Aggregation; +import co.elastic.clients.elasticsearch._types.query_dsl.Query; +import co.elastic.clients.elasticsearch.core.search.FieldCollapse; +import co.elastic.clients.elasticsearch.core.search.Suggester; +import co.elastic.clients.json.JsonData; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.core.query.BaseQuery; +import org.springframework.util.Assert; + +/** + * A {@link org.springframework.data.elasticsearch.core.query.Query} implementation using query builders from the new + * Elasticsearch Client library. 
+ * + * @author Peter-Josef Meisch + * @author Sascha Woo + * @author Haibo Liu + * @since 4.4 + */ +public class NativeQuery extends BaseQuery { + + @Nullable private final Query query; + private org.springframework.data.elasticsearch.core.query.@Nullable Query springDataQuery; + @Nullable private Query filter; + // note: the new client does not have pipeline aggs, these are just set up as normal aggs + private final Map aggregations = new LinkedHashMap<>(); + @Nullable private Suggester suggester; + @Nullable private FieldCollapse fieldCollapse; + private List sortOptions = Collections.emptyList(); + + private Map searchExtensions = Collections.emptyMap(); + @Nullable private List knnSearches = Collections.emptyList(); + + public NativeQuery(NativeQueryBuilder builder) { + super(builder); + this.query = builder.getQuery(); + this.filter = builder.getFilter(); + this.aggregations.putAll(builder.getAggregations()); + this.suggester = builder.getSuggester(); + this.fieldCollapse = builder.getFieldCollapse(); + this.sortOptions = builder.getSortOptions(); + this.searchExtensions = builder.getSearchExtensions(); + + if (builder.getSpringDataQuery() != null) { + Assert.isTrue(!NativeQuery.class.isAssignableFrom(builder.getSpringDataQuery().getClass()), + "Cannot add an NativeQuery in a NativeQuery"); + } + this.springDataQuery = builder.getSpringDataQuery(); + this.knnSearches = builder.getKnnSearches(); + } + + public NativeQuery(@Nullable Query query) { + this.query = query; + } + + public static NativeQueryBuilder builder() { + return new NativeQueryBuilder(); + } + + @Nullable + public Query getQuery() { + return query; + } + + @Nullable + public Query getFilter() { + return filter; + } + + public Map getAggregations() { + return aggregations; + } + + @Nullable + public Suggester getSuggester() { + return suggester; + } + + @Nullable + public FieldCollapse getFieldCollapse() { + return fieldCollapse; + } + + public List getSortOptions() { + return sortOptions; + } 
+ + public Map getSearchExtensions() { + return searchExtensions; + } + + /** + * @see NativeQueryBuilder#withQuery(org.springframework.data.elasticsearch.core.query.Query). + * @since 5.1 + */ + public void setSpringDataQuery(org.springframework.data.elasticsearch.core.query.@Nullable Query springDataQuery) { + this.springDataQuery = springDataQuery; + } + + /** + * @since 5.3.1 + */ + @Nullable + public List getKnnSearches() { + return knnSearches; + } + + public org.springframework.data.elasticsearch.core.query.@Nullable Query getSpringDataQuery() { + return springDataQuery; + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/NativeQueryBuilder.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/NativeQueryBuilder.java new file mode 100644 index 0000000000..e8a1e748a0 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/NativeQueryBuilder.java @@ -0,0 +1,244 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.KnnQuery; +import co.elastic.clients.elasticsearch._types.KnnSearch; +import co.elastic.clients.elasticsearch._types.SortOptions; +import co.elastic.clients.elasticsearch._types.aggregations.Aggregation; +import co.elastic.clients.elasticsearch._types.query_dsl.Query; +import co.elastic.clients.elasticsearch.core.search.FieldCollapse; +import co.elastic.clients.elasticsearch.core.search.Suggester; +import co.elastic.clients.json.JsonData; +import co.elastic.clients.util.ObjectBuilder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.core.query.BaseQueryBuilder; +import org.springframework.util.Assert; + +/** + * @author Peter-Josef Meisch + * @author Sascha Woo + * @author Haibo Liu + * @since 4.4 + */ +public class NativeQueryBuilder extends BaseQueryBuilder { + + @Nullable private Query query; + @Nullable private Query filter; + private final Map aggregations = new LinkedHashMap<>(); + @Nullable private Suggester suggester; + @Nullable private FieldCollapse fieldCollapse; + private final List sortOptions = new ArrayList<>(); + private final Map searchExtensions = new LinkedHashMap<>(); + + private org.springframework.data.elasticsearch.core.query.@Nullable Query springDataQuery; + @Nullable private KnnQuery knnQuery; + @Nullable private List knnSearches = Collections.emptyList(); + + public NativeQueryBuilder() {} + + @Nullable + public Query getQuery() { + return query; + } + + @Nullable + public Query getFilter() { + return this.filter; + } + + public Map getAggregations() { + return aggregations; + } + + @Nullable + public Suggester getSuggester() { + return suggester; + } + + @Nullable + public 
FieldCollapse getFieldCollapse() { + return fieldCollapse; + } + + public List getSortOptions() { + return sortOptions; + } + + public Map getSearchExtensions() { + return this.searchExtensions; + } + + @Nullable + public KnnQuery getKnnQuery() { + return knnQuery; + } + + /** + * @since 5.3.1 + */ + @Nullable + public List getKnnSearches() { + return knnSearches; + } + + public org.springframework.data.elasticsearch.core.query.@Nullable Query getSpringDataQuery() { + return springDataQuery; + } + + public NativeQueryBuilder withQuery(Query query) { + + Assert.notNull(query, "query must not be null"); + + this.query = query; + return this; + } + + public NativeQueryBuilder withQuery(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return withQuery(fn.apply(new Query.Builder()).build()); + } + + public NativeQueryBuilder withFilter(@Nullable Query filter) { + this.filter = filter; + return this; + } + + public NativeQueryBuilder withFilter(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return withFilter(fn.apply(new Query.Builder()).build()); + } + + public NativeQueryBuilder withAggregation(String name, Aggregation aggregation) { + + Assert.notNull(name, "name must not be null"); + Assert.notNull(aggregation, "aggregation must not be null"); + + this.aggregations.put(name, aggregation); + return this; + } + + public NativeQueryBuilder withSuggester(@Nullable Suggester suggester) { + this.suggester = suggester; + return this; + } + + public NativeQueryBuilder withFieldCollapse(@Nullable FieldCollapse fieldCollapse) { + this.fieldCollapse = fieldCollapse; + return this; + } + + public NativeQueryBuilder withSort(List values) { + + Assert.notEmpty(values, "values must not be empty"); + + sortOptions.clear(); + sortOptions.addAll(values); + + return this; + } + + public NativeQueryBuilder withSort(SortOptions value, SortOptions... 
values) { + + Assert.notNull(value, "value must not be null"); + sortOptions.add(value); + if (values.length > 0) { + sortOptions.addAll(Arrays.asList(values)); + } + + return this; + } + + public NativeQueryBuilder withSort(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + withSort(fn.apply(new SortOptions.Builder()).build()); + + return this; + } + + public NativeQueryBuilder withSearchExtension(String key, JsonData value) { + + Assert.notNull(key, "key must not be null"); + Assert.notNull(value, "value must not be null"); + + searchExtensions.put(key, value); + return this; + } + + public NativeQueryBuilder withSearchExtensions(Map searchExtensions) { + + Assert.notNull(searchExtensions, "searchExtensions must not be null"); + + this.searchExtensions.putAll(searchExtensions); + return this; + } + + /** + * Allows to use a {@link org.springframework.data.elasticsearch.core.query.Query} within a NativeQuery. Cannot be + * used together with {@link #withQuery(Query)} that sets an Elasticsearch query. Passing in a {@link NativeQuery} + * will result in an exception when {@link #build()} is called. 
+ * + * @since 5.1 + */ + public NativeQueryBuilder withQuery(org.springframework.data.elasticsearch.core.query.Query query) { + this.springDataQuery = query; + return this; + } + + /** + * @since 5.4 + */ + public NativeQueryBuilder withKnnSearches(List knnSearches) { + this.knnSearches = knnSearches; + return this; + } + + /** + * @since 5.4 + */ + public NativeQueryBuilder withKnnSearches(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return withKnnSearches(fn.apply(new KnnSearch.Builder()).build()); + } + + /** + * @since 5.4 + */ + public NativeQueryBuilder withKnnSearches(KnnSearch knnSearch) { + return withKnnSearches(List.of(knnSearch)); + } + + public NativeQuery build() { + Assert.isTrue(query == null || springDataQuery == null, "Cannot have both a native query and a Spring Data query"); + return new NativeQuery(this); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/Queries.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/Queries.java new file mode 100644 index 0000000000..7259f0ca41 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/Queries.java @@ -0,0 +1,196 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch._types.FieldValue; +import co.elastic.clients.elasticsearch._types.LatLonGeoLocation; +import co.elastic.clients.elasticsearch._types.aggregations.Aggregation; +import co.elastic.clients.elasticsearch._types.query_dsl.IdsQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.MatchAllQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.MatchQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.Operator; +import co.elastic.clients.elasticsearch._types.query_dsl.Query; +import co.elastic.clients.elasticsearch._types.query_dsl.QueryStringQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.TermQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.WildcardQuery; +import co.elastic.clients.elasticsearch._types.query_dsl.WrapperQuery; +import co.elastic.clients.util.ObjectBuilder; + +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.List; +import java.util.function.Function; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.elasticsearch.core.geo.GeoPoint; +import org.springframework.data.elasticsearch.core.query.BaseQueryBuilder; +import org.springframework.util.Assert; + +/** + * Utility class simplifying the creation of some more complex queries and types.
+ * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public final class Queries { + + private Queries() {} + + public static IdsQuery idsQuery(List ids) { + + Assert.notNull(ids, "ids must not be null"); + + return IdsQuery.of(i -> i.values(ids)); + } + + public static Query idsQueryAsQuery(List ids) { + + Assert.notNull(ids, "ids must not be null"); + + Function> builder = b -> b.ids(idsQuery(ids)); + + return builder.apply(new Query.Builder()).build(); + } + + public static MatchQuery matchQuery(String fieldName, String query, @Nullable Operator operator, + @Nullable Float boost) { + + Assert.notNull(fieldName, "fieldName must not be null"); + Assert.notNull(query, "query must not be null"); + + return MatchQuery.of(mb -> mb.field(fieldName).query(FieldValue.of(query)).operator(operator).boost(boost)); + } + + public static Query matchQueryAsQuery(String fieldName, String query, @Nullable Operator operator, + @Nullable Float boost) { + + Function> builder = b -> b.match(matchQuery(fieldName, query, operator, boost)); + + return builder.apply(new Query.Builder()).build(); + } + + public static MatchAllQuery matchAllQuery() { + + return MatchAllQuery.of(b -> b); + } + + public static Query matchAllQueryAsQuery() { + + Function> builder = b -> b.matchAll(matchAllQuery()); + + return builder.apply(new Query.Builder()).build(); + } + + public static QueryStringQuery queryStringQuery(String fieldName, String query, @Nullable Float boost) { + return queryStringQuery(fieldName, query, null, null, boost); + } + + public static QueryStringQuery queryStringQuery(String fieldName, String query, Operator defaultOperator, + @Nullable Float boost) { + return queryStringQuery(fieldName, query, null, defaultOperator, boost); + } + + public static QueryStringQuery queryStringQuery(String fieldName, String query, @Nullable Boolean analyzeWildcard, + @Nullable Float boost) { + return queryStringQuery(fieldName, query, analyzeWildcard, null, boost); + } + + public static 
QueryStringQuery queryStringQuery(String fieldName, String query, @Nullable Boolean analyzeWildcard, + @Nullable Operator defaultOperator, @Nullable Float boost) { + + Assert.notNull(fieldName, "fieldName must not be null"); + Assert.notNull(query, "query must not be null"); + + return QueryStringQuery.of(qs -> qs.fields(fieldName).query(query).analyzeWildcard(analyzeWildcard) + .defaultOperator(defaultOperator).boost(boost)); + } + + public static TermQuery termQuery(String fieldName, String value) { + + Assert.notNull(fieldName, "fieldName must not be null"); + Assert.notNull(value, "value must not be null"); + + return TermQuery.of(t -> t.field(fieldName).value(FieldValue.of(value))); + } + + public static Query termQueryAsQuery(String fieldName, String value) { + + Function> builder = q -> q.term(termQuery(fieldName, value)); + return builder.apply(new Query.Builder()).build(); + } + + public static WildcardQuery wildcardQuery(String field, String value) { + + Assert.notNull(field, "field must not be null"); + Assert.notNull(value, "value must not be null"); + + return WildcardQuery.of(w -> w.field(field).wildcard(value)); + } + + public static Query wildcardQueryAsQuery(String field, String value) { + Function> builder = q -> q.wildcard(wildcardQuery(field, value)); + return builder.apply(new Query.Builder()).build(); + } + + public static Query wrapperQueryAsQuery(String query) { + + Function> builder = q -> q.wrapper(wrapperQuery(query)); + + return builder.apply(new Query.Builder()).build(); + } + + public static WrapperQuery wrapperQuery(String query) { + + Assert.notNull(query, "query must not be null"); + + String encodedValue = Base64.getEncoder().encodeToString(query.getBytes(StandardCharsets.UTF_8)); + + return WrapperQuery.of(wq -> wq.query(encodedValue)); + } + + public static LatLonGeoLocation latLon(GeoPoint geoPoint) { + + Assert.notNull(geoPoint, "geoPoint must not be null"); + + return latLon(geoPoint.getLat(), geoPoint.getLon()); + } + + 
public static LatLonGeoLocation latLon(double lat, double lon) { + return LatLonGeoLocation.of(_0 -> _0.lat(lat).lon(lon)); + } + + public static org.springframework.data.elasticsearch.core.query.Query getTermsAggsQuery(String aggsName, + String aggsField) { + return NativeQuery.builder() // + .withQuery(Queries.matchAllQueryAsQuery()) // + .withAggregation(aggsName, Aggregation.of(a -> a // + .terms(ta -> ta.field(aggsField)))) // + .withMaxResults(0) // + .build(); + } + + public static org.springframework.data.elasticsearch.core.query.Query queryWithIds(String... ids) { + return NativeQuery.builder().withIds(ids).build(); + } + + public static BaseQueryBuilder getBuilderWithMatchAllQuery() { + return NativeQuery.builder().withQuery(matchAllQueryAsQuery()); + } + + public static BaseQueryBuilder getBuilderWithTermQuery(String field, String value) { + return NativeQuery.builder().withQuery(termQueryAsQuery(field, value)); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveChildTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveChildTemplate.java new file mode 100644 index 0000000000..8cdd00cb05 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveChildTemplate.java @@ -0,0 +1,71 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.ApiClient; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.transport.Transport; +import reactor.core.publisher.Flux; + +import org.reactivestreams.Publisher; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.util.Assert; + +/** + * base class for a reactive template that uses on of the {@link ReactiveElasticsearchClient}'s child clients. + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ReactiveChildTemplate> { + protected final CLIENT client; + protected final ElasticsearchConverter elasticsearchConverter; + protected final RequestConverter requestConverter; + protected final ResponseConverter responseConverter; + protected final ElasticsearchExceptionTranslator exceptionTranslator; + + public ReactiveChildTemplate(CLIENT client, ElasticsearchConverter elasticsearchConverter) { + this.client = client; + this.elasticsearchConverter = elasticsearchConverter; + JsonpMapper jsonpMapper = client._transport().jsonpMapper(); + requestConverter = new RequestConverter(elasticsearchConverter, jsonpMapper); + responseConverter = new ResponseConverter(jsonpMapper); + exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); + } + + /** + * Callback interface to be used with {@link #execute(ClientCallback)} for operating directly on the client. + */ + @FunctionalInterface + public interface ClientCallback> { + RESULT doWithClient(CLIENT client); + } + + /** + * Execute a callback with the client and provide exception translation. 
+ * + * @param callback the callback to execute, must not be {@literal null} + * @param the type returned from the callback + * @return the callback result + */ + public Publisher execute(ClientCallback> callback) { + + Assert.notNull(callback, "callback must not be null"); + + return Flux.defer(() -> callback.doWithClient(client)).onErrorMap(exceptionTranslator::translateException); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveClusterTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveClusterTemplate.java new file mode 100644 index 0000000000..3207fd5117 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveClusterTemplate.java @@ -0,0 +1,48 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.elasticsearch.cluster.HealthRequest; +import co.elastic.clients.elasticsearch.cluster.HealthResponse; +import co.elastic.clients.transport.ElasticsearchTransport; +import reactor.core.publisher.Mono; + +import org.springframework.data.elasticsearch.core.cluster.ClusterHealth; +import org.springframework.data.elasticsearch.core.cluster.ReactiveClusterOperations; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; + +/** + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ReactiveClusterTemplate + extends ReactiveChildTemplate + implements ReactiveClusterOperations { + + public ReactiveClusterTemplate(ReactiveElasticsearchClusterClient client, + ElasticsearchConverter elasticsearchConverter) { + super(client, elasticsearchConverter); + } + + @Override + public Mono health() { + + HealthRequest healthRequest = requestConverter.clusterHealthRequest(); + Mono healthResponse = Mono.from(execute(client -> client.health(healthRequest))); + return healthResponse.map(responseConverter::clusterHealth); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchClient.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchClient.java new file mode 100644 index 0000000000..7241fa7b89 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchClient.java @@ -0,0 +1,447 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.ApiClient; +import co.elastic.clients.elasticsearch._types.ErrorResponse; +import co.elastic.clients.elasticsearch.core.*; +import co.elastic.clients.elasticsearch.core.search.ResponseBody; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.JsonEndpoint; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.transport.endpoints.BooleanResponse; +import co.elastic.clients.transport.endpoints.EndpointWithResponseMapperAttr; +import co.elastic.clients.util.ObjectBuilder; +import reactor.core.publisher.Mono; + +import java.io.IOException; +import java.util.function.Function; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +/** + * Reactive version of {@link co.elastic.clients.elasticsearch.ElasticsearchClient}. 
+ * + * @author Peter-Josef Meisch + * @author maryantocinn + * @since 4.4 + */ +public class ReactiveElasticsearchClient extends ApiClient + implements AutoCloseable { + + public ReactiveElasticsearchClient(ElasticsearchTransport transport) { + super(transport, null); + } + + public ReactiveElasticsearchClient(ElasticsearchTransport transport, @Nullable TransportOptions transportOptions) { + super(transport, transportOptions); + } + + @Override + public ReactiveElasticsearchClient withTransportOptions(@Nullable TransportOptions transportOptions) { + return new ReactiveElasticsearchClient(transport, transportOptions); + } + + @Override + public void close() throws IOException { + // since Elasticsearch 8.16 the ElasticsearchClient implements (through ApiClient) the Closeable interface and + // handles closing of the underlying transport. We now just call the base class, but keep this as we + // have been implementing AutoCloseable since 4.4 and won't change that to a mere Closeable + super.close(); + } + + // region child clients + + public ReactiveElasticsearchClusterClient cluster() { + return new ReactiveElasticsearchClusterClient(transport, transportOptions); + } + + public ReactiveElasticsearchIndicesClient indices() { + return new ReactiveElasticsearchIndicesClient(transport, transportOptions); + } + + public ReactiveElasticsearchSqlClient sql() { + return new ReactiveElasticsearchSqlClient(transport, transportOptions); + } + + // endregion + // region info + + public Mono info() { + return Mono + .fromFuture(transport.performRequestAsync(InfoRequest._INSTANCE, InfoRequest._ENDPOINT, transportOptions)); + } + + public Mono ping() { + return Mono + .fromFuture(transport.performRequestAsync(PingRequest._INSTANCE, PingRequest._ENDPOINT, transportOptions)); + } + + // endregion + // region document + + public Mono index(IndexRequest request) { + + Assert.notNull(request, "request must not be null"); + + return 
Mono.fromFuture(transport.performRequestAsync(request, IndexRequest._ENDPOINT, transportOptions)); + } + + public Mono index(Function, ObjectBuilder>> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return index(fn.apply(new IndexRequest.Builder<>()).build()); + } + + public Mono bulk(BulkRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, BulkRequest._ENDPOINT, transportOptions)); + } + + public Mono bulk(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return bulk(fn.apply(new BulkRequest.Builder()).build()); + } + + public Mono> get(GetRequest request, Class tClass) { + + Assert.notNull(request, "request must not be null"); + + // code adapted from + // co.elastic.clients.elasticsearch.ElasticsearchClient.get(co.elastic.clients.elasticsearch.core.GetRequest, + // java.lang.Class) + // noinspection unchecked + JsonEndpoint, ErrorResponse> endpoint = (JsonEndpoint, ErrorResponse>) GetRequest._ENDPOINT; + endpoint = new EndpointWithResponseMapperAttr<>(endpoint, + "co.elastic.clients:Deserializer:_global.get.Response.TDocument", + getDeserializer(tClass)); + + return Mono.fromFuture(transport.performRequestAsync(request, endpoint, transportOptions)); + } + + public Mono exists(ExistsRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, ExistsRequest._ENDPOINT, transportOptions)); + } + + public Mono> update(UpdateRequest request, Class clazz) { + + Assert.notNull(request, "request must not be null"); + + // noinspection unchecked + JsonEndpoint, UpdateResponse, ErrorResponse> endpoint = new EndpointWithResponseMapperAttr( + UpdateRequest._ENDPOINT, "co.elastic.clients:Deserializer:_global.update.Response.TDocument", + this.getDeserializer(clazz)); + return Mono.fromFuture(transport.performRequestAsync(request, endpoint, this.transportOptions)); + } + + public 
Mono> update( + Function, ObjectBuilder>> fn, Class clazz) { + + Assert.notNull(fn, "fn must not be null"); + + return update(fn.apply(new UpdateRequest.Builder<>()).build(), clazz); + } + + public Mono> get(Function> fn, Class tClass) { + Assert.notNull(fn, "fn must not be null"); + + return get(fn.apply(new GetRequest.Builder()).build(), tClass); + } + + public Mono> mget(MgetRequest request, Class clazz) { + + Assert.notNull(request, "request must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + + // noinspection unchecked + JsonEndpoint, ErrorResponse> endpoint = (JsonEndpoint, ErrorResponse>) MgetRequest._ENDPOINT; + endpoint = new EndpointWithResponseMapperAttr<>(endpoint, + "co.elastic.clients:Deserializer:_global.mget.Response.TDocument", + this.getDeserializer(clazz)); + + return Mono.fromFuture(transport.performRequestAsync(request, endpoint, transportOptions)); + } + + public Mono> mget(Function> fn, Class clazz) { + + Assert.notNull(fn, "fn must not be null"); + + return mget(fn.apply(new MgetRequest.Builder()).build(), clazz); + } + + public Mono reindex(ReindexRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, ReindexRequest._ENDPOINT, transportOptions)); + } + + public Mono reindex(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return reindex(fn.apply(new ReindexRequest.Builder()).build()); + } + + public Mono delete(DeleteRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, DeleteRequest._ENDPOINT, transportOptions)); + } + + public Mono delete(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return delete(fn.apply(new DeleteRequest.Builder()).build()); + } + + public Mono deleteByQuery(DeleteByQueryRequest request) { + + Assert.notNull(request, "request must not be null"); + + return 
Mono.fromFuture(transport.performRequestAsync(request, DeleteByQueryRequest._ENDPOINT, transportOptions)); + } + + public Mono deleteByQuery( + Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return deleteByQuery(fn.apply(new DeleteByQueryRequest.Builder()).build()); + } + + /** + * @since 5.4 + */ + public Mono count(CountRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, CountRequest._ENDPOINT, transportOptions)); + } + + /** + * @since 5.4 + */ + public Mono count(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return count(fn.apply(new CountRequest.Builder()).build()); + } + + // endregion + // region search + + public Mono> search(SearchRequest request, Class tDocumentClass) { + + Assert.notNull(request, "request must not be null"); + Assert.notNull(tDocumentClass, "tDocumentClass must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, + SearchRequest.createSearchEndpoint(this.getDeserializer(tDocumentClass)), transportOptions)); + } + + public Mono> search(Function> fn, + Class tDocumentClass) { + + Assert.notNull(fn, "fn must not be null"); + Assert.notNull(tDocumentClass, "tDocumentClass must not be null"); + + return search(fn.apply(new SearchRequest.Builder()).build(), tDocumentClass); + } + + /** + * @since 5.1 + */ + public Mono> searchTemplate(SearchTemplateRequest request, Class tDocumentClass) { + + Assert.notNull(request, "request must not be null"); + Assert.notNull(tDocumentClass, "tDocumentClass must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, + SearchTemplateRequest.createSearchTemplateEndpoint(this.getDeserializer(tDocumentClass)), transportOptions)); + } + + /** + * @since 5.1 + */ + public Mono> searchTemplate( + Function> fn, Class tDocumentClass) { + + Assert.notNull(fn, "fn must not be null"); + + return searchTemplate(fn.apply(new 
SearchTemplateRequest.Builder()).build(), tDocumentClass); + } + + public Mono> scroll(ScrollRequest request, Class tDocumentClass) { + + Assert.notNull(request, "request must not be null"); + Assert.notNull(tDocumentClass, "tDocumentClass must not be null"); + + // code adapted from + // co.elastic.clients.elasticsearch.ElasticsearchClient.scroll(co.elastic.clients.elasticsearch.core.ScrollRequest, + // java.lang.Class) + // noinspection unchecked + JsonEndpoint, ErrorResponse> endpoint = (JsonEndpoint, ErrorResponse>) ScrollRequest._ENDPOINT; + endpoint = new EndpointWithResponseMapperAttr<>(endpoint, + "co.elastic.clients:Deserializer:_global.scroll.Response.TDocument", getDeserializer(tDocumentClass)); + + return Mono.fromFuture(transport.performRequestAsync(request, endpoint, transportOptions)); + } + + public Mono> scroll(Function> fn, + Class tDocumentClass) { + + Assert.notNull(fn, "fn must not be null"); + Assert.notNull(tDocumentClass, "tDocumentClass must not be null"); + + return scroll(fn.apply(new ScrollRequest.Builder()).build(), tDocumentClass); + } + + public Mono clearScroll(ClearScrollRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, ClearScrollRequest._ENDPOINT, transportOptions)); + } + + public Mono clearScroll( + Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return clearScroll(fn.apply(new ClearScrollRequest.Builder()).build()); + } + + /** + * @since 5.0 + */ + public Mono openPointInTime(OpenPointInTimeRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, OpenPointInTimeRequest._ENDPOINT, transportOptions)); + } + + /** + * @since 5.0 + */ + public Mono openPointInTime( + Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return openPointInTime(fn.apply(new OpenPointInTimeRequest.Builder()).build()); + } + + /** + * @since 5.0 
+ */ + public Mono closePointInTime(ClosePointInTimeRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, ClosePointInTimeRequest._ENDPOINT, transportOptions)); + } + + /** + * @since 5.0 + */ + public Mono closePointInTime( + Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return closePointInTime(fn.apply(new ClosePointInTimeRequest.Builder()).build()); + } + // endregion + + // region script api + /** + * @since 5.1 + */ + public Mono putScript(PutScriptRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, PutScriptRequest._ENDPOINT, transportOptions)); + } + + /** + * @since 5.1 + */ + public Mono putScript(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return putScript(fn.apply(new PutScriptRequest.Builder()).build()); + } + + /** + * @since 5.1 + */ + public Mono getScript(GetScriptRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, GetScriptRequest._ENDPOINT, transportOptions)); + } + + /** + * @since 5.1 + */ + public Mono getScript(Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return getScript(fn.apply(new GetScriptRequest.Builder()).build()); + } + + /** + * @since 5.1 + */ + public Mono deleteScript(DeleteScriptRequest request) { + + Assert.notNull(request, "request must not be null"); + + return Mono.fromFuture(transport.performRequestAsync(request, DeleteScriptRequest._ENDPOINT, transportOptions)); + } + + /** + * @since 5.1 + */ + public Mono deleteScript( + Function> fn) { + + Assert.notNull(fn, "fn must not be null"); + + return deleteScript(fn.apply(new DeleteScriptRequest.Builder()).build()); + } + // endregion + +} diff --git 
a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchClusterClient.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchClusterClient.java new file mode 100644 index 0000000000..b90f0da967 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchClusterClient.java @@ -0,0 +1,107 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.ApiClient; +import co.elastic.clients.elasticsearch.cluster.DeleteComponentTemplateRequest; +import co.elastic.clients.elasticsearch.cluster.DeleteComponentTemplateResponse; +import co.elastic.clients.elasticsearch.cluster.ExistsComponentTemplateRequest; +import co.elastic.clients.elasticsearch.cluster.GetComponentTemplateRequest; +import co.elastic.clients.elasticsearch.cluster.GetComponentTemplateResponse; +import co.elastic.clients.elasticsearch.cluster.HealthRequest; +import co.elastic.clients.elasticsearch.cluster.HealthResponse; +import co.elastic.clients.elasticsearch.cluster.PutComponentTemplateRequest; +import co.elastic.clients.elasticsearch.cluster.PutComponentTemplateResponse; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.transport.endpoints.BooleanResponse; +import co.elastic.clients.util.ObjectBuilder; +import reactor.core.publisher.Mono; + +import java.util.function.Function; + +import org.jspecify.annotations.Nullable; + +/** + * Reactive version of the {@link co.elastic.clients.elasticsearch.cluster.ElasticsearchClusterClient} + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ReactiveElasticsearchClusterClient + extends ApiClient { + + public ReactiveElasticsearchClusterClient(ElasticsearchTransport transport, + @Nullable TransportOptions transportOptions) { + super(transport, transportOptions); + } + + @Override + public ReactiveElasticsearchClusterClient withTransportOptions(@Nullable TransportOptions transportOptions) { + return new ReactiveElasticsearchClusterClient(transport, transportOptions); + } + + public Mono health(HealthRequest healthRequest) { + return Mono.fromFuture(transport.performRequestAsync(healthRequest, HealthRequest._ENDPOINT, transportOptions)); + } + + public Mono health(Function> fn) { + return health(fn.apply(new 
HealthRequest.Builder()).build()); + } + + public Mono putComponentTemplate( + PutComponentTemplateRequest putComponentTemplateRequest) { + return Mono.fromFuture(transport.performRequestAsync(putComponentTemplateRequest, + PutComponentTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono putComponentTemplate( + Function> fn) { + return putComponentTemplate(fn.apply(new PutComponentTemplateRequest.Builder()).build()); + } + + public Mono getComponentTemplate( + GetComponentTemplateRequest getComponentTemplateRequest) { + return Mono.fromFuture(transport.performRequestAsync(getComponentTemplateRequest, + GetComponentTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono getComponentTemplate( + Function> fn) { + return getComponentTemplate(fn.apply(new GetComponentTemplateRequest.Builder()).build()); + } + + public Mono existsComponentTemplate(ExistsComponentTemplateRequest existsComponentTemplateRequest) { + return Mono.fromFuture(transport.performRequestAsync(existsComponentTemplateRequest, + ExistsComponentTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono existsComponentTemplate( + Function> fn) { + return existsComponentTemplate(fn.apply(new ExistsComponentTemplateRequest.Builder()).build()); + } + + public Mono deleteComponentTemplate( + DeleteComponentTemplateRequest deleteComponentTemplateRequest) { + return Mono.fromFuture(transport.performRequestAsync(deleteComponentTemplateRequest, + DeleteComponentTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono deleteComponentTemplate( + Function> fn) { + return deleteComponentTemplate(fn.apply(new DeleteComponentTemplateRequest.Builder()).build()); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchConfiguration.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchConfiguration.java new file mode 100644 index 0000000000..2506b59c01 --- /dev/null +++ 
b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchConfiguration.java @@ -0,0 +1,130 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.json.jackson.JacksonJsonpMapper; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.transport.rest_client.RestClientOptions; + +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestClient; +import org.springframework.context.annotation.Bean; +import org.springframework.data.elasticsearch.client.ClientConfiguration; +import org.springframework.data.elasticsearch.config.ElasticsearchConfigurationSupport; +import org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.util.Assert; + +/** + * Base class for a @{@link org.springframework.context.annotation.Configuration} class to set up the Elasticsearch + * connection using the {@link ReactiveElasticsearchClient}. This class exposes different parts of the setup as Spring + * beans. Deriving * classes must provide the {@link ClientConfiguration} to use. 
+ * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public abstract class ReactiveElasticsearchConfiguration extends ElasticsearchConfigurationSupport { + + /** + * Must be implemented by deriving classes to provide the {@link ClientConfiguration}. + * + * @return configuration, must not be {@literal null} + */ + @Bean(name = "elasticsearchClientConfiguration") + public abstract ClientConfiguration clientConfiguration(); + + /** + * Provides the underlying low level RestClient. + * + * @param clientConfiguration configuration for the client, must not be {@literal null} + * @return RestClient + */ + @Bean + public RestClient elasticsearchRestClient(ClientConfiguration clientConfiguration) { + + Assert.notNull(clientConfiguration, "clientConfiguration must not be null"); + + return ElasticsearchClients.getRestClient(clientConfiguration); + } + + /** + * Provides the Elasticsearch transport to be used. The default implementation uses the {@link RestClient} bean and + * the {@link JsonpMapper} bean provided in this class. + * + * @return the {@link ElasticsearchTransport} + * @since 5.2 + */ + @Bean + public ElasticsearchTransport elasticsearchTransport(RestClient restClient, JsonpMapper jsonpMapper) { + + Assert.notNull(restClient, "restClient must not be null"); + Assert.notNull(jsonpMapper, "jsonpMapper must not be null"); + + return ElasticsearchClients.getElasticsearchTransport(restClient, ElasticsearchClients.REACTIVE_CLIENT, + transportOptions(), jsonpMapper); + } + + /** + * Provides the {@link ReactiveElasticsearchClient} instance used. + * + * @param transport the ElasticsearchTransport to use + * @return ReactiveElasticsearchClient instance. + */ + @Bean + public ReactiveElasticsearchClient reactiveElasticsearchClient(ElasticsearchTransport transport) { + + Assert.notNull(transport, "transport must not be null"); + + return ElasticsearchClients.createReactive(transport); + } + + /** + * Creates {@link ReactiveElasticsearchOperations}. 
+ * + * @return never {@literal null}. + */ + @Bean(name = { "reactiveElasticsearchOperations", "reactiveElasticsearchTemplate" }) + public ReactiveElasticsearchOperations reactiveElasticsearchOperations(ElasticsearchConverter elasticsearchConverter, + ReactiveElasticsearchClient reactiveElasticsearchClient) { + + ReactiveElasticsearchTemplate template = new ReactiveElasticsearchTemplate(reactiveElasticsearchClient, + elasticsearchConverter); + template.setRefreshPolicy(refreshPolicy()); + + return template; + } + + /** + * Provides the JsonpMapper that is used in the {@link #elasticsearchTransport(RestClient, JsonpMapper)} method and + * exposes it as a bean. + * + * @return the {@link JsonpMapper} to use + * @since 5.2 + */ + @Bean + public JsonpMapper jsonpMapper() { + return new JacksonJsonpMapper(); + } + + /** + * @return the options that should be added to every request. Must not be {@literal null} + */ + public TransportOptions transportOptions() { + return new RestClientOptions(RequestOptions.DEFAULT, false).toBuilder().build(); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchIndicesClient.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchIndicesClient.java new file mode 100644 index 0000000000..3ebde9776b --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchIndicesClient.java @@ -0,0 +1,568 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.ApiClient; +import co.elastic.clients.elasticsearch.indices.*; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.transport.endpoints.BooleanResponse; +import co.elastic.clients.util.ObjectBuilder; +import reactor.core.publisher.Mono; + +import java.util.function.Function; + +import org.jspecify.annotations.Nullable; + +/** + * Reactive version of the {@link co.elastic.clients.elasticsearch.indices.ElasticsearchIndicesClient} + * + * @author Peter-Josef Meisch + * @since 4.4 + */ +public class ReactiveElasticsearchIndicesClient + extends ApiClient { + + public ReactiveElasticsearchIndicesClient(ElasticsearchTransport transport, + @Nullable TransportOptions transportOptions) { + super(transport, transportOptions); + } + + @Override + public ReactiveElasticsearchIndicesClient withTransportOptions(@Nullable TransportOptions transportOptions) { + return new ReactiveElasticsearchIndicesClient(transport, transportOptions); + } + + public Mono addBlock(AddBlockRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, AddBlockRequest._ENDPOINT, transportOptions)); + } + + public Mono addBlock(Function> fn) { + return addBlock(fn.apply(new AddBlockRequest.Builder()).build()); + } + + public Mono analyze(AnalyzeRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, AnalyzeRequest._ENDPOINT, transportOptions)); + } + + public Mono analyze(Function> fn) { + return analyze(fn.apply(new AnalyzeRequest.Builder()).build()); + } + + public Mono analyze() { + return analyze(builder -> builder); + } + + public Mono clearCache(ClearCacheRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, 
ClearCacheRequest._ENDPOINT, transportOptions)); + } + + public Mono clearCache(Function> fn) { + return clearCache(fn.apply(new ClearCacheRequest.Builder()).build()); + } + + public Mono clearCache() { + return clearCache(builder -> builder); + } + + public Mono clone(CloneIndexRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, CloneIndexRequest._ENDPOINT, transportOptions)); + } + + public Mono clone(Function> fn) { + return clone(fn.apply(new CloneIndexRequest.Builder()).build()); + } + + public Mono close(CloseIndexRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, CloseIndexRequest._ENDPOINT, transportOptions)); + } + + public Mono close(Function> fn) { + return close(fn.apply(new CloseIndexRequest.Builder()).build()); + } + + public Mono create(CreateIndexRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, CreateIndexRequest._ENDPOINT, transportOptions)); + } + + public Mono create(Function> fn) { + return create(fn.apply(new CreateIndexRequest.Builder()).build()); + } + + public Mono createDataStream(CreateDataStreamRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, CreateDataStreamRequest._ENDPOINT, transportOptions)); + } + + public Mono createDataStream( + Function> fn) { + return createDataStream(fn.apply(new CreateDataStreamRequest.Builder()).build()); + } + + public Mono dataStreamsStats(DataStreamsStatsRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, DataStreamsStatsRequest._ENDPOINT, transportOptions)); + } + + public Mono dataStreamsStats( + Function> fn) { + return dataStreamsStats(fn.apply(new DataStreamsStatsRequest.Builder()).build()); + } + + public Mono dataStreamsStats() { + return dataStreamsStats(builder -> builder); + } + + public Mono delete(DeleteIndexRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, DeleteIndexRequest._ENDPOINT, 
transportOptions)); + } + + public Mono delete(Function> fn) { + return delete(fn.apply(new DeleteIndexRequest.Builder()).build()); + } + + public Mono deleteAlias(DeleteAliasRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, DeleteAliasRequest._ENDPOINT, transportOptions)); + } + + public Mono deleteAlias( + Function> fn) { + return deleteAlias(fn.apply(new DeleteAliasRequest.Builder()).build()); + } + + public Mono deleteDataStream(DeleteDataStreamRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, DeleteDataStreamRequest._ENDPOINT, transportOptions)); + } + + public Mono deleteDataStream( + Function> fn) { + return deleteDataStream(fn.apply(new DeleteDataStreamRequest.Builder()).build()); + } + + public Mono deleteIndexTemplate(DeleteIndexTemplateRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, DeleteIndexTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono deleteIndexTemplate( + Function> fn) { + return deleteIndexTemplate(fn.apply(new DeleteIndexTemplateRequest.Builder()).build()); + } + + public Mono deleteTemplate(DeleteTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, DeleteTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono deleteTemplate( + Function> fn) { + return deleteTemplate(fn.apply(new DeleteTemplateRequest.Builder()).build()); + } + + public Mono diskUsage(DiskUsageRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, DiskUsageRequest._ENDPOINT, transportOptions)); + } + + public Mono diskUsage(Function> fn) { + return diskUsage(fn.apply(new DiskUsageRequest.Builder()).build()); + } + + public Mono exists(ExistsRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ExistsRequest._ENDPOINT, transportOptions)); + } + + public Mono exists(Function> fn) { + return exists(fn.apply(new ExistsRequest.Builder()).build()); + } + + 
public Mono existsAlias(ExistsAliasRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ExistsAliasRequest._ENDPOINT, transportOptions)); + } + + public Mono existsAlias(Function> fn) { + return existsAlias(fn.apply(new ExistsAliasRequest.Builder()).build()); + } + + public Mono existsIndexTemplate(ExistsIndexTemplateRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, ExistsIndexTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono existsIndexTemplate( + Function> fn) { + return existsIndexTemplate(fn.apply(new ExistsIndexTemplateRequest.Builder()).build()); + } + + public Mono existsTemplate(ExistsTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ExistsTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono existsTemplate( + Function> fn) { + return existsTemplate(fn.apply(new ExistsTemplateRequest.Builder()).build()); + } + + public Mono flush(FlushRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, FlushRequest._ENDPOINT, transportOptions)); + } + + public Mono flush(Function> fn) { + return flush(fn.apply(new FlushRequest.Builder()).build()); + } + + public Mono flush() { + return flush(builder -> builder); + } + + @SuppressWarnings("SpellCheckingInspection") + public Mono forcemerge(ForcemergeRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ForcemergeRequest._ENDPOINT, transportOptions)); + } + + @SuppressWarnings("SpellCheckingInspection") + public Mono forcemerge(Function> fn) { + return forcemerge(fn.apply(new ForcemergeRequest.Builder()).build()); + } + + @SuppressWarnings("SpellCheckingInspection") + public Mono forcemerge() { + return forcemerge(builder -> builder); + } + + public Mono get(GetIndexRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, GetIndexRequest._ENDPOINT, transportOptions)); + } + + public Mono get(Function> fn) { 
+ return get(fn.apply(new GetIndexRequest.Builder()).build()); + } + + public Mono getAlias(GetAliasRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, GetAliasRequest._ENDPOINT, transportOptions)); + } + + public Mono getAlias(Function> fn) { + return getAlias(fn.apply(new GetAliasRequest.Builder()).build()); + } + + public Mono getAlias() { + return getAlias(builder -> builder); + } + + public Mono getDataStream(GetDataStreamRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, GetDataStreamRequest._ENDPOINT, transportOptions)); + } + + public Mono getDataStream( + Function> fn) { + return getDataStream(fn.apply(new GetDataStreamRequest.Builder()).build()); + } + + public Mono getDataStream() { + return getDataStream(builder -> builder); + } + + public Mono getFieldMapping(GetFieldMappingRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, GetFieldMappingRequest._ENDPOINT, transportOptions)); + } + + public Mono getFieldMapping( + Function> fn) { + return getFieldMapping(fn.apply(new GetFieldMappingRequest.Builder()).build()); + } + + public Mono getIndexTemplate(GetIndexTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, GetIndexTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono getIndexTemplate( + Function> fn) { + return getIndexTemplate(fn.apply(new GetIndexTemplateRequest.Builder()).build()); + } + + public Mono getIndexTemplate() { + return getIndexTemplate(builder -> builder); + } + + public Mono getMapping(GetMappingRequest getMappingRequest) { + return Mono + .fromFuture(transport.performRequestAsync(getMappingRequest, GetMappingRequest._ENDPOINT, transportOptions)); + } + + public Mono getMapping(Function> fn) { + return getMapping(fn.apply(new GetMappingRequest.Builder()).build()); + } + + public Mono getMapping() { + return getMapping(builder -> builder); + } + + public Mono 
getSettings(GetIndicesSettingsRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, GetIndicesSettingsRequest._ENDPOINT, transportOptions)); + } + + public Mono getSettings( + Function> fn) { + return getSettings(fn.apply(new GetIndicesSettingsRequest.Builder()).build()); + } + + public Mono getSettings() { + return getSettings(builder -> builder); + } + + public Mono getTemplate(GetTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, GetTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono getTemplate( + Function> fn) { + return getTemplate(fn.apply(new GetTemplateRequest.Builder()).build()); + } + + public Mono getTemplate() { + return getTemplate(builder -> builder); + } + + public Mono migrateToDataStream(MigrateToDataStreamRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, MigrateToDataStreamRequest._ENDPOINT, transportOptions)); + } + + public Mono migrateToDataStream( + Function> fn) { + return migrateToDataStream(fn.apply(new MigrateToDataStreamRequest.Builder()).build()); + } + + public Mono open(OpenRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, OpenRequest._ENDPOINT, transportOptions)); + } + + public Mono open(Function> fn) { + return open(fn.apply(new OpenRequest.Builder()).build()); + } + + public Mono promoteDataStream(PromoteDataStreamRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, PromoteDataStreamRequest._ENDPOINT, transportOptions)); + } + + public Mono promoteDataStream( + Function> fn) { + return promoteDataStream(fn.apply(new PromoteDataStreamRequest.Builder()).build()); + } + + public Mono putAlias(PutAliasRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, PutAliasRequest._ENDPOINT, transportOptions)); + } + + public Mono putAlias(Function> fn) { + return putAlias(fn.apply(new PutAliasRequest.Builder()).build()); + } + + 
public Mono putIndexTemplate(PutIndexTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, PutIndexTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono putIndexTemplate( + Function> fn) { + return putIndexTemplate(fn.apply(new PutIndexTemplateRequest.Builder()).build()); + } + + public Mono putMapping(PutMappingRequest putMappingRequest) { + return Mono + .fromFuture(transport.performRequestAsync(putMappingRequest, PutMappingRequest._ENDPOINT, transportOptions)); + } + + public Mono putMapping(Function> fn) { + return putMapping(fn.apply(new PutMappingRequest.Builder()).build()); + } + + public Mono putSettings(PutIndicesSettingsRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, PutIndicesSettingsRequest._ENDPOINT, transportOptions)); + } + + public Mono putSettings( + Function> fn) { + return putSettings(fn.apply(new PutIndicesSettingsRequest.Builder()).build()); + } + + public Mono putSettings() { + return putSettings(builder -> builder); + } + + public Mono putTemplate(PutTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, PutTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono putTemplate( + Function> fn) { + return putTemplate(fn.apply(new PutTemplateRequest.Builder()).build()); + } + + public Mono recovery(RecoveryRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, RecoveryRequest._ENDPOINT, transportOptions)); + } + + public Mono recovery(Function> fn) { + return recovery(fn.apply(new RecoveryRequest.Builder()).build()); + } + + public Mono recovery() { + return recovery(builder -> builder); + } + + public Mono refresh(RefreshRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, RefreshRequest._ENDPOINT, transportOptions)); + } + + public Mono refresh(Function> fn) { + return refresh(fn.apply(new RefreshRequest.Builder()).build()); + } + + public Mono refresh() { 
+ return refresh(builder -> builder); + } + + public Mono reloadSearchAnalyzers(ReloadSearchAnalyzersRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, ReloadSearchAnalyzersRequest._ENDPOINT, transportOptions)); + } + + public Mono reloadSearchAnalyzers( + Function> fn) { + return reloadSearchAnalyzers(fn.apply(new ReloadSearchAnalyzersRequest.Builder()).build()); + } + + public Mono resolveIndex(ResolveIndexRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ResolveIndexRequest._ENDPOINT, transportOptions)); + } + + public Mono resolveIndex( + Function> fn) { + return resolveIndex(fn.apply(new ResolveIndexRequest.Builder()).build()); + } + + public Mono rollover(RolloverRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, RolloverRequest._ENDPOINT, transportOptions)); + } + + public Mono rollover(Function> fn) { + return rollover(fn.apply(new RolloverRequest.Builder()).build()); + } + + public Mono segments(SegmentsRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, SegmentsRequest._ENDPOINT, transportOptions)); + } + + public Mono segments(Function> fn) { + return segments(fn.apply(new SegmentsRequest.Builder()).build()); + } + + public Mono segments() { + return segments(builder -> builder); + } + + public Mono shardStores(ShardStoresRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ShardStoresRequest._ENDPOINT, transportOptions)); + } + + public Mono shardStores( + Function> fn) { + return shardStores(fn.apply(new ShardStoresRequest.Builder()).build()); + } + + public Mono shardStores() { + return shardStores(builder -> builder); + } + + public Mono shrink(ShrinkRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ShrinkRequest._ENDPOINT, transportOptions)); + } + + public Mono shrink(Function> fn) { + return shrink(fn.apply(new ShrinkRequest.Builder()).build()); + } + + 
public Mono simulateIndexTemplate(SimulateIndexTemplateRequest request) { + return Mono + .fromFuture(transport.performRequestAsync(request, SimulateIndexTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono simulateIndexTemplate( + Function> fn) { + return simulateIndexTemplate(fn.apply(new SimulateIndexTemplateRequest.Builder()).build()); + } + + public Mono simulateTemplate(SimulateTemplateRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, SimulateTemplateRequest._ENDPOINT, transportOptions)); + } + + public Mono simulateTemplate( + Function> fn) { + return simulateTemplate(fn.apply(new SimulateTemplateRequest.Builder()).build()); + } + + public Mono simulateTemplate() { + return simulateTemplate(builder -> builder); + } + + public Mono split(SplitRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, SplitRequest._ENDPOINT, transportOptions)); + } + + public Mono split(Function> fn) { + return split(fn.apply(new SplitRequest.Builder()).build()); + } + + public Mono stats(IndicesStatsRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, IndicesStatsRequest._ENDPOINT, transportOptions)); + } + + public Mono stats( + Function> fn) { + return stats(fn.apply(new IndicesStatsRequest.Builder()).build()); + } + + public Mono stats() { + return stats(builder -> builder); + } + + public Mono updateAliases(UpdateAliasesRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, UpdateAliasesRequest._ENDPOINT, transportOptions)); + } + + public Mono updateAliases( + Function> fn) { + return updateAliases(fn.apply(new UpdateAliasesRequest.Builder()).build()); + } + + public Mono updateAliases() { + return updateAliases(builder -> builder); + } + + public Mono validateQuery(ValidateQueryRequest request) { + return Mono.fromFuture(transport.performRequestAsync(request, ValidateQueryRequest._ENDPOINT, transportOptions)); + } + + public Mono 
validateQuery( + Function> fn) { + return validateQuery(fn.apply(new ValidateQueryRequest.Builder()).build()); + } + + public Mono validateQuery() { + return validateQuery(builder -> builder); + } + +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchSqlClient.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchSqlClient.java new file mode 100644 index 0000000000..c14bb48657 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchSqlClient.java @@ -0,0 +1,72 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import co.elastic.clients.ApiClient; +import co.elastic.clients.elasticsearch._types.ElasticsearchException; +import co.elastic.clients.elasticsearch.sql.QueryRequest; +import co.elastic.clients.elasticsearch.sql.QueryResponse; +import co.elastic.clients.transport.ElasticsearchTransport; +import co.elastic.clients.transport.TransportOptions; +import co.elastic.clients.util.ObjectBuilder; +import reactor.core.publisher.Mono; + +import java.io.IOException; +import java.util.function.Function; + +import org.jetbrains.annotations.Nullable; + +/** + * Reactive version of {@link co.elastic.clients.elasticsearch.sql.ElasticsearchSqlClient}. 
+ * + * @author Aouichaoui Youssef + * @since 5.4 + */ +public class ReactiveElasticsearchSqlClient extends ApiClient { + public ReactiveElasticsearchSqlClient(ElasticsearchTransport transport, @Nullable TransportOptions transportOptions) { + super(transport, transportOptions); + } + + @Override + public ReactiveElasticsearchSqlClient withTransportOptions(@Nullable TransportOptions transportOptions) { + return new ReactiveElasticsearchSqlClient(transport, transportOptions); + } + + /** + * Executes a SQL request + * + * @param fn a function that initializes a builder to create the {@link QueryRequest}. + */ + public final Mono query(Function> fn) + throws IOException, ElasticsearchException { + return query(fn.apply(new QueryRequest.Builder()).build()); + } + + /** + * Executes a SQL request. + */ + public Mono query(QueryRequest query) { + return Mono.fromFuture(transport.performRequestAsync(query, QueryRequest._ENDPOINT, transportOptions)); + } + + /** + * Executes a SQL request. + */ + public Mono query() { + return Mono.fromFuture( + transport.performRequestAsync(new QueryRequest.Builder().build(), QueryRequest._ENDPOINT, transportOptions)); + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java new file mode 100644 index 0000000000..a98e41ab90 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java @@ -0,0 +1,682 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.elasticsearch.client.elc; + +import static co.elastic.clients.util.ApiTypeHelper.*; +import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*; + +import co.elastic.clients.elasticsearch._types.Result; +import co.elastic.clients.elasticsearch.core.*; +import co.elastic.clients.elasticsearch.core.bulk.BulkResponseItem; +import co.elastic.clients.elasticsearch.core.search.ResponseBody; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.transport.Version; +import co.elastic.clients.transport.endpoints.BooleanResponse; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; + +import java.time.Duration; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort; +import org.springframework.data.elasticsearch.BulkFailureException; +import org.springframework.data.elasticsearch.NoSuchIndexException; +import org.springframework.data.elasticsearch.UncategorizedElasticsearchException; +import org.springframework.data.elasticsearch.client.UnsupportedBackendOperation; +import org.springframework.data.elasticsearch.core.AbstractReactiveElasticsearchTemplate; 
+import org.springframework.data.elasticsearch.core.AggregationContainer; +import org.springframework.data.elasticsearch.core.IndexedObjectInformation; +import org.springframework.data.elasticsearch.core.MultiGetItem; +import org.springframework.data.elasticsearch.core.ReactiveIndexOperations; +import org.springframework.data.elasticsearch.core.cluster.ReactiveClusterOperations; +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.data.elasticsearch.core.document.Document; +import org.springframework.data.elasticsearch.core.document.SearchDocument; +import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse; +import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates; +import org.springframework.data.elasticsearch.core.query.*; +import org.springframework.data.elasticsearch.core.query.UpdateResponse; +import org.springframework.data.elasticsearch.core.reindex.ReindexRequest; +import org.springframework.data.elasticsearch.core.reindex.ReindexResponse; +import org.springframework.data.elasticsearch.core.script.Script; +import org.springframework.data.elasticsearch.core.sql.SqlResponse; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations} using the new + * Elasticsearch client. 
+ * + * @author Peter-Josef Meisch + * @author Illia Ulianov + * @author Junghoon Ban + * @since 4.4 + */ +public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearchTemplate { + + private static final Log LOGGER = LogFactory.getLog(ReactiveElasticsearchTemplate.class); + + private final ReactiveElasticsearchClient client; + private final ReactiveElasticsearchSqlClient sqlClient; + private final RequestConverter requestConverter; + private final ResponseConverter responseConverter; + private final JsonpMapper jsonpMapper; + private final ElasticsearchExceptionTranslator exceptionTranslator; + + public ReactiveElasticsearchTemplate(ReactiveElasticsearchClient client, ElasticsearchConverter converter) { + super(converter); + + Assert.notNull(client, "client must not be null"); + + this.client = client; + this.sqlClient = client.sql(); + this.jsonpMapper = client._transport().jsonpMapper(); + requestConverter = new RequestConverter(converter, jsonpMapper); + responseConverter = new ResponseConverter(jsonpMapper); + exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); + } + + // region Document operations + @Override + protected Mono> doIndex(T entity, IndexCoordinates index) { + + IndexRequest indexRequest = requestConverter.documentIndexRequest(getIndexQuery(entity), index, + getRefreshPolicy()); + return Mono.just(entity) // + .zipWith(// + Mono.from(execute(client -> client.index(indexRequest))) // + .map(indexResponse -> new IndexResponseMetaData(indexResponse.id(), // + indexResponse.index(), // + indexResponse.seqNo(), // + indexResponse.primaryTerm(), // + indexResponse.version() // + ))); + } + + @Override + public Flux saveAll(Mono> entitiesPublisher, IndexCoordinates index) { + + Assert.notNull(entitiesPublisher, "entitiesPublisher must not be null!"); + + return entitiesPublisher // + .flatMapMany(entities -> Flux.fromIterable(entities) // + .concatMap(entity -> maybeCallbackBeforeConvert(entity, index)) // + 
).collectList() // + .map(Entities::new) // + .flatMapMany(entities -> { + + if (entities.isEmpty()) { + return Flux.empty(); + } + + return doBulkOperation(entities.indexQueries(), BulkOptions.defaultOptions(), index)// + .index() // + .flatMap(indexAndResponse -> { + T savedEntity = entities.entityAt(indexAndResponse.getT1()); + BulkResponseItem response = indexAndResponse.getT2(); + var updatedEntity = entityOperations.updateIndexedObject( + savedEntity, new IndexedObjectInformation( // + response.id(), // + response.index(), // + response.seqNo(), // + response.primaryTerm(), // + response.version()), + converter, + routingResolver); + return maybeCallbackAfterSave(updatedEntity, index); + }); + }); + } + + @Override + protected Mono doExists(String id, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(index, "index must not be null"); + + ExistsRequest existsRequest = requestConverter.documentExistsRequest(id, routingResolver.getRouting(), index); + + return Mono.from(execute( + ((ClientCallback>) client -> client.exists(existsRequest)))) + .map(BooleanResponse::value) // + .onErrorReturn(NoSuchIndexException.class, false); + } + + @Override + public Mono delete(DeleteQuery query, Class entityType, IndexCoordinates index) { + Assert.notNull(query, "query must not be null"); + + DeleteByQueryRequest request = requestConverter.documentDeleteByQueryRequest(query, routingResolver.getRouting(), + entityType, index, getRefreshPolicy()); + return Mono.from(execute(client -> client.deleteByQuery(request))).map(responseConverter::byQueryResponse); + } + + @Override + public Mono get(String id, Class entityType, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(entityType, "entityType must not be null"); + Assert.notNull(index, "index must not be null"); + + GetRequest getRequest = requestConverter.documentGetRequest(id, routingResolver.getRouting(), index); + + Mono> getResponse = Mono + 
.from(execute(client -> client.get(getRequest, EntityAsMap.class))); + + ReadDocumentCallback callback = new ReadDocumentCallback<>(converter, entityType, index); + return getResponse.flatMap(response -> callback.toEntity(DocumentAdapters.from(response))); + } + + @Override + public Mono reindex(ReindexRequest reindexRequest) { + + Assert.notNull(reindexRequest, "reindexRequest must not be null"); + + co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, + true); + + return Mono.from(execute( // + client -> client.reindex(reindexRequestES))).map(responseConverter::reindexResponse); + } + + @Override + public Mono submitReindex(ReindexRequest reindexRequest) { + + Assert.notNull(reindexRequest, "reindexRequest must not be null"); + + co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, + false); + + return Mono.from(execute( // + client -> client.reindex(reindexRequestES))) + .flatMap(response -> (response.task() == null) + ? Mono.error( + new UnsupportedBackendOperation("ElasticsearchClient did not return a task id on submit request")) + : Mono.just(response.task())); + } + + @Override + public Mono update(UpdateQuery updateQuery, IndexCoordinates index) { + + Assert.notNull(updateQuery, "UpdateQuery must not be null"); + Assert.notNull(index, "Index must not be null"); + + UpdateRequest request = requestConverter.documentUpdateRequest(updateQuery, index, getRefreshPolicy(), + routingResolver.getRouting()); + + return Mono.from(execute(client -> client.update(request, Document.class))).flatMap(response -> { + UpdateResponse.Result result = result(response.result()); + return result == null ? 
Mono.empty() : Mono.just(UpdateResponse.of(result)); + }); + } + + @Override + public Mono updateByQuery(UpdateQuery updateQuery, IndexCoordinates index) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public Mono bulkUpdate(List queries, BulkOptions bulkOptions, IndexCoordinates index) { + + Assert.notNull(queries, "List of UpdateQuery must not be null"); + Assert.notNull(bulkOptions, "BulkOptions must not be null"); + Assert.notNull(index, "Index must not be null"); + + return doBulkOperation(queries, bulkOptions, index).then(); + } + + private Flux doBulkOperation(List queries, BulkOptions bulkOptions, IndexCoordinates index) { + + BulkRequest bulkRequest = requestConverter.documentBulkRequest(queries, bulkOptions, index, getRefreshPolicy()); + return client.bulk(bulkRequest) + .onErrorMap(e -> new UncategorizedElasticsearchException("Error executing bulk request", e)) + .flatMap(this::checkForBulkOperationFailure) // + .flatMapMany(response -> Flux.fromIterable(response.items())); + + } + + private Mono checkForBulkOperationFailure(BulkResponse bulkResponse) { + + if (bulkResponse.errors()) { + Map failedDocuments = new HashMap<>(); + + for (BulkResponseItem item : bulkResponse.items()) { + + if (item.error() != null) { + failedDocuments.put(item.id(), new BulkFailureException.FailureDetails(item.status(), item.error().reason())); + } + } + BulkFailureException exception = new BulkFailureException( + "Bulk operation has failures. 
Use ElasticsearchException.getFailedDocuments() for detailed messages [" + + failedDocuments + ']', + failedDocuments); + return Mono.error(exception); + } else { + return Mono.just(bulkResponse); + } + } + + @Override + protected Mono doDeleteById(String id, @Nullable String routing, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(index, "index must not be null"); + + return Mono.defer(() -> { + DeleteRequest deleteRequest = requestConverter.documentDeleteRequest(id, routing, index, getRefreshPolicy()); + return doDelete(deleteRequest); + }); + } + + private Mono doDelete(DeleteRequest request) { + + return Mono.from(execute(client -> client.delete(request))) // + .flatMap(deleteResponse -> { + if (deleteResponse.result() == Result.NotFound) { + return Mono.empty(); + } + return Mono.just(deleteResponse.id()); + }).onErrorResume(NoSuchIndexException.class, it -> Mono.empty()); + } + + @Override + public Flux> multiGet(Query query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + + MgetRequest request = requestConverter.documentMgetRequest(query, clazz, index); + + ReadDocumentCallback callback = new ReadDocumentCallback<>(converter, clazz, index); + + Publisher> response = execute(client -> client.mget(request, EntityAsMap.class)); + + return Mono.from(response)// + .flatMapMany(it -> Flux.fromIterable(DocumentAdapters.from(it))) // + .flatMap(multiGetItem -> { + if (multiGetItem.isFailed()) { + return Mono.just(MultiGetItem.of(null, multiGetItem.getFailure())); + } else { + return callback.toEntity(multiGetItem.getItem()) // + .map(t -> MultiGetItem.of(t, multiGetItem.getFailure())); + } + }); + } + + // endregion + + @Override + protected ReactiveElasticsearchTemplate doCopy() { + return new ReactiveElasticsearchTemplate(client, converter); + } + + // region search operations + + @Override + protected Flux doFind(Query query, 
Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + Assert.notNull(index, "index must not be null"); + + if (query instanceof SearchTemplateQuery searchTemplateQuery) { + return Flux.defer(() -> doSearch(searchTemplateQuery, clazz, index)); + } else { + return Flux.defer(() -> { + boolean queryIsUnbounded = !(query.getPageable().isPaged() || query.isLimiting()); + return queryIsUnbounded ? doFindUnbounded(query, clazz, index) : doFindBounded(query, clazz, index); + }); + } + } + + private Flux doFindUnbounded(Query query, Class clazz, IndexCoordinates index) { + + if (query instanceof BaseQuery baseQuery) { + var pitKeepAlive = Duration.ofMinutes(5); + // setup functions for Flux.usingWhen() + Mono resourceSupplier = openPointInTime(index, pitKeepAlive, true) + .map(pit -> new PitSearchAfter(baseQuery, pit)); + + Function> asyncComplete = this::cleanupPit; + + BiFunction> asyncError = (psa, ex) -> { + if (LOGGER.isErrorEnabled()) { + LOGGER.error("Error during pit/search_after", ex); + } + return cleanupPit(psa); + }; + + Function> asyncCancel = psa -> { + if (LOGGER.isWarnEnabled()) { + LOGGER.warn("pit/search_after was cancelled"); + } + return cleanupPit(psa); + }; + + Function>> resourceClosure = psa -> { + + baseQuery.setPointInTime(new Query.PointInTime(psa.getPit(), pitKeepAlive)); + + // only add _shard_doc if there is not a field_collapse and a sort with the same name + boolean addShardDoc = true; + + if (query instanceof NativeQuery nativeQuery && nativeQuery.getFieldCollapse() != null) { + var field = nativeQuery.getFieldCollapse().field(); + + if (nativeQuery.getSortOptions().stream() + .anyMatch(sortOptions -> sortOptions.isField() && sortOptions.field().field().equals(field))) { + addShardDoc = false; + } + + if (query.getSort() != null + && query.getSort().stream().anyMatch(order -> order.getProperty().equals(field))) { + addShardDoc = false; + } + } + + if 
(addShardDoc) { + baseQuery.addSort(Sort.by("_shard_doc")); + } + + SearchRequest firstSearchRequest = requestConverter.searchRequest(baseQuery, routingResolver.getRouting(), + clazz, index, false, true); + + return Mono.from(execute(client -> client.search(firstSearchRequest, EntityAsMap.class))) + .expand(entityAsMapSearchResponse -> { + + var hits = entityAsMapSearchResponse.hits().hits(); + if (CollectionUtils.isEmpty(hits)) { + return Mono.empty(); + } + + List sortOptions = hits.get(hits.size() - 1).sort().stream().map(TypeUtils::toObject) + .collect(Collectors.toList()); + baseQuery.setSearchAfter(sortOptions); + SearchRequest followSearchRequest = requestConverter.searchRequest(baseQuery, + routingResolver.getRouting(), clazz, index, false, true); + return Mono.from(execute(client -> client.search(followSearchRequest, EntityAsMap.class))); + }); + + }; + + Flux> searchResponses = Flux.usingWhen(resourceSupplier, resourceClosure, asyncComplete, + asyncError, asyncCancel); + return searchResponses.flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) + .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); + } else { + return Flux.error(new IllegalArgumentException("Query must be derived from BaseQuery")); + } + } + + private Publisher cleanupPit(PitSearchAfter psa) { + var baseQuery = psa.getBaseQuery(); + baseQuery.setPointInTime(null); + baseQuery.setSearchAfter(null); + baseQuery.setSort(psa.getSort()); + var pit = psa.getPit(); + return StringUtils.hasText(pit) ? 
closePointInTime(pit) : Mono.empty(); + } + + static private class PitSearchAfter { + private final BaseQuery baseQuery; + @Nullable private final Sort sort; + private final String pit; + + PitSearchAfter(BaseQuery baseQuery, String pit) { + this.baseQuery = baseQuery; + this.sort = baseQuery.getSort(); + this.pit = pit; + } + + public BaseQuery getBaseQuery() { + return baseQuery; + } + + @Nullable + public Sort getSort() { + return sort; + } + + public String getPit() { + return pit; + } + } + + @Override + protected Mono doCount(Query query, Class entityType, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(index, "index must not be null"); + + SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), entityType, index, + true); + + return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) + .map(searchResponse -> searchResponse.hits().total() != null ? searchResponse.hits().total().value() : 0L); + } + + private Flux doFindBounded(Query query, Class clazz, IndexCoordinates index) { + + SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, + false, false); + + return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) // + .flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) // + .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); + } + + private Flux doSearch(SearchTemplateQuery query, Class clazz, IndexCoordinates index) { + + var request = requestConverter.searchTemplate(query, routingResolver.getRouting(), index); + + return Mono.from(execute(client -> client.searchTemplate(request, EntityAsMap.class))) // + .flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) // + .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); + } + + @Override + protected Mono 
doFindForResponse(Query query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(index, "index must not be null"); + + SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, + false); + + // noinspection unchecked + SearchDocumentCallback callback = new ReadSearchDocumentCallback<>((Class) clazz, index); + SearchDocumentResponse.EntityCreator entityCreator = searchDocument -> callback.toEntity(searchDocument) + .toFuture(); + + return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) + .map(searchResponse -> SearchDocumentResponseBuilder.from(searchResponse, entityCreator, jsonpMapper)); + } + + @Override + public Flux> aggregate(Query query, Class entityType, IndexCoordinates index) { + + return doFindForResponse(query, entityType, index).flatMapMany(searchDocumentResponse -> { + ElasticsearchAggregations aggregations = (ElasticsearchAggregations) searchDocumentResponse.getAggregations(); + return aggregations == null ? 
Flux.empty() : Flux.fromIterable(aggregations.aggregations()); + }); + } + + @Override + public Mono openPointInTime(IndexCoordinates index, Duration keepAlive, Boolean ignoreUnavailable) { + + Assert.notNull(index, "index must not be null"); + Assert.notNull(keepAlive, "keepAlive must not be null"); + Assert.notNull(ignoreUnavailable, "ignoreUnavailable must not be null"); + + var request = requestConverter.searchOpenPointInTimeRequest(index, keepAlive, ignoreUnavailable); + return Mono.from(execute(client -> client.openPointInTime(request))).map(OpenPointInTimeResponse::id); + } + + @Override + public Mono closePointInTime(String pit) { + + Assert.notNull(pit, "pit must not be null"); + + ClosePointInTimeRequest request = requestConverter.searchClosePointInTime(pit); + return Mono.from(execute(client -> client.closePointInTime(request))).map(ClosePointInTimeResponse::succeeded); + } + + // endregion + + // region script operations + @Override + public Mono putScript(Script script) { + + Assert.notNull(script, "script must not be null"); + + var request = requestConverter.scriptPut(script); + return Mono.from(execute(client -> client.putScript(request))).map(PutScriptResponse::acknowledged); + } + + @Override + public Mono