diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 4ac6144ab2..c1d8714607 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,12 +1,17 @@
- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
-- [ ] There is a ticket in the bug tracker for the project in our [issue tracker](https://github.com/spring-projects/spring-data-elasticsearch/issues).
+- [ ] **There is a ticket in the bug tracker for the project in our [issue tracker](https://github.com/spring-projects/spring-data-elasticsearch/issues)**. Add the issue number to the _Closes #issue-number_ line below.
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
+
+Closes #issue-number
diff --git a/.gitignore b/.gitignore
index 26be4183a9..449f58ea44 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,3 +20,17 @@ target
*.ipr
*.iws
.idea
+/.env
+
+
+/zap.env
+/localdocker.env
+.localdocker-env
+
+build/
+node_modules
+node
+package-lock.json
+
+.mvn/.develocity
+/src/test/resources/testcontainers-local.properties
diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml
new file mode 100644
index 0000000000..e0857eaa25
--- /dev/null
+++ b/.mvn/extensions.xml
@@ -0,0 +1,8 @@
+
+
+
+ io.spring.develocity.conventions
+ develocity-conventions-maven-extension
+ 0.0.22
+
+
diff --git a/.mvn/jvm.config b/.mvn/jvm.config
new file mode 100644
index 0000000000..e27f6e8f5e
--- /dev/null
+++ b/.mvn/jvm.config
@@ -0,0 +1,14 @@
+--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
+--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED
+--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED
+--add-opens=java.base/java.util=ALL-UNNAMED
+--add-opens=java.base/java.lang.reflect=ALL-UNNAMED
+--add-opens=java.base/java.text=ALL-UNNAMED
+--add-opens=java.desktop/java.awt.font=ALL-UNNAMED
diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java
new file mode 100644
index 0000000000..64a46202ac
--- /dev/null
+++ b/.mvn/wrapper/MavenWrapperDownloader.java
@@ -0,0 +1,115 @@
+
+/*
+ * Copyright 2007-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.net.*;
+import java.io.*;
+import java.nio.channels.*;
+import java.util.Properties;
+
+public class MavenWrapperDownloader {
+
+ private static final String WRAPPER_VERSION = "0.5.6";
+ /**
+ * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
+ */
+ private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+ + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
+
+ /**
+ * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to use instead of the default
+ * one.
+ */
+ private static final String MAVEN_WRAPPER_PROPERTIES_PATH = ".mvn/wrapper/maven-wrapper.properties";
+
+ /**
+ * Path where the maven-wrapper.jar will be saved to.
+ */
+ private static final String MAVEN_WRAPPER_JAR_PATH = ".mvn/wrapper/maven-wrapper.jar";
+
+ /**
+ * Name of the property which should be used to override the default download url for the wrapper.
+ */
+ private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+ public static void main(String args[]) {
+ System.out.println("- Downloader started");
+ File baseDirectory = new File(args[0]);
+ System.out.println("- Using transport directory: " + baseDirectory.getAbsolutePath());
+
+ // If the maven-wrapper.properties exists, read it and check if it contains a custom
+ // wrapperUrl parameter.
+ File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+ String url = DEFAULT_DOWNLOAD_URL;
+ if (mavenWrapperPropertyFile.exists()) {
+ FileInputStream mavenWrapperPropertyFileInputStream = null;
+ try {
+ mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+ Properties mavenWrapperProperties = new Properties();
+ mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+ url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+ } catch (IOException e) {
+ System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+ } finally {
+ try {
+ if (mavenWrapperPropertyFileInputStream != null) {
+ mavenWrapperPropertyFileInputStream.close();
+ }
+ } catch (IOException e) {
+ // Ignore ...
+ }
+ }
+ }
+ System.out.println("- Downloading from: " + url);
+
+ File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+ if (!outputFile.getParentFile().exists()) {
+ if (!outputFile.getParentFile().mkdirs()) {
+ System.out.println("- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+ }
+ }
+ System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+ try {
+ downloadFileFromURL(url, outputFile);
+ System.out.println("Done");
+ System.exit(0);
+ } catch (Throwable e) {
+ System.out.println("- Error downloading");
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
+
+ private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+ if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+ String username = System.getenv("MVNW_USERNAME");
+ char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+ Authenticator.setDefault(new Authenticator() {
+ @Override
+ protected PasswordAuthentication getPasswordAuthentication() {
+ return new PasswordAuthentication(username, password);
+ }
+ });
+ }
+ URL website = new URL(urlString);
+ ReadableByteChannel rbc;
+ rbc = Channels.newChannel(website.openStream());
+ FileOutputStream fos = new FileOutputStream(destination);
+ fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+ fos.close();
+ rbc.close();
+ }
+
+}
diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar
old mode 100755
new mode 100644
index 01e6799737..2cc7d4a55c
Binary files a/.mvn/wrapper/maven-wrapper.jar and b/.mvn/wrapper/maven-wrapper.jar differ
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
old mode 100755
new mode 100644
index 00d32aab1d..e075a74d86
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -1 +1,3 @@
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
\ No newline at end of file
+#Thu Nov 07 09:47:28 CET 2024
+wrapperUrl=https\://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
diff --git a/CI.adoc b/CI.adoc
index 613add5162..56af9d15ee 100644
--- a/CI.adoc
+++ b/CI.adoc
@@ -1,6 +1,6 @@
= Continuous Integration
-image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2Fmaster&subject=2020.0.0%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/]
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2Fmain&subject=2020.0.0%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2F4.0.x&subject=Neumann%20(4.0.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2F3.2.x&subject=Moore%20(3.2.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/]
diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc
index de521df6e7..1cff01d255 100644
--- a/CONTRIBUTING.adoc
+++ b/CONTRIBUTING.adoc
@@ -1,6 +1,9 @@
= Spring Data contribution guidelines
-You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
+You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
+**Please read these carefully!**
+
+Do not submit a Pull Request before having created an issue and having discussed it. This prevents you from doing work that might be rejected.
== Running the test locally
diff --git a/Jenkinsfile b/Jenkinsfile
index 63fb2aebd1..1d2500ed1e 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,9 +1,15 @@
+def p = [:]
+node {
+ checkout scm
+ p = readProperties interpolate: true, file: 'ci/pipeline.properties'
+}
+
pipeline {
agent none
triggers {
pollSCM 'H/10 * * * *'
- upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
+ upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
}
options {
@@ -12,10 +18,11 @@ pipeline {
}
stages {
- stage("test: baseline (jdk8)") {
+ stage("test: baseline (main)") {
when {
+ beforeAgent(true)
anyOf {
- branch 'master'
+ branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
@@ -25,16 +32,16 @@ pipeline {
options { timeout(time: 30, unit: 'MINUTES') }
environment {
- DOCKER_HUB = credentials('hub.docker.com-springbuildmaster')
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}")
}
steps {
script {
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
- docker.image('adoptopenjdk/openjdk8:latest').inside('-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home') {
- sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}"
- sh 'PROFILE=none ci/verify.sh'
- sh "ci/clean.sh"
+ docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) {
+ docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) {
+ sh "PROFILE=none JENKINS_USER_NAME=${p['jenkins.user.name']} ci/verify.sh"
+ sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh"
}
}
}
@@ -43,52 +50,28 @@ pipeline {
stage("Test other configurations") {
when {
+ beforeAgent(true)
allOf {
- branch 'master'
+ branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
- stage("test: baseline (jdk11)") {
+ stage("test: baseline (next)") {
agent {
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
-
environment {
- DOCKER_HUB = credentials('hub.docker.com-springbuildmaster')
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}")
}
-
steps {
script {
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
- docker.image('adoptopenjdk/openjdk11:latest').inside('-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home') {
- sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}"
- sh 'PROFILE=java11 ci/verify.sh'
- sh "ci/clean.sh"
- }
- }
- }
- }
- }
-
- stage("test: baseline (jdk15)") {
- agent {
- label 'data'
- }
- options { timeout(time: 30, unit: 'MINUTES') }
-
- environment {
- DOCKER_HUB = credentials('hub.docker.com-springbuildmaster')
- }
-
- steps {
- script {
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
- docker.image('adoptopenjdk/openjdk15:latest').inside('-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home') {
- sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}"
- sh 'PROFILE=java11 ci/verify.sh'
- sh "ci/clean.sh"
+ docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) {
+ docker.image(p['docker.java.next.image']).inside(p['docker.java.inside.docker']) {
+ sh "PROFILE=none JENKINS_USER_NAME=${p['jenkins.user.name']} ci/verify.sh"
+ sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh"
}
}
}
@@ -99,8 +82,9 @@ pipeline {
stage('Release to artifactory') {
when {
+ beforeAgent(true)
anyOf {
- branch 'master'
+ branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
@@ -108,51 +92,25 @@ pipeline {
label 'data'
}
options { timeout(time: 20, unit: 'MINUTES') }
-
environment {
- ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+ ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+ DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}")
}
-
steps {
script {
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
- docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch-non-root ' +
- '-Dartifactory.server=https://repo.spring.io ' +
+ docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) {
+ docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) {
+ sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' +
+ "./mvnw -s settings.xml -Pci,artifactory " +
+ "-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " +
+ "-Dartifactory.server=${p['artifactory.url']} " +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
- "-Dartifactory.staging-repository=libs-snapshot-local " +
+ "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " +
"-Dartifactory.build-name=spring-data-elasticsearch " +
- "-Dartifactory.build-number=${BUILD_NUMBER} " +
- '-Dmaven.test.skip=true clean deploy -U -B'
- }
- }
- }
- }
- }
- stage('Publish documentation') {
- when {
- branch 'master'
- }
- agent {
- label 'data'
- }
- options { timeout(time: 20, unit: 'MINUTES') }
-
- environment {
- ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
- }
-
- steps {
- script {
- docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
- docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
- sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch-non-root ' +
- '-Dartifactory.server=https://repo.spring.io ' +
- "-Dartifactory.username=${ARTIFACTORY_USR} " +
- "-Dartifactory.password=${ARTIFACTORY_PSW} " +
- "-Dartifactory.distribution-repository=temp-private-local " +
- '-Dmaven.test.skip=true clean deploy -U -B'
+ "-Dartifactory.build-number=spring-data-elasticsearch-${BRANCH_NAME}-build-${BUILD_NUMBER} " +
+ "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch " +
+ "-Dmaven.test.skip=true clean deploy -U -B"
}
}
}
@@ -163,10 +121,6 @@ pipeline {
post {
changed {
script {
- slackSend(
- color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
- channel: '#spring-data-dev',
- message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
emailext(
subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
mimeType: 'text/html',
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000000..ff77379631
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.adoc b/README.adoc
index f3a1e2674f..0242089d82 100644
--- a/README.adoc
+++ b/README.adoc
@@ -1,17 +1,17 @@
-image:https://spring.io/badges/spring-data-elasticsearch/ga.svg[Spring Data Elasticsearch,link=https://projects.spring.io/spring-data-elasticsearch#quick-start] image:https://spring.io/badges/spring-data-elasticsearch/snapshot.svg[Spring Data Elasticsearch,link=https://projects.spring.io/spring-data-elasticsearch#quick-start]
-
-= Spring Data for Elasticsearch image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
+= Spring Data for Elasticsearch image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-elasticsearch/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring%20Data%20Elasticsearch"]
The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
-The Spring Data Elasticsearch project provides integration with the https://www.elastic.co/[Elasticsearch] search engine. Key functional areas of Spring Data Elasticsearch are a POJO centric model for interacting with a Elasticsearch Documents and easily writing a Repository style data access layer.
+The Spring Data Elasticsearch project provides integration with the https://www.elastic.co/[Elasticsearch] search engine.
+Key functional areas of Spring Data Elasticsearch are a POJO centric model for interacting with Elasticsearch Documents and easily writing a Repository style data access layer.
This project is lead and maintained by the community.
== Features
-* Spring configuration support using Java based `@Configuration` classes or an XML namespace for a ES clients instances.
-* `ElasticsearchRestTemplate` helper class that increases productivity performing common ES operations. Includes integrated object mapping between documents and POJOs.
+* Spring configuration support using Java based `@Configuration` classes or an XML namespace for ES client instances.
+* `ElasticsearchOperations` class and implementations that increase productivity when performing common ES operations.
+Includes integrated object mapping between documents and POJOs.
* Feature Rich Object Mapping integrated with Spring’s Conversion Service
* Annotation based mapping metadata
* Automatic implementation of `Repository` interfaces including support for custom search methods.
@@ -19,7 +19,9 @@ This project is lead and maintained by the community.
== Code of Conduct
-This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
+This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct].
+By participating, you are expected to uphold this code of conduct.
+Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
== Getting Started
@@ -58,52 +60,9 @@ public class MyService {
}
----
-=== Using Transport Client
-
-NOTE: Usage of the TransportClient is deprecated as of version 4.0, use RestClient instead.
-
-
-[source,java]
-----
-@Configuration
-public class TransportClientConfig extends ElasticsearchConfigurationSupport {
-
- @Bean
- public Client elasticsearchClient() throws UnknownHostException {
- Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build();
- TransportClient client = new PreBuiltTransportClient(settings);
- client.addTransportAddress(new TransportAddress(InetAddress.getByName("127.0.0.1"), 9300));
- return client;
- }
-
- @Bean(name = { "elasticsearchOperations", "elasticsearchTemplate" })
- public ElasticsearchTemplate elasticsearchTemplate() throws UnknownHostException {
- return new ElasticsearchTemplate(elasticsearchClient());
- }
-}
-----
-
=== Using the RestClient
-Provide a configuration like this:
-
-[source,java]
-----
-@Configuration
-public class RestClientConfig extends AbstractElasticsearchConfiguration {
-
- @Override
- @Bean
- public RestHighLevelClient elasticsearchClient() {
-
- final ClientConfiguration clientConfiguration = ClientConfiguration.builder()
- .connectedTo("localhost:9200")
- .build();
-
- return RestClients.create(clientConfiguration).rest();
- }
-}
-----
+Please check the https://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/#elasticsearch.clients.configuration[official documentation].
=== Maven configuration
@@ -114,13 +73,10 @@ Add the Maven dependency:
org.springframework.dataspring-data-elasticsearch
- ${version}.RELEASE
+ ${version}
----
-// NOTE: since Github does not support include directives, the content of
-// the src/main/asciidoc/reference/preface.adoc file is duplicated here
-// Always change both files!
**Compatibility Matrix**
The compatibility between Spring Data Elasticsearch, Elasticsearch client drivers and Spring Boot versions can be found in the https://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/#preface.versions[reference documentation].
@@ -136,9 +92,9 @@ To use the Release candidate versions of the upcoming major version, use our Mav
- spring-libs-snapshot
+ spring-snapshotSpring Snapshot Repository
- https://repo.spring.io/libs-milestone
+ https://repo.spring.io/milestone
----
@@ -149,44 +105,47 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
org.springframework.dataspring-data-elasticsearch
- ${version}.BUILD-SNAPSHOT
+ ${version}-SNAPSHOT
- spring-libs-snapshot
+ spring-snapshotSpring Snapshot Repository
- https://repo.spring.io/libs-snapshot
+ https://repo.spring.io/snapshot
----
== Getting Help
-Having trouble with Spring Data? We’d love to help!
+Having trouble with Spring Data?
+We’d love to help!
* Check the
https://docs.spring.io/spring-data/elasticsearch/docs/current/reference/html/[reference documentation], and https://docs.spring.io/spring-data/elasticsearch/docs/current/api/[Javadocs].
* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation.
If you are just starting out with Spring, try one of the https://spring.io/guides[guides].
-* If you are upgrading, check out the https://docs.spring.io/spring-data/elasticsearch/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features.
-* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-elasticsearch`].
-You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
+* Ask a question or chat with the community on https://app.gitter.im/#/room/#spring-projects_spring-data:gitter.im[Gitter].
* Report bugs with Spring Data for Elasticsearch at https://github.com/spring-projects/spring-data-elasticsearch/issues[https://github.com/spring-projects/spring-data-elasticsearch/issues].
== Reporting Issues
-Spring Data uses GitHub as issue tracking system to record bugs and feature requests. If you want to raise an issue, please follow the recommendations below:
+Spring Data uses GitHub as issue tracking system to record bugs and feature requests.
+If you want to raise an issue, please follow the recommendations below:
* Before you log a bug, please search the
https://github.com/spring-projects/spring-data-elasticsearch/issues[issue tracker] to see if someone has already reported the problem.
-* If the issue doesn’t already exist, https://github.com/spring-projects/spring-data-elasticsearch/issues/new[create a new issue].
+* If the issue doesn't already exist, https://github.com/spring-projects/spring-data-elasticsearch/issues/new[create a new issue].
* Please provide as much information as possible with the issue report; we like to know the version of Spring Data Elasticsearch that you are using and the JVM version.
* If you need to paste code, or include a stack trace use Markdown +++```+++ escapes before and after your text.
-* If possible try to create a test-case or project that replicates the issue. Attach a link to your code or a compressed file containing your code.
+* If possible try to create a test-case or project that replicates the issue.
+Attach a link to your code or a compressed file containing your code.
== Building from Source
You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
-You also need JDK 1.8.
+
+You need JDK 17 or above to build the _main_ branch.
+For the branches up to and including release 4.4, JDK 8 is required.
[source,bash]
----
@@ -197,12 +156,8 @@ If you want to build with the regular `mvn` command, you will need https://maven
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before submitting your first pull request._
-IMPORTANT: When contributing, please make sure an issue exists in https://github.com/spring-projects/spring-data-elasticsearch/issues[issue tracker] and comment on this issue with how you want to address it. By this we not only know that someone is working on an issue, we can also align architectural questions and possible solutions before work is invested
-. We
-so
-can prevent that much work is put into Pull Requests that have little
-or no chances of being merged.
-
+IMPORTANT: When contributing, please make sure an issue exists in https://github.com/spring-projects/spring-data-elasticsearch/issues[issue tracker] and comment on this issue with how you want to address it.
+By this we not only know that someone is working on an issue, we can also align architectural questions and possible solutions before work is invested. This way we can prevent much work being put into Pull Requests that have little or no chance of being merged.
=== Building reference documentation
@@ -210,14 +165,14 @@ Building the documentation builds also the project without running tests.
[source,bash]
----
- $ ./mvnw clean install -Pdistribute
+ $ ./mvnw clean install -Pantora
----
-The generated documentation is available from `target/site/reference/html/index.html`.
+The generated documentation is available from `target/site/index.html`.
== Examples
-For examples on using the Spring Data for Elasticsearch, see the https://github.com/spring-projects/spring-data-examples/tree/master/elasticsearch/example[spring-data-examples] project.
+For examples on using Spring Data Elasticsearch, see the https://github.com/spring-projects/spring-data-examples/tree/main/elasticsearch/example[spring-data-examples] project.
== License
diff --git a/TESTING.adoc b/TESTING.adoc
index 0f9b447040..f30c7efe34 100644
--- a/TESTING.adoc
+++ b/TESTING.adoc
@@ -18,24 +18,3 @@ is run. There must be _docker_ running, as the integration tests use docker to s
Integration tests are tests that have the Junit5 Tag `@Tag("integration-test")` on the test class. Normally this should not be set explicitly, but the annotation `@SpringIntegrationTest` should be used. This not only marks the test as integration test, but integrates an automatic setup of an Elasticsearch Testcontainer and integrate this with Spring, so
that the required Beans can be automatically injected. Check _src/test/java/org/springframework/data/elasticsearch/JUnit5SampleRestClientBasedTests.java_ as a reference setup
-
-== Mutation testing
-
-The pom includes a plugin dependency to run mutation tests using [pitest](https://pitest.org/). These tests must be explicitly configured and run, they are not included in the normal build steps. Before pitest can run, a normal `./mvnw test` must be executed. The configuration excludes integration tests, only unit tests are considered.
-
-
-pitest can be run directly from the commandline
-----
-./mvnw org.pitest:pitest-maven:mutationCoverage
-----
-This will output an html report to _target/pit-reports/YYYYMMDDHHMI_.
-
-To speed-up repeated analysis of the same codebase set the withHistory parameter to true.
-----
-./mvnw -DwithHistory org.pitest:pitest-maven:mutationCoverage
-----
-
-The classes to test are defined either in the pom.xml or can be set on the commandline:
-----
-./mvnw -DwithHistory org.pitest:pitest-maven:mutationCoverage -DtargetClasses="org.springframework.data.elasticsearch.support.*"
-----
diff --git a/ci/clean.sh b/ci/clean.sh
index 26f14033f5..ca174330ee 100755
--- a/ci/clean.sh
+++ b/ci/clean.sh
@@ -2,5 +2,7 @@
set -euo pipefail
-MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" \
- ./mvnw clean -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch
+export JENKINS_USER=${JENKINS_USER_NAME}
+
+MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \
+ ./mvnw -s settings.xml clean -Dscan=false -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root
diff --git a/ci/pipeline.properties b/ci/pipeline.properties
new file mode 100644
index 0000000000..cde4a8e881
--- /dev/null
+++ b/ci/pipeline.properties
@@ -0,0 +1,31 @@
+# Java versions
+java.main.tag=24.0.1_9-jdk-noble
+java.next.tag=24.0.1_9-jdk-noble
+
+# Docker container images - standard
+docker.java.main.image=library/eclipse-temurin:${java.main.tag}
+docker.java.next.image=library/eclipse-temurin:${java.next.tag}
+
+# Supported versions of MongoDB
+docker.mongodb.6.0.version=6.0.23
+docker.mongodb.7.0.version=7.0.20
+docker.mongodb.8.0.version=8.0.9
+
+# Supported versions of Redis
+docker.redis.6.version=6.2.13
+docker.redis.7.version=7.2.4
+
+# Docker environment settings
+docker.java.inside.basic=-v $HOME:/tmp/jenkins-home
+docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home
+
+# Credentials
+docker.registry=
+docker.credentials=hub.docker.com-springbuildmaster
+docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com
+docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token
+artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c
+artifactory.url=https://repo.spring.io
+artifactory.repository.snapshot=libs-snapshot-local
+develocity.access-key=gradle_enterprise_secret_access_key
+jenkins.user.name=spring-builds+jenkins
diff --git a/ci/verify.sh b/ci/verify.sh
index 744718c479..46afc80280 100755
--- a/ci/verify.sh
+++ b/ci/verify.sh
@@ -3,8 +3,8 @@
set -euo pipefail
mkdir -p /tmp/jenkins-home/.m2/spring-data-elasticsearch
-chown -R 1001:1001 .
+export JENKINS_USER=${JENKINS_USER_NAME}
-MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" \
- ./mvnw \
- -P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch
\ No newline at end of file
+MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \
+ ./mvnw -s settings.xml \
+ -P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root
diff --git a/mvnw b/mvnw
index 8b9da3b8b6..9091adf188 100755
--- a/mvnw
+++ b/mvnw
@@ -8,7 +8,7 @@
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
@@ -19,7 +19,7 @@
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
-# Maven2 Start Up Batch script
+# Maven Start Up Batch script
#
# Required ENV vars:
# ------------------
@@ -114,7 +114,6 @@ if $mingw ; then
M2_HOME="`(cd "$M2_HOME"; pwd)`"
[ -n "$JAVA_HOME" ] &&
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
- # TODO classpath?
fi
if [ -z "$JAVA_HOME" ]; then
@@ -163,7 +162,7 @@ fi
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
# traverses directory structure from process work directory to filesystem root
-# first directory with .mvn subdirectory is considered project base directory
+# first directory with .mvn subdirectory is considered project base directory
find_maven_basedir() {
if [ -z "$1" ]
@@ -212,7 +211,11 @@ else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
fi
- jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
+ if [ -n "$MVNW_REPOURL" ]; then
+ jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+ else
+ jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+ fi
while IFS="=" read key value; do
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
esac
@@ -221,22 +224,38 @@ else
echo "Downloading from: $jarUrl"
fi
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
+ if $cygwin; then
+ wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
+ fi
if command -v wget > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found wget ... using wget"
fi
- wget "$jarUrl" -O "$wrapperJarPath"
+ if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+ wget "$jarUrl" -O "$wrapperJarPath"
+ else
+ wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
+ fi
elif command -v curl > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found curl ... using curl"
fi
- curl -o "$wrapperJarPath" "$jarUrl"
+ if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+ curl -o "$wrapperJarPath" "$jarUrl" -f
+ else
+ curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
+ fi
+
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Falling back to using Java to download"
fi
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
+ # For Cygwin, switch paths to Windows format before running javac
+ if $cygwin; then
+ javaClass=`cygpath --path --windows "$javaClass"`
+ fi
if [ -e "$javaClass" ]; then
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
@@ -277,6 +296,11 @@ if $cygwin; then
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
fi
+# Provide a "standardized" way to retrieve the CLI args that will
+# work with both Windows and non-Windows executions.
+MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
+export MAVEN_CMD_LINE_ARGS
+
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
exec "$JAVACMD" \
diff --git a/mvnw.cmd b/mvnw.cmd
old mode 100755
new mode 100644
index fef5a8f7f9..86115719e5
--- a/mvnw.cmd
+++ b/mvnw.cmd
@@ -7,7 +7,7 @@
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
-@REM https://www.apache.org/licenses/LICENSE-2.0
+@REM http://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@@ -18,7 +18,7 @@
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
-@REM Maven2 Start Up Batch script
+@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@@ -26,7 +26,7 @@
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
-@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@@ -37,7 +37,7 @@
@echo off
@REM set title of command window
title %0
-@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
+@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
@@ -120,23 +120,44 @@ SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
-set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
-FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
- IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+
+FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
+ IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
- echo Found %WRAPPER_JAR%
+ if "%MVNW_VERBOSE%" == "true" (
+ echo Found %WRAPPER_JAR%
+ )
) else (
- echo Couldn't find %WRAPPER_JAR%, downloading it ...
- echo Downloading from: %DOWNLOAD_URL%
- powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
- echo Finished downloading %WRAPPER_JAR%
+ if not "%MVNW_REPOURL%" == "" (
+ SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+ )
+ if "%MVNW_VERBOSE%" == "true" (
+ echo Couldn't find %WRAPPER_JAR%, downloading it ...
+ echo Downloading from: %DOWNLOAD_URL%
+ )
+
+ powershell -Command "&{"^
+ "$webclient = new-object System.Net.WebClient;"^
+ "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
+ "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
+ "}"^
+ "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
+ "}"
+ if "%MVNW_VERBOSE%" == "true" (
+ echo Finished downloading %WRAPPER_JAR%
+ )
)
@REM End of extension
+@REM Provide a "standardized" way to retrieve the CLI args that will
+@REM work with both Windows and non-Windows executions.
+set MAVEN_CMD_LINE_ARGS=%*
+
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
diff --git a/package.json b/package.json
new file mode 100644
index 0000000000..4689506b3f
--- /dev/null
+++ b/package.json
@@ -0,0 +1,10 @@
+{
+ "dependencies": {
+ "antora": "3.2.0-alpha.6",
+ "@antora/atlas-extension": "1.0.0-alpha.2",
+ "@antora/collector-extension": "1.0.0-alpha.7",
+ "@asciidoctor/tabs": "1.0.0-beta.6",
+ "@springio/antora-extensions": "1.13.0",
+ "@springio/asciidoctor-extensions": "1.0.0-alpha.11"
+ }
+}
diff --git a/pom.xml b/pom.xml
index 191786d373..4fcfd20c49 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,12 +5,12 @@
org.springframework.dataspring-data-elasticsearch
- 4.2.0-SNAPSHOT
+ 6.0.0-SNAPSHOTorg.springframework.data.buildspring-data-parent
- 2.5.0-SNAPSHOT
+ 4.0.0-SNAPSHOTSpring Data Elasticsearch
@@ -18,13 +18,26 @@
https://github.com/spring-projects/spring-data-elasticsearch
- 2.6
- 7.11.1
- 2.13.3
- 4.1.52.Final
- 2.5.0-SNAPSHOT
- 1.15.1
+ 4.0.0-SNAPSHOT
+
+
+ 9.0.2
+
+ 0.19.0
+ 2.23.1
+ 1.5.3
+ 1.20.0
+ 3.9.1
+
spring.data.elasticsearch
+
+
+ test
+ integration-test
@@ -74,30 +87,12 @@
https://github.com/spring-projects/spring-data-elasticsearch/issues
-
-
-
- io.netty
- netty-bom
- ${netty}
- pom
- import
-
-
-
-
org.springframeworkspring-context
-
-
- commons-logging
- commons-logging
-
-
@@ -119,90 +114,105 @@
true
-
- io.projectreactor.netty
- reactor-netty-http
- true
-
-
io.projectreactorreactor-testtest
-
- commons-lang
- commons-lang
- ${commonslang}
- test
+ co.elastic.clients
+ elasticsearch-java
+ ${elasticsearch-java}
+
+
+ commons-logging
+ commons-logging
+
+
+
+
+
+ org.elasticsearch.client
+ elasticsearch-rest-client
+ ${elasticsearch-java}
+
+
+ commons-logging
+ commons-logging
+
+
-
- joda-time
- joda-time
- ${jodatime}
+ com.querydsl
+ querydsl-core
+ ${querydsl}true
-
+
- org.elasticsearch.client
- transport
- ${elasticsearch}
+ com.fasterxml.jackson.core
+ jackson-core
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
-
- org.elasticsearch.plugin
- transport-netty4-client
- ${elasticsearch}
+ javax.interceptor
+ javax.interceptor-api
+ 1.2.2
+ test
- org.elasticsearch.client
- elasticsearch-rest-high-level-client
- ${elasticsearch}
-
-
- commons-logging
- commons-logging
-
-
+ jakarta.enterprise
+ jakarta.enterprise.cdi-api
+ provided
+ true
- org.slf4j
- log4j-over-slf4j
- ${slf4j}
+ jakarta.annotation
+ jakarta.annotation-api
+ ${jakarta-annotation-api}test
- org.apache.logging.log4j
- log4j-core
- ${log4j}
+ org.apache.openwebbeans
+ openwebbeans-se
+ ${webbeans}test
-
+
- com.fasterxml.jackson.core
- jackson-core
+ org.jetbrains.kotlin
+ kotlin-stdlib
+ true
+
- com.fasterxml.jackson.core
- jackson-databind
+ org.jetbrains.kotlin
+ kotlin-reflect
+ true
-
- javax.enterprise
- cdi-api
- ${cdi}
- provided
+ org.jetbrains.kotlinx
+ kotlinx-coroutines-core
+ true
+
+
+
+ org.jetbrains.kotlinx
+ kotlinx-coroutines-reactortrue
@@ -219,41 +229,43 @@
+
+ org.jetbrains.kotlinx
+ kotlinx-coroutines-test
+ test
+ true
+
+
+
+ org.slf4j
+ log4j-over-slf4j
+ ${slf4j}
+ test
+ org.apache.logging.log4j
- log4j-to-slf4j
+ log4j-core${log4j}test
-
- org.apache.openwebbeans.test
- cditest-owb
- 1.2.8
+ org.apache.logging.log4j
+ log4j-to-slf4j
+ ${log4j}test
-
-
- org.apache.geronimo.specs
- geronimo-jcdi_1.0_spec
-
-
- org.apache.geronimo.specs
- geronimo-atinject_1.0_spec
-
- org.skyscreamerjsonassert
- 1.5.0
+ ${jsonassert}test
- com.github.tomakehurst
- wiremock-jre8
- 2.26.3
+ org.wiremock
+ wiremock
+ ${wiremock}test
@@ -271,7 +283,7 @@
io.spectohoverfly-java-junit5
- 0.13.1
+ ${hoverfly}test
@@ -304,6 +316,21 @@
test
+
+
+ commons-codec
+ commons-codec
+ 1.15
+ test
+
+
+
+ com.tngtech.archunit
+ archunit-junit5
+ ${archunit}
+ test
+
+
@@ -325,9 +352,6 @@
-
org.apache.maven.pluginsmaven-surefire-plugin
@@ -346,7 +370,7 @@
default-test
- test
+ ${mvn.unit-test.goal}test
@@ -354,55 +378,45 @@
integration-test
-
+
- integration-test
- integration-test
+ integration-test-elasticsearch
+ ${mvn.integration-test-elasticsearch.goal}testintegration-test
+
+ elasticsearch
+
-
- org.pitest
- pitest-maven
- 1.5.2
-
-
- org.pitest
- pitest-junit5-plugin
- 0.12
-
-
-
- integration-test
-
- org.springframework.data.elasticsearch.core.geo.*
-
- toString
-
-
-
org.apache.maven.pluginsmaven-assembly-plugin
- org.asciidoctor
- asciidoctor-maven-plugin
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+
+
+ org.apache.logging.log4j
+ log4j-core
+ ${log4j}
+
+
+
-
ci
-
@@ -419,34 +433,56 @@
**/*
- .git/**/*,target/**/*,**/target/**/*,.idea/**/*,**/spring.schemas,**/*.svg,mvnw,mvnw.cmd,**/*.policy
+
+ .git/**/*,target/**/*,**/target/**/*,.idea/**/*,**/spring.schemas,**/*.svg,mvnw,mvnw.cmd,**/*.policy
+ ./
+
+
+ antora-process-resources
+
+
+
+ src/main/antora/resources/antora-resources
+ true
+
+
+
+
+ antora
+
+
+
+ org.antora
+ antora-maven-plugin
+
+
+
+
- spring-libs-snapshot
- https://repo.spring.io/libs-snapshot
+ spring-snapshot
+ https://repo.spring.io/snapshot
+
+ true
+
+
+ false
+
+
+
+ spring-milestone
+ https://repo.spring.io/milestone
-
-
- spring-plugins-release
- https://repo.spring.io/plugins-release
-
-
- bintray-plugins
- bintray-plugins
- https://jcenter.bintray.com
-
-
-
diff --git a/settings.xml b/settings.xml
new file mode 100644
index 0000000000..b3227cc110
--- /dev/null
+++ b/settings.xml
@@ -0,0 +1,29 @@
+
+
+
+
+ spring-plugins-release
+ ${env.ARTIFACTORY_USR}
+ ${env.ARTIFACTORY_PSW}
+
+
+ spring-libs-snapshot
+ ${env.ARTIFACTORY_USR}
+ ${env.ARTIFACTORY_PSW}
+
+
+ spring-libs-milestone
+ ${env.ARTIFACTORY_USR}
+ ${env.ARTIFACTORY_PSW}
+
+
+ spring-libs-release
+ ${env.ARTIFACTORY_USR}
+ ${env.ARTIFACTORY_PSW}
+
+
+
+
\ No newline at end of file
diff --git a/src/main/antora/antora-playbook.yml b/src/main/antora/antora-playbook.yml
new file mode 100644
index 0000000000..1a4f73c1e6
--- /dev/null
+++ b/src/main/antora/antora-playbook.yml
@@ -0,0 +1,40 @@
+# PACKAGES antora@3.2.0-alpha.2 @antora/atlas-extension:1.0.0-alpha.1 @antora/collector-extension@1.0.0-alpha.3 @springio/antora-extensions@1.1.0-alpha.2 @asciidoctor/tabs@1.0.0-alpha.12 @opendevise/antora-release-line-extension@1.0.0-alpha.2
+#
+# The purpose of this Antora playbook is to build the docs in the current branch.
+antora:
+ extensions:
+ - require: '@springio/antora-extensions'
+ root_component_name: 'data-elasticsearch'
+site:
+ title: Spring Data Elasticsearch
+ url: https://docs.spring.io/spring-data-elasticsearch/reference/
+content:
+ sources:
+ - url: ./../../..
+ branches: HEAD
+ start_path: src/main/antora
+ worktrees: true
+ - url: https://github.com/spring-projects/spring-data-commons
+ # Refname matching:
+ # https://docs.antora.org/antora/latest/playbook/content-refname-matching/
+ branches: [ main, 3.4.x, 3.3.x ]
+ start_path: src/main/antora
+asciidoc:
+ attributes:
+ hide-uri-scheme: '@'
+ tabs-sync-option: '@'
+ extensions:
+ - '@asciidoctor/tabs'
+ - '@springio/asciidoctor-extensions'
+ - '@springio/asciidoctor-extensions/javadoc-extension'
+ sourcemap: true
+urls:
+ latest_version_segment: ''
+runtime:
+ log:
+ failure_level: warn
+ format: pretty
+ui:
+ bundle:
+ url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.16/ui-bundle.zip
+ snapshot: true
diff --git a/src/main/antora/antora.yml b/src/main/antora/antora.yml
new file mode 100644
index 0000000000..2348fca613
--- /dev/null
+++ b/src/main/antora/antora.yml
@@ -0,0 +1,17 @@
+name: data-elasticsearch
+version: true
+title: Spring Data Elasticsearch
+nav:
+ - modules/ROOT/nav.adoc
+ext:
+ collector:
+ - run:
+ command: ./mvnw validate process-resources -am -Pantora-process-resources
+ local: true
+ scan:
+ dir: target/classes/
+ - run:
+ command: ./mvnw package -Pdistribute
+ local: true
+ scan:
+ dir: target/antora
diff --git a/src/main/antora/modules/ROOT/nav.adoc b/src/main/antora/modules/ROOT/nav.adoc
new file mode 100644
index 0000000000..fa1ee8110d
--- /dev/null
+++ b/src/main/antora/modules/ROOT/nav.adoc
@@ -0,0 +1,47 @@
+* xref:index.adoc[Overview]
+** xref:commons/upgrade.adoc[]
+** xref:migration-guides.adoc[]
+*** xref:migration-guides/migration-guide-3.2-4.0.adoc[]
+*** xref:migration-guides/migration-guide-4.0-4.1.adoc[]
+*** xref:migration-guides/migration-guide-4.1-4.2.adoc[]
+*** xref:migration-guides/migration-guide-4.2-4.3.adoc[]
+*** xref:migration-guides/migration-guide-4.3-4.4.adoc[]
+*** xref:migration-guides/migration-guide-4.4-5.0.adoc[]
+*** xref:migration-guides/migration-guide-5.0-5.1.adoc[]
+*** xref:migration-guides/migration-guide-5.1-5.2.adoc[]
+*** xref:migration-guides/migration-guide-5.2-5.3.adoc[]
+*** xref:migration-guides/migration-guide-5.3-5.4.adoc[]
+*** xref:migration-guides/migration-guide-5.4-5.5.adoc[]
+*** xref:migration-guides/migration-guide-5.5-6.0.adoc[]
+
+
+* xref:elasticsearch.adoc[]
+** xref:elasticsearch/clients.adoc[]
+** xref:elasticsearch/object-mapping.adoc[]
+** xref:elasticsearch/template.adoc[]
+** xref:elasticsearch/reactive-template.adoc[]
+** xref:elasticsearch/entity-callbacks.adoc[]
+** xref:elasticsearch/auditing.adoc[]
+** xref:elasticsearch/join-types.adoc[]
+** xref:elasticsearch/routing.adoc[]
+** xref:elasticsearch/misc.adoc[]
+** xref:elasticsearch/scripted-and-runtime-fields.adoc[]
+
+* xref:repositories.adoc[]
+** xref:repositories/core-concepts.adoc[]
+** xref:repositories/definition.adoc[]
+** xref:elasticsearch/repositories/elasticsearch-repositories.adoc[]
+** xref:elasticsearch/repositories/reactive-elasticsearch-repositories.adoc[]
+** xref:repositories/create-instances.adoc[]
+** xref:repositories/query-methods-details.adoc[]
+** xref:elasticsearch/repositories/elasticsearch-repository-queries.adoc[]
+** xref:repositories/projections.adoc[]
+** xref:repositories/custom-implementations.adoc[]
+** xref:repositories/core-domain-events.adoc[]
+** xref:repositories/null-handling.adoc[]
+** xref:elasticsearch/repositories/cdi-integration.adoc[]
+** xref:repositories/query-keywords-reference.adoc[]
+** xref:repositories/query-return-types-reference.adoc[]
+
+* xref:attachment$api/java/index.html[Javadoc,role=link-external,window=_blank]
+* https://github.com/spring-projects/spring-data-commons/wiki[Wiki,role=link-external,window=_blank]
diff --git a/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc
new file mode 100644
index 0000000000..51a9189aa0
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$upgrade.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch.adoc
new file mode 100644
index 0000000000..fe0bddbf20
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch.adoc
@@ -0,0 +1,16 @@
+[[elasticsearch.core]]
+= Elasticsearch Support
+:page-section-summary-toc: 1
+
+Spring Data support for Elasticsearch contains a wide range of features:
+
+* Spring configuration support for various xref:elasticsearch/clients.adoc[Elasticsearch clients].
+* The xref:elasticsearch/template.adoc[`ElasticsearchTemplate` and `ReactiveElasticsearchTemplate`] helper classes that provide object mapping between ES index operations and POJOs.
+* xref:elasticsearch/template.adoc#exception-translation[Exception translation] into Spring's portable {springDocsUrl}data-access.html#dao-exceptions[Data Access Exception Hierarchy].
+* Feature rich xref:elasticsearch/object-mapping.adoc[object mapping] integrated with _Spring's_ {springDocsUrl}core.html#core-convert[Conversion Service].
+* xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations[Annotation-based mapping] metadata that is extensible to support other metadata formats.
+* Java-based xref:elasticsearch/template.adoc#elasticsearch.template.query[query, criteria, and update DSLs].
+* Automatic implementation of xref:repositories.adoc[imperative and reactive `Repository` interfaces] including support for xref:repositories/custom-implementations.adoc[custom query methods].
+
+For most data-oriented tasks, you can use the `[Reactive]ElasticsearchTemplate` or the `Repository` support, both of which use the rich object-mapping functionality.
+Spring Data Elasticsearch uses consistent naming conventions on objects in various APIs to those found in the Elasticsearch Java client so that they are familiar and so that you can map your existing knowledge onto the Spring APIs.
diff --git a/src/main/asciidoc/reference/elasticsearch-auditing.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/auditing.adoc
similarity index 86%
rename from src/main/asciidoc/reference/elasticsearch-auditing.adoc
rename to src/main/antora/modules/ROOT/pages/elasticsearch/auditing.adoc
index f465e4e742..f9633dec4f 100644
--- a/src/main/asciidoc/reference/elasticsearch-auditing.adoc
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/auditing.adoc
@@ -1,7 +1,8 @@
[[elasticsearch.auditing]]
-== Elasticsearch Auditing
+= Elasticsearch Auditing
-=== Preparing entities
+[[elasticsearch.auditing.preparing]]
+== Preparing entities
In order for the auditing code to be able to decide whether an entity instance is new, the entity must implement the `Persistable` interface which is defined as follows:
@@ -9,7 +10,7 @@ In order for the auditing code to be able to decide whether an entity instance i
----
package org.springframework.data.domain;
-import org.springframework.lang.Nullable;
+import org.jspecify.annotations.Nullable;
public interface Persistable {
@Nullable
@@ -30,11 +31,15 @@ public class Person implements Persistable {
@Id private Long id;
private String lastName;
private String firstName;
+ @CreatedDate
@Field(type = FieldType.Date, format = DateFormat.basic_date_time)
private Instant createdDate;
+ @CreatedBy
private String createdBy
@Field(type = FieldType.Date, format = DateFormat.basic_date_time)
+ @LastModifiedDate
private Instant lastModifiedDate;
+ @LastModifiedBy
private String lastModifiedBy;
public Long getId() { // <.>
@@ -50,7 +55,8 @@ public class Person implements Persistable {
<.> the getter is the required implementation from the interface
<.> an object is new if it either has no `id` or none of fields containing creation attributes are set.
-=== Activating auditing
+[[elasticsearch.auditing.activating]]
+== Activating auditing
After the entities have been set up and providing the `AuditorAware` - or `ReactiveAuditorAware` - the Auditing must be activated by setting the `@EnableElasticsearchAuditing` on a configuration class:
@@ -75,5 +81,5 @@ class MyConfiguration {
}
----
-If your code contains more than one `AuditorAware` bean for different types, you must provide the name of the bean to use as an argument to the `auditorAwareRef` parameter of the
- `@EnableElasticsearchAuditing` annotation.
+If your code contains more than one `AuditorAware` bean for different types, you must provide the name of the bean to use as an argument to the `auditorAwareRef` parameter of the
+ `@EnableElasticsearchAuditing` annotation.
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc
new file mode 100644
index 0000000000..0cf7d5ea3c
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc
@@ -0,0 +1,234 @@
+[[elasticsearch.clients]]
+= Elasticsearch Clients
+
+This chapter illustrates configuration and usage of supported Elasticsearch client implementations.
+
+Spring Data Elasticsearch operates upon an Elasticsearch client (provided by Elasticsearch client libraries) that is connected to a single Elasticsearch node or a cluster.
+Although the Elasticsearch Client can be used directly to work with the cluster, applications using Spring Data Elasticsearch normally use the higher level abstractions of xref:elasticsearch/template.adoc[Elasticsearch Operations] and xref:elasticsearch/repositories/elasticsearch-repositories.adoc[Elasticsearch Repositories].
+
+[[elasticsearch.clients.restclient]]
+== Imperative Rest Client
+
+To use the imperative (non-reactive) client, a configuration bean must be configured like this:
+
+====
+[source,java]
+----
+import org.springframework.data.elasticsearch.client.elc.ElasticsearchConfiguration;
+
+@Configuration
+public class MyClientConfig extends ElasticsearchConfiguration {
+
+ @Override
+ public ClientConfiguration clientConfiguration() {
+ return ClientConfiguration.builder() <.>
+ .connectedTo("localhost:9200")
+ .build();
+ }
+}
+----
+
+<.> for a detailed description of the builder methods see xref:elasticsearch/clients.adoc#elasticsearch.clients.configuration[Client Configuration]
+====
+
+The javadoc:org.springframework.data.elasticsearch.client.elc.ElasticsearchConfiguration[] class allows further configuration by overriding for example the `jsonpMapper()` or `transportOptions()` methods.
+
+
+The following beans can then be injected in other Spring components:
+
+====
+[source,java]
+----
+import org.springframework.beans.factory.annotation.Autowired;
+
+@Autowired
+ElasticsearchOperations operations; <.>
+
+@Autowired
+ElasticsearchClient elasticsearchClient; <.>
+
+@Autowired
+RestClient restClient; <.>
+
+@Autowired
+JsonpMapper jsonpMapper; <.>
+----
+
+<.> an implementation of javadoc:org.springframework.data.elasticsearch.core.ElasticsearchOperations[]
+<.> the `co.elastic.clients.elasticsearch.ElasticsearchClient` that is used.
+<.> the low level `RestClient` from the Elasticsearch libraries
+<.> the `JsonpMapper` used by the Elasticsearch `Transport`
+====
+
+Basically one should just use the javadoc:org.springframework.data.elasticsearch.core.ElasticsearchOperations[] to interact with the Elasticsearch cluster.
+When using repositories, this instance is used under the hood as well.
+
+[[elasticsearch.clients.reactiverestclient]]
+== Reactive Rest Client
+
+When working with the reactive stack, the configuration must be derived from a different class:
+
+====
+[source,java]
+----
+import org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchConfiguration;
+
+@Configuration
+public class MyClientConfig extends ReactiveElasticsearchConfiguration {
+
+ @Override
+ public ClientConfiguration clientConfiguration() {
+ return ClientConfiguration.builder() <.>
+ .connectedTo("localhost:9200")
+ .build();
+ }
+}
+----
+
+<.> for a detailed description of the builder methods see xref:elasticsearch/clients.adoc#elasticsearch.clients.configuration[Client Configuration]
+====
+
+The javadoc:org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchConfiguration[] class allows further configuration by overriding for example the `jsonpMapper()` or `transportOptions()` methods.
+
+The following beans can then be injected in other Spring components:
+
+====
+[source,java]
+----
+@Autowired
+ReactiveElasticsearchOperations operations; <.>
+
+@Autowired
+ReactiveElasticsearchClient elasticsearchClient; <.>
+
+@Autowired
+RestClient restClient; <.>
+
+@Autowired
+JsonpMapper jsonpMapper; <.>
+----
+
+
+
+<.> an implementation of javadoc:org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations[]
+<.> the `org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchClient` that is used.
+This is a reactive implementation based on the Elasticsearch client implementation.
+<.> the low level `RestClient` from the Elasticsearch libraries
+<.> the `JsonpMapper` used by the Elasticsearch `Transport`
+====
+
+Basically one should just use the javadoc:org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations[] to interact with the Elasticsearch cluster.
+When using repositories, this instance is used under the hood as well.
+
+[[elasticsearch.clients.configuration]]
+== Client Configuration
+
+Client behaviour can be changed via the javadoc:org.springframework.data.elasticsearch.client.ClientConfiguration[] that allows to set options for SSL, connect and socket timeouts, headers and other parameters.
+
+.Client Configuration
+====
+[source,java]
+----
+import org.springframework.data.elasticsearch.client.ClientConfiguration;
+import org.springframework.data.elasticsearch.support.HttpHeaders;
+
+import static org.springframework.data.elasticsearch.client.elc.ElasticsearchClients.*;
+
+HttpHeaders httpHeaders = new HttpHeaders();
+httpHeaders.add("some-header", "on every request"); <.>
+
+ClientConfiguration clientConfiguration = ClientConfiguration.builder()
+ .connectedTo("localhost:9200", "localhost:9291") <.>
+ .usingSsl() <.>
+ .withProxy("localhost:8888") <.>
+ .withPathPrefix("ela") <.>
+ .withConnectTimeout(Duration.ofSeconds(5)) <.>
+ .withSocketTimeout(Duration.ofSeconds(3)) <.>
+  .withDefaultHeaders(httpHeaders)                  <.>
+ .withBasicAuth(username, password) <.>
+ .withHeaders(() -> { <.>
+ HttpHeaders headers = new HttpHeaders();
+ headers.add("currentTime", LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
+ return headers;
+ })
+ .withClientConfigurer( <.>
+ ElasticsearchHttpClientConfigurationCallback.from(clientBuilder -> {
+ // ...
+ return clientBuilder;
+ }))
+ . // ... other options
+ .build();
+
+----
+
+<.> Define default headers, if they need to be customized
+<.> Use the builder to provide cluster addresses, set default `HttpHeaders` or enable SSL.
+<.> Optionally enable SSL. There exist overloads of this function that can take a `SSLContext` or as an alternative the fingerprint of the certificate as it is output by Elasticsearch 8 on startup.
+<.> Optionally set a proxy.
+<.> Optionally set a path prefix, mostly used when different clusters are behind some reverse proxy.
+<.> Set the connection timeout.
+<.> Set the socket timeout.
+<.> Optionally set headers.
+<.> Add basic authentication.
+<.> A `Supplier` function can be specified which is called every time before a request is sent to Elasticsearch - here, as an example, the current time is written in a header.
+<.> a function to configure the created client (see xref:elasticsearch/clients.adoc#elasticsearch.clients.configuration.callbacks[Client configuration callbacks]), can be added multiple times.
+====
+
+IMPORTANT: Adding a Header supplier as shown in above example allows to inject headers that may change over the time, like authentication JWT tokens.
+If this is used in the reactive setup, the supplier function *must not* block!
+
+[[elasticsearch.clients.configuration.callbacks]]
+=== Client configuration callbacks
+
+The javadoc:org.springframework.data.elasticsearch.client.ClientConfiguration[] class offers the most common parameters to configure the client.
+In the case this is not enough, the user can add callback functions by using the `withClientConfigurer(ClientConfigurationCallback<?>)` method.
+
+The following callbacks are provided:
+
+[[elasticsearch.clients.configuration.callbacks.rest]]
+==== Configuration of the low level Elasticsearch `RestClient`:
+
+This callback provides a `org.elasticsearch.client.RestClientBuilder` that can be used to configure the Elasticsearch
+`RestClient`:
+====
+[source,java]
+----
+ClientConfiguration.builder()
+ .connectedTo("localhost:9200", "localhost:9291")
+ .withClientConfigurer(ElasticsearchClients.ElasticsearchRestClientConfigurationCallback.from(restClientBuilder -> {
+ // configure the Elasticsearch RestClient
+ return restClientBuilder;
+ }))
+ .build();
+----
+====
+
+[[elasticsearch.clients.configurationcallbacks.httpasync]]
+==== Configuration of the HttpAsyncClient used by the low level Elasticsearch `RestClient`:
+
+This callback provides a `org.apache.http.impl.nio.client.HttpAsyncClientBuilder` to configure the HttpCLient that is
+used by the `RestClient`.
+
+====
+[source,java]
+----
+ClientConfiguration.builder()
+ .connectedTo("localhost:9200", "localhost:9291")
+ .withClientConfigurer(ElasticsearchClients.ElasticsearchHttpClientConfigurationCallback.from(httpAsyncClientBuilder -> {
+ // configure the HttpAsyncClient
+ return httpAsyncClientBuilder;
+ }))
+ .build();
+----
+====
+
+[[elasticsearch.clients.logging]]
+== Client Logging
+
+To see what is actually sent to and received from the server `Request` / `Response` logging on the transport level needs to be turned on as outlined in the snippet below.
+This can be enabled in the Elasticsearch client by setting the level of the `tracer` package to "trace" (see
+https://www.elastic.co/guide/en/elasticsearch/client/java-api-client/current/java-rest-low-usage-logging.html)
+
+.Enable transport layer logging
+[source,xml]
+----
+<logger name="tracer" level="trace"/>
+----
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc
new file mode 100644
index 0000000000..d4a9c565d0
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc
@@ -0,0 +1,121 @@
+[[new-features]]
+= What's new
+
+[[new-features.6-0-0]]
+== New in Spring Data Elasticsearch 6.0
+
+* Upgrade to Spring 7
+* Switch to jspecify nullability annotations
+* Upgrade to Elasticsearch 9.0.2
+
+
+[[new-features.5-5-0]]
+== New in Spring Data Elasticsearch 5.5
+
+* Upgrade to Elasticsearch 8.18.1.
+* Add support for the `@SearchTemplateQuery` annotation on repository methods.
+* Scripted field properties of type collection can be populated from scripts returning arrays.
+
+[[new-features.5-4-0]]
+== New in Spring Data Elasticsearch 5.4
+
+* Upgrade to Elasticsearch 8.15.3.
+* Allow to customize the mapped type name for `@InnerField` and `@Field` annotations.
+* Support for Elasticsearch SQL.
+* Add support for retrieving request executionDuration.
+
+[[new-features.5-3-0]]
+== New in Spring Data Elasticsearch 5.3
+
+* Upgrade to Elasticsearch 8.13.2.
+* Add support for highlight queries in highlighting.
+* Add shard statistics to the `SearchHit` class.
+* Add support for multi search template API.
+* Add support for SpEL in @Query.
+* Add support for field aliases in the index mapping.
+* Add support for has_child and has_parent queries.
+
+[[new-features.5-2-0]]
+== New in Spring Data Elasticsearch 5.2
+
+* Upgrade to Elasticsearch 8.11.1
+* The `JsonpMapper` for Elasticsearch is now configurable and provided as bean.
+* Improved AOT runtime hints for Elasticsearch client library classes.
+* Add Kotlin extensions and repository coroutine support.
+* Introducing `VersionConflictException` class thrown in case that Elasticsearch reports a 409 error with a version conflict.
+* Enable MultiField annotation on property getter
+* Support nested sort option
+* Improved scripted and runtime field support
+* Improved refresh policy support
+
+[[new-features.5-1-0]]
+== New in Spring Data Elasticsearch 5.1
+
+* Upgrade to Elasticsearch 8.7.1
+* Allow specification of the TLS certificate when connecting to an Elasticsearch 8 cluster
+
+[[new-features.5-0-0]]
+== New in Spring Data Elasticsearch 5.0
+
+* Upgrade to Java 17 baseline
+* Upgrade to Spring Framework 6
+* Upgrade to Elasticsearch 8.5.0
+* Use the new Elasticsearch client library
+
+[[new-features.4-4-0]]
+== New in Spring Data Elasticsearch 4.4
+
+* Introduction of new imperative and reactive clients using the classes from the new Elasticsearch Java client
+* Upgrade to Elasticsearch 7.17.3.
+
+[[new-features.4-3-0]]
+== New in Spring Data Elasticsearch 4.3
+
+* Upgrade to Elasticsearch 7.15.2.
+* Allow runtime_fields to be defined in the index mapping.
+* Add native support for range field types by using a range object.
+* Add repository search for nullable or empty properties.
+* Enable custom converters for single fields.
+* Supply a custom `Sort.Order` providing Elasticsearch specific parameters.
+
+[[new-features.4-2-0]]
+== New in Spring Data Elasticsearch 4.2
+
+* Upgrade to Elasticsearch 7.10.0.
+* Support for custom routing values
+
+[[new-features.4-1-0]]
+== New in Spring Data Elasticsearch 4.1
+
+* Uses Spring 5.3.
+* Upgrade to Elasticsearch 7.9.3.
+* Improved API for alias management.
+* Introduction of `ReactiveIndexOperations` for index management.
+* Index templates support.
+* Support for Geo-shape data with GeoJson.
+
+[[new-features.4-0-0]]
+== New in Spring Data Elasticsearch 4.0
+
+* Uses Spring 5.2.
+* Upgrade to Elasticsearch 7.6.2.
+* Deprecation of `TransportClient` usage.
+* Implements most of the mapping-types available for the index mappings.
+* Removal of the Jackson `ObjectMapper`, now using the xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[MappingElasticsearchConverter]
+* Cleanup of the API in the `*Operations` interfaces, grouping and renaming methods so that they match the Elasticsearch API, deprecating the old methods, aligning with other Spring Data modules.
+* Introduction of `SearchHit` class to represent a found document together with the relevant result metadata for this document (i.e. _sortValues_).
+* Introduction of the `SearchHits` class to represent a whole search result together with the metadata for the complete search result (i.e. _max_score_).
+* Introduction of `SearchPage` class to represent a paged result containing a `SearchHits` instance.
+* Introduction of the `GeoDistanceOrder` class to be able to create sorting by geographical distance
+* Implementation of Auditing Support
+* Implementation of lifecycle entity callbacks
+
+[[new-features.3-2-0]]
+== New in Spring Data Elasticsearch 3.2
+
+* Secured Elasticsearch cluster support with Basic Authentication and SSL transport.
+* Upgrade to Elasticsearch 6.8.1.
+* Reactive programming support with xref:elasticsearch/repositories/reactive-elasticsearch-repositories.adoc[Reactive Elasticsearch Repositories] and reactive Elasticsearch operations.
+* Introduction of the xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[ElasticsearchEntityMapper] as an alternative to the Jackson `ObjectMapper`.
+* Field name customization in `@Field`.
+* Support for Delete by Query.
diff --git a/src/main/asciidoc/reference/elasticsearch-entity-callbacks.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/entity-callbacks.adoc
similarity index 68%
rename from src/main/asciidoc/reference/elasticsearch-entity-callbacks.adoc
rename to src/main/antora/modules/ROOT/pages/elasticsearch/entity-callbacks.adoc
index fda1c1ad8c..cbc08eee39 100644
--- a/src/main/asciidoc/reference/elasticsearch-entity-callbacks.adoc
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/entity-callbacks.adoc
@@ -1,5 +1,7 @@
+include::{commons}@data-commons::page$entity-callbacks.adoc[]
+
[[elasticsearch.entity-callbacks]]
-= Elasticsearch EntityCallbacks
+== Store specific EntityCallbacks
Spring Data Elasticsearch uses the `EntityCallback` API internally for its auditing support and reacts on the following callbacks:
@@ -13,7 +15,13 @@ Spring Data Elasticsearch uses the `EntityCallback` API internally for its audit
| Reactive/BeforeConvertCallback
| `onBeforeConvert(T entity, IndexCoordinates index)`
-| Invoked before a domain object is converted to `org.springframework.data.elasticsearch.core.document.Document`. Can return the `entity` or a modified entity which then will be converted.
+| Invoked before a domain object is converted to `org.springframework.data.elasticsearch.core.document.Document`.
+Can return the `entity` or a modified entity which then will be converted.
+| `Ordered.LOWEST_PRECEDENCE`
+
+| Reactive/AfterLoadCallback
+| `onAfterLoad(Document document, Class type, IndexCoordinates indexCoordinates)`
+| Invoked after the result from Elasticsearch has been read into a `org.springframework.data.elasticsearch.core.document.Document`.
| `Ordered.LOWEST_PRECEDENCE`
| Reactive/AfterConvertCallback
@@ -32,4 +40,3 @@ Spring Data Elasticsearch uses the `EntityCallback` API internally for its audit
| `Ordered.LOWEST_PRECEDENCE`
|===
-
diff --git a/src/main/asciidoc/reference/elasticsearch-join-types.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/join-types.adoc
similarity index 86%
rename from src/main/asciidoc/reference/elasticsearch-join-types.adoc
rename to src/main/antora/modules/ROOT/pages/elasticsearch/join-types.adoc
index d588ee278f..a1bc3df192 100644
--- a/src/main/asciidoc/reference/elasticsearch-join-types.adoc
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/join-types.adoc
@@ -3,6 +3,7 @@
Spring Data Elasticsearch supports the https://www.elastic.co/guide/en/elasticsearch/reference/current/parent-join.html[Join data type] for creating the corresponding index mappings and for storing the relevant information.
+[[elasticsearch.jointype.setting-up]]
== Setting up the data
For an entity to be used in a parent child join relationship, it must have a property of type `JoinField` which must be annotated.
@@ -51,7 +52,7 @@ public class Statement {
return routing;
}
- public void setRouting(Routing routing) {
+ public void setRouting(String routing) {
this.routing = routing;
}
@@ -111,7 +112,7 @@ public class Statement {
}
}
----
-<.> for routing related info see <>
+<.> for routing related info see xref:elasticsearch/routing.adoc[Routing values]
<.> a question can have answers and comments
<.> an answer can have votes
<.> the `JoinField` property is used to combine the name (_question_, _answer_, _comment_ or _vote_) of the relation with the parent id.
@@ -160,6 +161,7 @@ Spring Data Elasticsearch will build the following mapping for this class:
----
====
+[[elasticsearch.jointype.storing]]
== Storing data
Given a repository for this class the following code inserts a question, two answers, a comment and a vote:
@@ -197,7 +199,7 @@ void init() {
repository.save(
Statement.builder()
.withText("+1 for the sun")
- ,withRouting(savedWeather.getId())
+ .withRouting(savedWeather.getId())
.withRelation(new JoinField<>("vote", sunnyAnswer.getId())) <5>
.build());
}
@@ -206,12 +208,13 @@ void init() {
<2> the first answer to the question
<3> the second answer
<4> a comment to the question
-<5> a vote for the first answer, this needs to have the routing set to the weather document, see <>.
+<5> a vote for the first answer, this needs to have the routing set to the weather document, see xref:elasticsearch/routing.adoc[Routing values].
====
+[[elasticsearch.jointype.retrieving]]
== Retrieving data
-Currently native search queries must be used to query the data, so there is no support from standard repository methods. <> can be used instead.
+Currently native queries must be used to query the data, so there is no support from standard repository methods. xref:repositories/custom-implementations.adoc[] can be used instead.
The following code shows as an example how to retrieve all entries that have a _vote_ (which must be _answers_, because only answers can have a vote) using an `ElasticsearchOperations` instance:
@@ -219,11 +222,18 @@ The following code shows as an example how to retrieve all entries that have a _
[source,java]
----
SearchHits hasVotes() {
- NativeSearchQuery query = new NativeSearchQueryBuilder()
- .withQuery(hasChildQuery("vote", matchAllQuery(), ScoreMode.None))
- .build();
- return operations.search(query, Statement.class);
+ Query query = NativeQuery.builder()
+ .withQuery(co.elastic.clients.elasticsearch._types.query_dsl.Query.of(qb -> qb
+ .hasChild(hc -> hc
+ .type("answer")
+ .queryName("vote")
+ .query(matchAllQueryAsQuery())
+ .scoreMode(ChildScoreMode.None)
+ )))
+ .build();
+
+ return operations.search(query, Statement.class);
}
----
====
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/misc.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/misc.adoc
new file mode 100644
index 0000000000..7f3ac8f0ff
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/misc.adoc
@@ -0,0 +1,453 @@
+[[elasticsearch.misc]]
+= Miscellaneous Elasticsearch Operation Support
+
+This chapter covers additional support for Elasticsearch operations that cannot be directly accessed via the repository interface.
+It is recommended to add those operations as custom implementation as described in xref:repositories/custom-implementations.adoc[] .
+
+[[elasticsearc.misc.index.settings]]
+== Index settings
+
+When creating Elasticsearch indices with Spring Data Elasticsearch different index settings can be defined by using the `@Setting` annotation.
+The following arguments are available:
+
+* `useServerConfiguration` does not send any settings parameters, so the Elasticsearch server configuration determines them.
+* `settingPath` refers to a JSON file defining the settings that must be resolvable in the classpath
+* `shards` the number of shards to use, defaults to _1_
+* `replicas` the number of replicas, defaults to _1_
+* `refreshInterval`, defaults to _"1s"_
+* `indexStoreType`, defaults to _"fs"_
+
+It is as well possible to define https://www.elastic.co/guide/en/elasticsearch/reference/7.11/index-modules-index-sorting.html[index sorting] (check the linked Elasticsearch documentation for the possible field types and values):
+
+====
+[source,java]
+----
+@Document(indexName = "entities")
+@Setting(
+ sortFields = { "secondField", "firstField" }, <.>
+ sortModes = { Setting.SortMode.max, Setting.SortMode.min }, <.>
+ sortOrders = { Setting.SortOrder.desc, Setting.SortOrder.asc },
+ sortMissingValues = { Setting.SortMissing._last, Setting.SortMissing._first })
+class Entity {
+ @Nullable
+ @Id private String id;
+
+ @Nullable
+ @Field(name = "first_field", type = FieldType.Keyword)
+ private String firstField;
+
+ @Nullable @Field(name = "second_field", type = FieldType.Keyword)
+ private String secondField;
+
+ // getter and setter...
+}
+----
+
+<.> when defining sort fields, use the name of the Java property (_firstField_), not the name that might be defined for Elasticsearch (_first_field_)
+<.> `sortModes`, `sortOrders` and `sortMissingValues` are optional, but if they are set, the number of entries must match the number of `sortFields` elements
+====
+
+[[elasticsearch.misc.mappings]]
+== Index Mapping
+
+When Spring Data Elasticsearch creates the index mapping with the `IndexOperations.createMapping()` methods, it uses the annotations described in xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations[Mapping Annotation Overview], especially the `@Field` annotation.
+In addition to that it is possible to add the `@Mapping` annotation to a class.
+This annotation has the following properties:
+
+* `mappingPath` a classpath resource in JSON format; if this is not empty it is used as the mapping, no other mapping processing is done.
+* `enabled` when set to false, this flag is written to the mapping and no further processing is done.
+* `dateDetection` and `numericDetection` set the corresponding properties in the mapping when not set to `DEFAULT`.
+* `dynamicDateFormats` when this String array is not empty, it defines the date formats used for automatic date detection.
+* `runtimeFieldsPath` a classpath resource in JSON format containing the definition of runtime fields which is written to the index mappings, for example:
+
+====
+[source,json]
+----
+{
+ "day_of_week": {
+ "type": "keyword",
+ "script": {
+ "source": "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))"
+ }
+ }
+}
+----
+====
+
+[[elasticsearch.misc.filter]]
+== Filter Builder
+
+Filter Builder improves query speed.
+
+====
+[source,java]
+----
+private ElasticsearchOperations operations;
+
+IndexCoordinates index = IndexCoordinates.of("sample-index");
+
+Query query = NativeQuery.builder()
+ .withQuery(q -> q
+ .matchAll(ma -> ma))
+ .withFilter( q -> q
+ .bool(b -> b
+ .must(m -> m
+ .term(t -> t
+ .field("id")
+ .value(documentId))
+ )))
+ .build();
+
+SearchHits sampleEntities = operations.search(query, SampleEntity.class, index);
+----
+====
+
+[[elasticsearch.scroll]]
+== Using Scroll For Big Result Set
+
+Elasticsearch has a scroll API for getting big result set in chunks.
+This is internally used by Spring Data Elasticsearch to provide the implementations of the `<T> SearchHitsIterator<T> SearchOperations.searchForStream(Query query, Class<T> clazz, IndexCoordinates index)` method.
+
+====
+[source,java]
+----
+IndexCoordinates index = IndexCoordinates.of("sample-index");
+
+Query searchQuery = NativeQuery.builder()
+ .withQuery(q -> q
+ .matchAll(ma -> ma))
+ .withFields("message")
+ .withPageable(PageRequest.of(0, 10))
+ .build();
+
+SearchHitsIterator<SampleEntity> stream = elasticsearchOperations.searchForStream(searchQuery, SampleEntity.class,
+index);
+
+List<SampleEntity> sampleEntities = new ArrayList<>();
+while (stream.hasNext()) {
+ sampleEntities.add(stream.next());
+}
+
+stream.close();
+----
+====
+
+There are no methods in the `SearchOperations` API to access the scroll id, if it should be necessary to access this, the following methods of the `AbstractElasticsearchTemplate` can be used (this is the base implementation for the different `ElasticsearchOperations` implementations):
+
+====
+[source,java]
+----
+
+@Autowired ElasticsearchOperations operations;
+
+AbstractElasticsearchTemplate template = (AbstractElasticsearchTemplate)operations;
+
+IndexCoordinates index = IndexCoordinates.of("sample-index");
+
+Query query = NativeQuery.builder()
+ .withQuery(q -> q
+ .matchAll(ma -> ma))
+ .withFields("message")
+ .withPageable(PageRequest.of(0, 10))
+ .build();
+
+SearchScrollHits<SampleEntity> scroll = template.searchScrollStart(1000, query, SampleEntity.class, index);
+
+String scrollId = scroll.getScrollId();
+List<SampleEntity> sampleEntities = new ArrayList<>();
+while (scroll.hasSearchHits()) {
+ sampleEntities.addAll(scroll.getSearchHits());
+ scrollId = scroll.getScrollId();
+ scroll = template.searchScrollContinue(scrollId, 1000, SampleEntity.class);
+}
+template.searchScrollClear(scrollId);
+----
+====
+
+To use the Scroll API with repository methods, the return type must be defined as `Stream` in the Elasticsearch Repository.
+The implementation of the method will then use the scroll methods from the ElasticsearchTemplate.
+
+====
+[source,java]
+----
+interface SampleEntityRepository extends Repository<SampleEntity, String> {
+
+  Stream<SampleEntity> findBy();
+
+}
+----
+====
+
+[[elasticsearch.misc.sorts]]
+== Sort options
+
+In addition to the default sort options described in xref:repositories/query-methods-details.adoc#repositories.paging-and-sorting[Paging and Sorting], Spring Data Elasticsearch provides the class `org.springframework.data.elasticsearch.core.query.Order` which derives from `org.springframework.data.domain.Sort.Order`.
+It offers additional parameters that can be sent to Elasticsearch when specifying the sorting of the result (see https://www.elastic.co/guide/en/elasticsearch/reference/7.15/sort-search-results.html).
+
+There also is the `org.springframework.data.elasticsearch.core.query.GeoDistanceOrder` class which can be used to have the result of a search operation ordered by geographical distance.
+
+If the class to be retrieved has a `GeoPoint` property named _location_, the following `Sort` would sort the results by distance to the given point:
+
+====
+[source,java]
+----
+Sort.by(new GeoDistanceOrder("location", new GeoPoint(48.137154, 11.5761247)))
+----
+====
+
+[[elasticsearch.misc.runtime-fields]]
+== Runtime Fields
+
+From version 7.12 on Elasticsearch has added the feature of runtime fields (https://www.elastic.co/guide/en/elasticsearch/reference/7.12/runtime.html).
+Spring Data Elasticsearch supports this in two ways:
+
+[[elasticsearch.misc.runtime-fields.index-mappings]]
+=== Runtime field definitions in the index mappings
+
+The first way to define runtime fields is by adding the definitions to the index mappings (see https://www.elastic.co/guide/en/elasticsearch/reference/7.12/runtime-mapping-fields.html).
+To use this approach in Spring Data Elasticsearch the user must provide a JSON file that contains the corresponding definition, for example:
+
+.runtime-fields.json
+====
+[source,json]
+----
+{
+ "day_of_week": {
+ "type": "keyword",
+ "script": {
+ "source": "emit(doc['@timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.FULL, Locale.ROOT))"
+ }
+ }
+}
+----
+====
+
+The path to this JSON file, which must be present on the classpath, must then be set in the `@Mapping` annotation of the entity:
+
+====
+[source,java]
+----
+@Document(indexName = "runtime-fields")
+@Mapping(runtimeFieldsPath = "/runtime-fields.json")
+public class RuntimeFieldEntity {
+ // properties, getter, setter,...
+}
+
+----
+====
+
+[[elasticsearch.misc.runtime-fields.query]]
+=== Runtime fields definitions set on a Query
+
+The second way to define runtime fields is by adding the definitions to a search query (see https://www.elastic.co/guide/en/elasticsearch/reference/7.12/runtime-search-request.html).
+The following code example shows how to do this with Spring Data Elasticsearch :
+
+The entity used is a simple object that has a `price` property:
+
+====
+[source,java]
+----
+@Document(indexName = "some_index_name")
+public class SomethingToBuy {
+
+ private @Id @Nullable String id;
+ @Nullable @Field(type = FieldType.Text) private String description;
+ @Nullable @Field(type = FieldType.Double) private Double price;
+
+ // getter and setter
+}
+
+----
+====
+
+The following query uses a runtime field that calculates a `priceWithTax` value by adding 19% to the price and uses this value in the search query to find all entities where `priceWithTax` is higher or equal than a given value:
+
+====
+[source,java]
+----
+RuntimeField runtimeField = new RuntimeField("priceWithTax", "double", "emit(doc['price'].value * 1.19)");
+Query query = new CriteriaQuery(new Criteria("priceWithTax").greaterThanEqual(16.5));
+query.addRuntimeField(runtimeField);
+
+SearchHits searchHits = operations.search(query, SomethingToBuy.class);
+----
+====
+
+This works with every implementation of the `Query` interface.
+
+[[elasticsearch.misc.point-in-time]]
+== Point In Time (PIT) API
+
+`ElasticsearchOperations` supports the point in time API of Elasticsearch (see https://www.elastic.co/guide/en/elasticsearch/reference/8.3/point-in-time-api.html).
+The following code snippet shows how to use this feature with a fictional `Person` class:
+
+====
+[source,java]
+----
+ElasticsearchOperations operations; // autowired
+Duration tenSeconds = Duration.ofSeconds(10);
+
+String pit = operations.openPointInTime(IndexCoordinates.of("person"), tenSeconds); <.>
+
+// create query for the pit
+Query query1 = new CriteriaQueryBuilder(Criteria.where("lastName").is("Smith"))
+ .withPointInTime(new Query.PointInTime(pit, tenSeconds)) <.>
+ .build();
+SearchHits searchHits1 = operations.search(query1, Person.class);
+// do something with the data
+
+// create 2nd query for the pit, use the id returned in the previous result
+Query query2 = new CriteriaQueryBuilder(Criteria.where("lastName").is("Miller"))
+ .withPointInTime(
+ new Query.PointInTime(searchHits1.getPointInTimeId(), tenSeconds)) <.>
+ .build();
+SearchHits searchHits2 = operations.search(query2, Person.class);
+// do something with the data
+
+operations.closePointInTime(searchHits2.getPointInTimeId()); <.>
+
+----
+
+<.> create a point in time for an index (can be multiple names) and a keep-alive duration and retrieve its id
+<.> pass that id into the query to search together with the next keep-alive value
+<.> for the next query, use the id returned from the previous search
+<.> when done, close the point in time using the last returned id
+====
+
+[[elasticsearch.misc.searchtemplates]]
+== Search Template support
+
+Use of the search template API is supported.
+To use this, it first is necessary to create a stored script.
+The `ElasticsearchOperations` interface extends `ScriptOperations` which provides the necessary functions.
+The example used here assumes that we have `Person` entity with a property named `firstName`.
+A search template script can be saved like this:
+
+====
+[source,java]
+----
+import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
+import org.springframework.data.elasticsearch.core.script.Script;
+
+operations.putScript( <.>
+ Script.builder()
+ .withId("person-firstname") <.>
+ .withLanguage("mustache") <.>
+ .withSource(""" <.>
+ {
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match": {
+ "firstName": "{{firstName}}" <.>
+ }
+ }
+ ]
+ }
+ },
+ "from": "{{from}}", <.>
+ "size": "{{size}}" <.>
+ }
+ """)
+ .build()
+);
+----
+
+<.> Use the `putScript()` method to store a search template script
+<.> The name / id of the script
+<.> Scripts that are used in search templates must be in the _mustache_ language.
+<.> The script source
+<.> The search parameter in the script
+<.> Paging request offset
+<.> Paging request size
+====
+
+To use a search template in a search query, Spring Data Elasticsearch provides the `SearchTemplateQuery`, an implementation of the `org.springframework.data.elasticsearch.core.query.Query` interface.
+
+NOTE: Although `SearchTemplateQuery` is an implementation of the `Query` interface, not all of the functionality provided by the base class is available for a `SearchTemplateQuery` like setting a `Pageable` or a `Sort`. Values for this functionality must be added to the stored script like shown in the following example for paging parameters. If these values are set on the `Query` object, they will be ignored.
+
+In the following code, we will add a call using a search template query to a custom repository implementation (see
+xref:repositories/custom-implementations.adoc[]) as an example how this can be integrated into a repository call.
+
+We first define the custom repository fragment interface:
+
+====
+[source,java]
+----
+interface PersonCustomRepository {
+ SearchPage findByFirstNameWithSearchTemplate(String firstName, Pageable pageable);
+}
+----
+====
+
+The implementation of this repository fragment looks like this:
+
+====
+[source,java]
+----
+public class PersonCustomRepositoryImpl implements PersonCustomRepository {
+
+ private final ElasticsearchOperations operations;
+
+ public PersonCustomRepositoryImpl(ElasticsearchOperations operations) {
+ this.operations = operations;
+ }
+
+ @Override
+ public SearchPage findByFirstNameWithSearchTemplate(String firstName, Pageable pageable) {
+
+ var query = SearchTemplateQuery.builder() <.>
+ .withId("person-firstname") <.>
+ .withParams(
+ Map.of( <.>
+ "firstName", firstName,
+ "from", pageable.getOffset(),
+ "size", pageable.getPageSize()
+ )
+ )
+ .build();
+
+ SearchHits searchHits = operations.search(query, Person.class); <.>
+
+ return SearchHitSupport.searchPageFor(searchHits, pageable);
+ }
+}
+----
+
+<.> Create a `SearchTemplateQuery`
+<.> Provide the id of the search template
+<.> The parameters are passed in a `Map`
+<.> Do the search in the same way as with the other query types.
+====
+
+[[elasticsearch.misc.nested-sort]]
+== Nested sort
+Spring Data Elasticsearch supports sorting within nested objects (https://www.elastic.co/guide/en/elasticsearch/reference/8.9/sort-search-results.html#nested-sorting)
+
+The following example, taken from the `org.springframework.data.elasticsearch.core.query.sort.NestedSortIntegrationTests` class, shows how to define the nested sort.
+
+====
+[source,java]
+----
+var filter = StringQuery.builder("""
+ { "term": {"movies.actors.sex": "m"} }
+ """).build();
+var order = new org.springframework.data.elasticsearch.core.query.Order(Sort.Direction.DESC,
+ "movies.actors.yearOfBirth")
+ .withNested(
+ Nested.builder("movies")
+ .withNested(
+ Nested.builder("movies.actors")
+ .withFilter(filter)
+ .build())
+ .build());
+
+var query = Query.findAll().addSort(Sort.by(order));
+
+----
+====
+
+About the filter query: It is not possible to use a `CriteriaQuery` here, as this query would be converted into an Elasticsearch nested query which does not work in the filter context. So only `StringQuery` or `NativeQuery` can be used here. When using one of these, like the term query above, the Elasticsearch field names must be used, so take care, when these are redefined with the `@Field(name="...")` definition.
+
+For the definition of the order path and the nested paths, the Java entity property names should be used.
diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/object-mapping.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/object-mapping.adoc
new file mode 100644
index 0000000000..6ca12728c0
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/elasticsearch/object-mapping.adoc
@@ -0,0 +1,478 @@
+[[elasticsearch.mapping]]
+= Elasticsearch Object Mapping
+
+Spring Data Elasticsearch Object Mapping is the process that maps a Java object - the domain entity - into the JSON representation that is stored in Elasticsearch and back.
+The class that is internally used for this mapping is the
+`MappingElasticsearchConverter`.
+
+[[elasticsearch.mapping.meta-model]]
+== Meta Model Object Mapping
+
+The Metamodel based approach uses domain type information for reading/writing from/to Elasticsearch.
+This allows registering `Converter` instances for specific domain type mapping.
+
+[[elasticsearch.mapping.meta-model.annotations]]
+=== Mapping Annotation Overview
+
+The `MappingElasticsearchConverter` uses metadata to drive the mapping of objects to documents.
+The metadata is taken from the entity's properties which can be annotated.
+
+The following annotations are available:
+
+* `@Document`: Applied at the class level to indicate this class is a candidate for mapping to the database.
+The most important attributes are (check the API documentation for the complete list of attributes):
+** `indexName`: the name of the index to store this entity in.
+This can contain a SpEL template expression like `"log-#{T(java.time.LocalDate).now().toString()}"`
+** `createIndex`: flag whether to create an index on repository bootstrapping.
+Default value is _true_.
+See xref:elasticsearch/repositories/elasticsearch-repositories.adoc#elasticsearch.repositories.autocreation[Automatic creation of indices with the corresponding mapping]
+
+
+* `@Id`: Applied at the field level to mark the field used for identity purpose.
+* `@Transient`, `@ReadOnlyProperty`, `@WriteOnlyProperty`: see the following section xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations.read-write[Controlling which properties are written to and read from Elasticsearch] for detailed information.
+* `@PersistenceConstructor`: Marks a given constructor - even a package protected one - to use when instantiating the object from the database.
+Constructor arguments are mapped by name to the key values in the retrieved Document.
+* `@Field`: Applied at the field level and defines properties of the field, most of the attributes map to the respective https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html[Elasticsearch Mapping] definitions (the following list is not complete, check the annotation Javadoc for a complete reference):
+** `name`: The name of the field as it will be represented in the Elasticsearch document, if not set, the Java field name is used.
+** `type`: The field type, can be one of _Text, Keyword, Long, Integer, Short, Byte, Double, Float, Half_Float, Scaled_Float, Date, Date_Nanos, Boolean, Binary, Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range, Ip_Range, Object, Nested, Ip, TokenCount, Percolator, Flattened, Search_As_You_Type_.
+See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html[Elasticsearch Mapping Types].
+If the field type is not specified, it defaults to `FieldType.Auto`.
+This means, that no mapping entry is written for the property and that Elasticsearch will add a mapping entry dynamically when the first data for this property is stored (check the Elasticsearch documentation for dynamic mapping rules).
+** `format`: One or more built-in date formats, see the next section xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations.date-formats[Date format mapping].
+** `pattern`: One or more custom date formats, see the next section xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.annotations.date-formats[Date format mapping].
+** `store`: Flag whether the original field value should be stored in Elasticsearch, default value is _false_.
+** `analyzer`, `searchAnalyzer`, `normalizer` for specifying custom analyzers and normalizer.
+* `@GeoPoint`: Marks a field as _geo_point_ datatype.
+Can be omitted if the field is an instance of the `GeoPoint` class.
+* `@ValueConverter` defines a class to be used to convert the given property.
+In difference to a registered Spring `Converter` this only converts the annotated property and not every property of the given type.
+
+The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic.
+
+[[elasticsearch.mapping.meta-model.annotations.read-write]]
+==== Controlling which properties are written to and read from Elasticsearch
+
+This section details the annotations that define if the value of a property is written to or read from Elasticsearch.
+
+`@Transient`: A property annotated with this annotation will not be written to the mapping, its value will not be sent to Elasticsearch and when documents are returned from Elasticsearch, this property will not be set in the resulting entity.
+
+`@ReadOnlyProperty`: A property with this annotation will not have its value written to Elasticsearch, but when returning data, the property will be filled with the value returned in the document from Elasticsearch.
+One use case for this are runtime fields defined in the index mapping.
+
+`@WriteOnlyProperty`: A property with this annotation will have its value stored in Elasticsearch but will not be set with any value when reading document.
+This can be used for example for synthesized fields which should go into the Elasticsearch index but are not used elsewhere.
+
+[[elasticsearch.mapping.meta-model.annotations.date-formats]]
+==== Date format mapping
+
+Properties that derive from `TemporalAccessor` or are of type `java.util.Date` must either have a `@Field` annotation of type `FieldType.Date` or a custom converter must be registered for this type.
+This paragraph describes the use of
+`FieldType.Date`.
+
+There are two attributes of the `@Field` annotation that define which date format information is written to the mapping (also see https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html#built-in-date-formats[Elasticsearch Built In Formats] and https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html#custom-date-formats[Elasticsearch Custom Date Formats])
+
+The `format` attribute is used to define at least one of the predefined formats.
+If it is not defined, then a default value of __date_optional_time_ and _epoch_millis_ is used.
+
+The `pattern` attribute can be used to add additional custom format strings.
+If you want to use only custom date formats, you must set the `format` property to empty `{}`.
+
+The following table shows the different attributes and the mapping created from their values:
+
+[cols=2*,options=header]
+|===
+| annotation
+| format string in Elasticsearch mapping
+
+| @Field(type=FieldType.Date)
+| "date_optional_time\|\|epoch_millis",
+
+| @Field(type=FieldType.Date, format=DateFormat.basic_date)
+| "basic_date"
+
+| @Field(type=FieldType.Date, format={DateFormat.basic_date, DateFormat.basic_time})
+| "basic_date\|\|basic_time"
+
+| @Field(type=FieldType.Date, pattern="dd.MM.uuuu")
+| "date_optional_time\|\|epoch_millis\|\|dd.MM.uuuu",
+
+| @Field(type=FieldType.Date, format={}, pattern="dd.MM.uuuu")
+| "dd.MM.uuuu"
+
+|===
+
+NOTE: If you are using a custom date format, you need to use _uuuu_ for the year instead of _yyyy_.
+This is due to a https://www.elastic.co/guide/en/elasticsearch/reference/current/migrate-to-java-time.html#java-time-migration-incompatible-date-formats[change in Elasticsearch 7].
+
+Check the code of the `org.springframework.data.elasticsearch.annotations.DateFormat` enum for a complete list of predefined values and their patterns.
+
+[[elasticsearch.mapping.meta-model.annotations.range]]
+==== Range types
+
+When a field is annotated with a type of one of _Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range,_ or _Ip_Range_ the field must be an instance of a class that will be mapped to an Elasticsearch range, for example:
+
+====
+[source,java]
+----
+class SomePersonData {
+
+ @Field(type = FieldType.Integer_Range)
+ private ValidAge validAge;
+
+ // getter and setter
+}
+
+class ValidAge {
+ @Field(name="gte")
+ private Integer from;
+
+ @Field(name="lte")
+ private Integer to;
+
+ // getter and setter
+}
+----
+====
+
+As an alternative Spring Data Elasticsearch provides a `Range` class so that the previous example can be written as:
+
+====
+[source,java]
+----
+class SomePersonData {
+
+ @Field(type = FieldType.Integer_Range)
+ private Range validAge;
+
+ // getter and setter
+}
+----
+====
+
+Supported classes for the generic type parameter of `Range` are `Integer`, `Long`, `Float`, `Double`, `Date` and classes that implement the
+`TemporalAccessor` interface.
+
+[[elasticsearch.mapping.meta-model.annotations.mapped-names]]
+==== Mapped field names
+
+Without further configuration, Spring Data Elasticsearch will use the property name of an object as field name in Elasticsearch.
+This can be changed for individual fields by using the `@Field` annotation on that property.
+
+It is also possible to define a `FieldNamingStrategy` in the configuration of the client (xref:elasticsearch/clients.adoc[Elasticsearch Clients]).
+If for example a `SnakeCaseFieldNamingStrategy` is configured, the property _sampleProperty_ of the object would be mapped to _sample_property_ in Elasticsearch.
+A `FieldNamingStrategy` applies to all entities; it can be overwritten by setting a specific name with `@Field` on a property.
+
+[[elasticsearch.mapping.meta-model.annotations.non-field-backed-properties]]
+==== Non-field-backed properties
+
+Normally the properties used in an entity are fields of the entity class.
+There might be cases, when a property value is calculated in the entity and should be stored in Elasticsearch.
+In this case, the getter method (`getProperty()`) can be annotated with the `@Field` annotation, in addition to that the method must be annotated with `@AccessType(AccessType.Type.PROPERTY)`.
+The third annotation that is needed in such a case is `@WriteOnlyProperty`, as such a value is only written to Elasticsearch.
+A full example:
+
+====
+[source,java]
+----
+@Field(type = Keyword)
+@WriteOnlyProperty
+@AccessType(AccessType.Type.PROPERTY)
+public String getProperty() {
+ return "some value that is calculated here";
+}
+----
+====
+
+[[elasticsearch.mapping.meta-model.annotations.misc]]
+==== Other property annotations
+
+[[indexedindexname]]
+===== @IndexedIndexName
+
+This annotation can be set on a String property of an entity.
+This property will not be written to the mapping, it will not be stored in Elasticsearch and its value will not be read from an Elasticsearch document.
+After an entity is persisted, for example with a call to `ElasticsearchOperations.save(T entity)`, the entity returned from that call will contain the name of the index that an entity was saved to in that property.
+This is useful when the index name is dynamically set by a bean, or when writing to a write alias.
+
+Putting some value into such a property does not set the index into which an entity is stored!
+
+[[elasticsearch.mapping.meta-model.rules]]
+=== Mapping Rules
+
+[[elasticsearch.mapping.meta-model.rules.typehints]]
+==== Type Hints
+
+Mapping uses _type hints_ embedded in the document sent to the server to allow generic type mapping.
+Those type hints are represented as `_class` attributes within the document and are written for each aggregate root.
+
+.Type Hints
+====
+[source,java]
+----
+public class Person { <1>
+ @Id String id;
+ String firstname;
+ String lastname;
+}
+----
+
+[source,json]
+----
+{
+ "_class" : "com.example.Person", <1>
+ "id" : "cb7bef",
+ "firstname" : "Sarah",
+ "lastname" : "Connor"
+}
+----
+
+<1> By default the domain type's class name is used for the type hint.
+====
+
+Type hints can be configured to hold custom information.
+Use the `@TypeAlias` annotation to do so.
+
+NOTE: Make sure to add types with `@TypeAlias` to the initial entity set (`AbstractElasticsearchConfiguration#getInitialEntitySet`) to already have entity information available when first reading data from the store.
+
+.Type Hints with Alias
+====
+[source,java]
+----
+@TypeAlias("human") <1>
+public class Person {
+
+ @Id String id;
+ // ...
+}
+----
+
+[source,json]
+----
+{
+ "_class" : "human", <1>
+ "id" : ...
+}
+----
+
+<1> The configured alias is used when writing the entity.
+====
+
+NOTE: Type hints will not be written for nested Objects unless the property's type is `Object`, an interface or the actual value type does not match the property's declaration.
+
+[[disabling-type-hints]]
+===== Disabling Type Hints
+
+It may be necessary to disable writing of type hints when the index that should be used already exists without having the type hints defined in its mapping and with the mapping mode set to strict.
+In this case, writing the type hint will produce an error, as the field cannot be added automatically.
+
+Type hints can be disabled for the whole application by overriding the method `writeTypeHints()` in a configuration class derived from `AbstractElasticsearchConfiguration` (see xref:elasticsearch/clients.adoc[Elasticsearch Clients]).
+
+As an alternative they can be disabled for a single index with the `@Document` annotation:
+
+====
+[source,java]
+----
+@Document(indexName = "index", writeTypeHint = WriteTypeHint.FALSE)
+----
+====
+
+WARNING: We strongly advise against disabling Type Hints.
+Only do this if you are forced to.
+Disabling type hints can lead to documents not being retrieved correctly from Elasticsearch in case of polymorphic data or document retrieval may fail completely.
+
+[[elasticsearch.mapping.meta-model.rules.geospatial]]
+==== Geospatial Types
+
+Geospatial types like `Point` & `GeoPoint` are converted into _lat/lon_ pairs.
+
+.Geospatial types
+====
+[source,java]
+----
+public class Address {
+ String city, street;
+ Point location;
+}
+----
+
+[source,json]
+----
+{
+ "city" : "Los Angeles",
+ "street" : "2800 East Observatory Road",
+ "location" : { "lat" : 34.118347, "lon" : -118.3026284 }
+}
+----
+====
+
+[[elasticsearch.mapping.meta-model.rules.geojson]]
+==== GeoJson Types
+
+Spring Data Elasticsearch supports the GeoJson types by providing an interface `GeoJson` and implementations for the different geometries.
+They are mapped to Elasticsearch documents according to the GeoJson specification.
+The corresponding properties of the entity are specified in the index mappings as `geo_shape` when the index mapping is written. (check the https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-shape.html[Elasticsearch documentation] as well)
+
+.GeoJson types
+====
+[source,java]
+----
+public class Address {
+
+ String city, street;
+ GeoJsonPoint location;
+}
+----
+
+[source,json]
+----
+{
+ "city": "Los Angeles",
+ "street": "2800 East Observatory Road",
+ "location": {
+ "type": "Point",
+ "coordinates": [-118.3026284, 34.118347]
+ }
+}
+----
+====
+
+The following GeoJson types are implemented:
+
+* `GeoJsonPoint`
+* `GeoJsonMultiPoint`
+* `GeoJsonLineString`
+* `GeoJsonMultiLineString`
+* `GeoJsonPolygon`
+* `GeoJsonMultiPolygon`
+* `GeoJsonGeometryCollection`
+
+[[elasticsearch.mapping.meta-model.rules.collections]]
+==== Collections
+
+For values inside Collections apply the same mapping rules as for aggregate roots when it comes to _type hints_ and xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.conversions[Custom Conversions].
+
+.Collections
+====
+[source,java]
+----
+public class Person {
+
+ // ...
+
+ List friends;
+
+}
+----
+
+[source,json]
+----
+{
+ // ...
+
+ "friends" : [ { "firstname" : "Kyle", "lastname" : "Reese" } ]
+}
+----
+====
+
+[[elasticsearch.mapping.meta-model.rules.maps]]
+==== Maps
+
+For values inside Maps apply the same mapping rules as for aggregate roots when it comes to _type hints_ and xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model.conversions[Custom Conversions].
+However, the Map key needs to be a String to be processed by Elasticsearch.
+
+.Maps
+====
+[source,java]
+----
+public class Person {
+
+ // ...
+
+ Map knownLocations;
+
+}
+----
+
+[source,json]
+----
+{
+ // ...
+
+ "knownLocations" : {
+ "arrivedAt" : {
+ "city" : "Los Angeles",
+ "street" : "2800 East Observatory Road",
+ "location" : { "lat" : 34.118347, "lon" : -118.3026284 }
+ }
+ }
+}
+----
+====
+
+[[elasticsearch.mapping.meta-model.conversions]]
+=== Custom Conversions
+
+Looking at the `Configuration` from the xref:elasticsearch/object-mapping.adoc#elasticsearch.mapping.meta-model[previous section] `ElasticsearchCustomConversions` allows registering specific rules for mapping domain and simple types.
+
+.Meta Model Object Mapping Configuration
+====
+[source,java]
+----
+@Configuration
+public class Config extends ElasticsearchConfiguration {
+
+ @Override
+ public ClientConfiguration clientConfiguration() {
+ return ClientConfiguration.builder() //
+ .connectedTo("localhost:9200") //
+ .build();
+ }
+
+ @Bean
+ @Override
+ public ElasticsearchCustomConversions elasticsearchCustomConversions() {
+ return new ElasticsearchCustomConversions(
+ Arrays.asList(new AddressToMap(), new MapToAddress())); <1>
+ }
+
+ @WritingConverter <2>
+ static class AddressToMap implements Converter> {
+
+ @Override
+ public Map convert(Address source) {
+
+ LinkedHashMap target = new LinkedHashMap<>();
+ target.put("ciudad", source.getCity());
+ // ...
+
+ return target;
+ }
+ }
+
+ @ReadingConverter <3>
+ static class MapToAddress implements Converter