From 7af76338fc407751ae1cf681dace15adafaac183 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 27 Nov 2023 11:01:20 +0100 Subject: [PATCH 001/204] Introduce property for Jenkins user and Artifactory server details. Closes #2781 --- Jenkinsfile | 26 ++++++++++++-------------- ci/clean.sh | 5 +++-- ci/pipeline.properties | 3 +++ ci/verify.sh | 5 +++-- 4 files changed, 21 insertions(+), 18 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 685270466..9779dd237 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -40,8 +40,8 @@ pipeline { steps { script { docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { - sh 'PROFILE=none ci/verify.sh' - sh "ci/clean.sh" + sh "PROFILE=none JENKINS_USER_NAME=${p['jenkins.user.name']} ci/verify.sh" + sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh" } } } @@ -69,8 +69,8 @@ pipeline { steps { script { docker.image(p['docker.java.next.image']).inside(p['docker.java.inside.docker']) { - sh 'PROFILE=none ci/verify.sh' - sh "ci/clean.sh" + sh "PROFILE=none JENKINS_USER_NAME=${p['jenkins.user.name']} ci/verify.sh" + sh "JENKINS_USER_NAME=${p['jenkins.user.name']} ci/clean.sh" } } } @@ -90,28 +90,26 @@ pipeline { label 'data' } options { timeout(time: 20, unit: 'MINUTES') } - environment { ARTIFACTORY = credentials("${p['artifactory.credentials']}") DEVELOCITY_CACHE = credentials("${p['develocity.cache.credentials']}") DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") } - steps { script { docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) { - sh 'MAVEN_OPTS="-Duser.name=spring-builds+jenkins -Duser.home=/tmp/jenkins-home" ' + - 'DEVELOCITY_CACHE_USERNAME=${DEVELOCITY_CACHE_USR} ' + - 'DEVELOCITY_CACHE_PASSWORD=${DEVELOCITY_CACHE_PSW} ' + - 'GRADLE_ENTERPRISE_ACCESS_KEY=${DEVELOCITY_ACCESS_KEY} ' + - './mvnw -s settings.xml -Pci,artifactory -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch-non-root ' + - '-Dartifactory.server=https://repo.spring.io ' + + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "DEVELOCITY_CACHE_USERNAME=${DEVELOCITY_CACHE_USR} " + + "DEVELOCITY_CACHE_PASSWORD=${DEVELOCITY_CACHE_PSW} " + + "GRADLE_ENTERPRISE_ACCESS_KEY=${DEVELOCITY_ACCESS_KEY} " + + "./mvnw -s settings.xml -Pci,artifactory -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch-non-root " + + "-Dartifactory.server=${p['artifactory.url']} " + "-Dartifactory.username=${ARTIFACTORY_USR} " + "-Dartifactory.password=${ARTIFACTORY_PSW} " + - "-Dartifactory.staging-repository=libs-snapshot-local " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + "-Dartifactory.build-name=spring-data-elasticsearch " + "-Dartifactory.build-number=${BUILD_NUMBER} " + - '-Dmaven.test.skip=true clean deploy -U -B' + "-Dmaven.test.skip=true clean deploy -U -B" } } } diff --git a/ci/clean.sh b/ci/clean.sh index 9f86e51eb..34ba4ffcc 100755 --- a/ci/clean.sh +++ b/ci/clean.sh @@ -4,9 +4,10 @@ set -euo pipefail export DEVELOCITY_CACHE_USERNAME=${DEVELOCITY_CACHE_USR} export DEVELOCITY_CACHE_PASSWORD=${DEVELOCITY_CACHE_PSW} +export JENKINS_USER=${JENKINS_USER_NAME} # The environment variable to configure access key is still GRADLE_ENTERPRISE_ACCESS_KEY export GRADLE_ENTERPRISE_ACCESS_KEY=${DEVELOCITY_ACCESS_KEY} -MAVEN_OPTS="-Duser.name=spring-builds+jenkins -Duser.home=/tmp/jenkins-home" \ - ./mvnw -s settings.xml clean -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch 
+MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \ + ./mvnw -s settings.xml clean -Dscan=false -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch diff --git a/ci/pipeline.properties b/ci/pipeline.properties index d65c9db58..dc8b6fb0a 100644 --- a/ci/pipeline.properties +++ b/ci/pipeline.properties @@ -26,5 +26,8 @@ docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock - docker.registry= docker.credentials=hub.docker.com-springbuildmaster artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c +artifactory.url=https://repo.spring.io +artifactory.repository.snapshot=libs-snapshot-local develocity.cache.credentials=gradle_enterprise_cache_user develocity.access-key=gradle_enterprise_secret_access_key +jenkins.user.name=spring-builds+jenkins diff --git a/ci/verify.sh b/ci/verify.sh index 82734212b..98b9c79cc 100755 --- a/ci/verify.sh +++ b/ci/verify.sh @@ -7,10 +7,11 @@ chown -R 1001:1001 . export DEVELOCITY_CACHE_USERNAME=${DEVELOCITY_CACHE_USR} export DEVELOCITY_CACHE_PASSWORD=${DEVELOCITY_CACHE_PSW} +export JENKINS_USER=${JENKINS_USER_NAME} # The environment variable to configure access key is still GRADLE_ENTERPRISE_ACCESS_KEY export GRADLE_ENTERPRISE_ACCESS_KEY=${DEVELOCITY_ACCESS_KEY} -MAVEN_OPTS="-Duser.name=spring-builds+jenkins -Duser.home=/tmp/jenkins-home" \ +MAVEN_OPTS="-Duser.name=${JENKINS_USER} -Duser.home=/tmp/jenkins-home" \ ./mvnw -s settings.xml \ - -P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch \ No newline at end of file + -P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch From 05ca90ecc176fb82e2410b4b6c90ee0679e4b077 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Mon, 27 Nov 2023 22:02:17 +0100 Subject: [PATCH 002/204] Add strict date formats. Original Pull Requests #2782 Closes #2779 --- .../elasticsearch/annotations/DateFormat.java | 134 +++++++++++++++++- 1 file changed, 133 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java b/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java index 1119374dc..745b5bc09 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java @@ -39,41 +39,173 @@ public enum DateFormat { basic_t_time("'T'HHmmss.SSSXXX"), // basic_t_time_no_millis("'T'HHmmssXXX"), // basic_week_date("YYYY'W'wwe"), // week-based-year! + /** + * @since 5.3 + */ + strict_basic_week_date("YYYY'W'wwe"), // week-based-year! 
basic_week_date_time("YYYY'W'wwe'T'HHmmss.SSSX"), // here Elasticsearch uses a different zone format + /** + * @since 5.3 + */ + strict_basic_week_date_time("YYYY'W'wwe'T'HHmmss.SSSX"), // here Elasticsearch uses a different zone format basic_week_date_time_no_millis("YYYY'W'wwe'T'HHmmssX"), // + /** + * @since 5.3 + */ + strict_basic_week_date_time_no_millis("YYYY'W'wwe'T'HHmmssX"), // date("uuuu-MM-dd"), // + /** + * @since 5.3 + */ + strict_date("uuuu-MM-dd"), // date_hour("uuuu-MM-dd'T'HH"), // + /** + * @since 5.3 + */ + strict_date_hour("uuuu-MM-dd'T'HH"), // date_hour_minute("uuuu-MM-dd'T'HH:mm"), // + /** + * @since 5.3 + */ + strict_date_hour_minute("uuuu-MM-dd'T'HH:mm"), // date_hour_minute_second("uuuu-MM-dd'T'HH:mm:ss"), // + /** + * @since 5.3 + */ + strict_date_hour_minute_second("uuuu-MM-dd'T'HH:mm:ss"), // date_hour_minute_second_fraction("uuuu-MM-dd'T'HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_date_hour_minute_second_fraction("uuuu-MM-dd'T'HH:mm:ss.SSS"), // date_hour_minute_second_millis("uuuu-MM-dd'T'HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_date_hour_minute_second_millis("uuuu-MM-dd'T'HH:mm:ss.SSS"), // date_optional_time("uuuu-MM-dd['T'HH:mm:ss.SSSXXX]"), // + /** + * @since 5.3 + */ + strict_date_optional_time("uuuu-MM-dd['T'HH:mm:ss.SSSXXX]"), // strict_date_optional_time_nanos("uuuu-MM-dd['T'HH:mm:ss.SSSSSSXXX]"), // date_time("uuuu-MM-dd'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_date_time("uuuu-MM-dd'T'HH:mm:ss.SSSXXX"), // date_time_no_millis("uuuu-MM-dd'T'HH:mm:ssVV"), // here Elasticsearch uses the zone-id in its implementation + /** + * @since 5.3 + */ + strict_date_time_no_millis("uuuu-MM-dd'T'HH:mm:ssVV"), // here Elasticsearch uses the zone-id in its implementation epoch_millis("epoch_millis"), // epoch_second("epoch_second"), // hour("HH"), // + /** + * @since 5.3 + */ + strict_hour("HH"), // hour_minute("HH:mm"), // + /** + * @since 5.3 + */ + strict_hour_minute("HH:mm"), // hour_minute_second("HH:mm:ss"), // + /** + * @since 5.3 + */ + strict_hour_minute_second("HH:mm:ss"), // hour_minute_second_fraction("HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_hour_minute_second_fraction("HH:mm:ss.SSS"), // hour_minute_second_millis("HH:mm:ss.SSS"), // + /** + * @since 5.3 + */ + strict_hour_minute_second_millis("HH:mm:ss.SSS"), // ordinal_date("uuuu-DDD"), // + /** + * @since 5.3 + */ + strict_ordinal_date("uuuu-DDD"), // ordinal_date_time("uuuu-DDD'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_ordinal_date_time("uuuu-DDD'T'HH:mm:ss.SSSXXX"), // ordinal_date_time_no_millis("uuuu-DDD'T'HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_ordinal_date_time_no_millis("uuuu-DDD'T'HH:mm:ssXXX"), // time("HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_time("HH:mm:ss.SSSXXX"), // time_no_millis("HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_time_no_millis("HH:mm:ssXXX"), // t_time("'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_t_time("'T'HH:mm:ss.SSSXXX"), // t_time_no_millis("'T'HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_t_time_no_millis("'T'HH:mm:ssXXX"), // week_date("YYYY-'W'ww-e"), // + /** + * @since 5.3 + */ + strict_week_date("YYYY-'W'ww-e"), // week_date_time("YYYY-'W'ww-e'T'HH:mm:ss.SSSXXX"), // + /** + * @since 5.3 + */ + strict_week_date_time("YYYY-'W'ww-e'T'HH:mm:ss.SSSXXX"), // week_date_time_no_millis("YYYY-'W'ww-e'T'HH:mm:ssXXX"), // + /** + * @since 5.3 + */ + strict_week_date_time_no_millis("YYYY-'W'ww-e'T'HH:mm:ssXXX"), // weekyear(""), // no 
TemporalAccessor available for these 3 + /** + * @since 5.3 + */ + strict_weekyear(""), // no TemporalAccessor available for these 3 weekyear_week(""), // + /** + * @since 5.3 + */ + strict_weekyear_week(""), // weekyear_week_day(""), // + /** + * @since 5.3 + */ + strict_strict_weekyear_week_day(""), // year("uuuu"), // + /** + * @since 5.3 + */ + strict_year("uuuu"), // year_month("uuuu-MM"), // - year_month_day("uuuu-MM-dd"); // + /** + * @since 5.3 + */ + strict_year_month("uuuu-MM"), // + year_month_day("uuuu-MM-dd"), // + /** + * @since 5.3 + */ + strict_year_month_day("uuuu-MM-dd"); // private final String pattern; From 3833975a1ad76ef9c90b1260b090ff1eb9af523c Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Thu, 30 Nov 2023 20:29:34 +0100 Subject: [PATCH 003/204] Fix type of returned sort values. Original Pull Request #2786 Closes #2777 --- .../client/elc/DocumentAdapters.java | 3 +- .../sort/NestedSortIntegrationTests.java | 54 +++++++++---------- 2 files changed, 29 insertions(+), 28 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java index 78ae3b0d5..4a2d5cf33 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java @@ -138,7 +138,7 @@ public static SearchDocument from(Hit hit, JsonpMapper jsonpMapper) { document.setPrimaryTerm(hit.primaryTerm() != null && hit.primaryTerm() > 0 ? hit.primaryTerm() : 0); float score = hit.score() != null ? hit.score().floatValue() : Float.NaN; - return new SearchDocumentAdapter(document, score, hit.sort().stream().map(TypeUtils::toString).toArray(), + return new SearchDocumentAdapter(document, score, hit.sort().stream().map(TypeUtils::toObject).toArray(), documentFields, highlightFields, innerHits, nestedMetaData, explanation, matchedQueries, hit.routing()); } @@ -237,3 +237,4 @@ public static List> from(MgetResponse mgetRe .collect(Collectors.toList()); } } +🚝 diff --git a/src/test/java/org/springframework/data/elasticsearch/core/query/sort/NestedSortIntegrationTests.java b/src/test/java/org/springframework/data/elasticsearch/core/query/sort/NestedSortIntegrationTests.java index dbb90fc59..449545f4e 100644 --- a/src/test/java/org/springframework/data/elasticsearch/core/query/sort/NestedSortIntegrationTests.java +++ b/src/test/java/org/springframework/data/elasticsearch/core/query/sort/NestedSortIntegrationTests.java @@ -103,27 +103,27 @@ void shouldSortDirectorsByYearOfBirthOfActorInTheirMoviesAscending() { assertThat(searchHits.getSearchHit(0).getContent().id).isEqualTo(francisFordCoppola.id); var sortValues = searchHits.getSearchHit(0).getSortValues(); assertThat(sortValues).hasSize(1); - assertThat(sortValues.get(0)).isEqualTo("1924"); + assertThat(sortValues.get(0)).isEqualTo(1924L); assertThat(searchHits.getSearchHit(1).getContent().id).isEqualTo(stanleyKubrik.id); sortValues = searchHits.getSearchHit(1).getSortValues(); assertThat(sortValues).hasSize(1); - assertThat(sortValues.get(0)).isEqualTo("1937"); + assertThat(sortValues.get(0)).isEqualTo(1937L); } @Test // #1784 @DisplayName("should sort directors by year of birth of actor in their movies descending") void shouldSortDirectorsByYearOfBirthOfActorInTheirMoviesDescending() { -var order = new org.springframework.data.elasticsearch.core.query.Order(Sort.Direction.DESC, - "movies.actors.yearOfBirth") // 
- .withNested( // - Nested.builder("movies") // - .withNested(Nested.builder("movies.actors") // - .build()) // - .build()); + var order = new org.springframework.data.elasticsearch.core.query.Order(Sort.Direction.DESC, + "movies.actors.yearOfBirth") // + .withNested( // + Nested.builder("movies") // + .withNested(Nested.builder("movies.actors") // + .build()) // + .build()); -var query = Query.findAll().addSort(Sort.by(order)); + var query = Query.findAll().addSort(Sort.by(order)); var searchHits = operations.search(query, Director.class); @@ -132,32 +132,32 @@ void shouldSortDirectorsByYearOfBirthOfActorInTheirMoviesDescending() { assertThat(searchHits.getSearchHit(0).getContent().id).isEqualTo(stanleyKubrik.id); var sortValues = searchHits.getSearchHit(0).getSortValues(); assertThat(sortValues).hasSize(1); - assertThat(sortValues.get(0)).isEqualTo("1959"); + assertThat(sortValues.get(0)).isEqualTo(1959L); assertThat(searchHits.getSearchHit(1).getContent().id).isEqualTo(francisFordCoppola.id); sortValues = searchHits.getSearchHit(1).getSortValues(); assertThat(sortValues).hasSize(1); - assertThat(sortValues.get(0)).isEqualTo("1946"); + assertThat(sortValues.get(0)).isEqualTo(1946L); } @Test // #1784 @DisplayName("should sort directors by year of birth of male actor in their movies descending") void shouldSortDirectorsByYearOfBirthOfMaleActorInTheirMoviesDescending() { -var filter = StringQuery.builder(""" - { "term": {"movies.actors.sex": "m"} } - """).build(); -var order = new org.springframework.data.elasticsearch.core.query.Order(Sort.Direction.DESC, - "movies.actors.yearOfBirth") // - .withNested( // - Nested.builder("movies") // - .withNested( // - Nested.builder("movies.actors") // - .withFilter(filter) // - .build()) // - .build()); + var filter = StringQuery.builder(""" + { "term": {"movies.actors.sex": "m"} } + """).build(); + var order = new org.springframework.data.elasticsearch.core.query.Order(Sort.Direction.DESC, + "movies.actors.yearOfBirth") // + .withNested( // + Nested.builder("movies") // + .withNested( // + Nested.builder("movies.actors") // + .withFilter(filter) // + .build()) // + .build()); -var query = Query.findAll().addSort(Sort.by(order)); + var query = Query.findAll().addSort(Sort.by(order)); var searchHits = operations.search(query, Director.class); @@ -166,12 +166,12 @@ void shouldSortDirectorsByYearOfBirthOfMaleActorInTheirMoviesDescending() { assertThat(searchHits.getSearchHit(0).getContent().id).isEqualTo(stanleyKubrik.id); var sortValues = searchHits.getSearchHit(0).getSortValues(); assertThat(sortValues).hasSize(1); - assertThat(sortValues.get(0)).isEqualTo("1959"); + assertThat(sortValues.get(0)).isEqualTo(1959L); assertThat(searchHits.getSearchHit(1).getContent().id).isEqualTo(francisFordCoppola.id); sortValues = searchHits.getSearchHit(1).getSortValues(); assertThat(sortValues).hasSize(1); - assertThat(sortValues.get(0)).isEqualTo("1940"); + assertThat(sortValues.get(0)).isEqualTo(1940L); } @Document(indexName = "#{@indexNameProvider.indexName()}") From 415d5e0385fa00709fcc599b0087a77c80a79270 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Thu, 30 Nov 2023 20:34:57 +0100 Subject: [PATCH 004/204] Removed junk characters from code. 
--- .../data/elasticsearch/client/elc/DocumentAdapters.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java index 4a2d5cf33..fbd3b622a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java @@ -237,4 +237,3 @@ public static List> from(MgetResponse mgetRe .collect(Collectors.toList()); } } -🚝 From 8613eb26e0d8b4d6889f3d70354d326f10649835 Mon Sep 17 00:00:00 2001 From: Junghoon Ban Date: Sun, 3 Dec 2023 21:22:06 +0900 Subject: [PATCH 005/204] Use pattern matching instead of type casting. Original Pull Request #2784 Closes #2785 --- .../client/elc/CriteriaFilterProcessor.java | 11 +-- .../elc/ElasticsearchExceptionTranslator.java | 3 +- .../elc/ReactiveElasticsearchTemplate.java | 3 +- .../MappingElasticsearchConverter.java | 13 ++-- .../parser/ElasticsearchQueryCreator.java | 78 ++++++++++--------- 5 files changed, 59 insertions(+), 49 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java index 9b52c375e..ed388c471 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/CriteriaFilterProcessor.java @@ -50,6 +50,7 @@ * filter. * * @author Peter-Josef Meisch + * @author Junghoon Ban * @since 4.4 */ class CriteriaFilterProcessor { @@ -169,7 +170,7 @@ private static ObjectBuilder withinQuery(String fieldName, Obj Assert.isTrue(values[1] instanceof String || values[1] instanceof Distance, "Second element of a geo distance filter must be a text or a Distance"); - String dist = (values[1] instanceof Distance) ? extractDistanceString((Distance) values[1]) : (String) values[1]; + String dist = (values[1] instanceof Distance distance) ? 
extractDistanceString(distance) : (String) values[1]; return QueryBuilders.geoDistance() // .field(fieldName) // @@ -178,8 +179,8 @@ private static ObjectBuilder withinQuery(String fieldName, Obj .location(location -> { if (values[0]instanceof GeoPoint loc) { location.latlon(latlon -> latlon.lat(loc.getLat()).lon(loc.getLon())); - } else if (values[0] instanceof Point) { - GeoPoint loc = GeoPoint.fromPoint((Point) values[0]); + } else if (values[0] instanceof Point point) { + GeoPoint loc = GeoPoint.fromPoint(point); location.latlon(latlon -> latlon.lat(loc.getLat()).lon(loc.getLon())); } else { String loc = (String) values[0]; @@ -220,8 +221,8 @@ private static void oneParameterBBox(GeoBoundingBoxQuery.Builder queryBuilder, O "single-element of boundedBy filter must be type of GeoBox or Box"); GeoBox geoBBox; - if (value instanceof Box) { - geoBBox = GeoBox.fromBox((Box) value); + if (value instanceof Box box) { + geoBBox = GeoBox.fromBox(box); } else { geoBBox = (GeoBox) value; } diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java index 2851190d9..b773506d7 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ElasticsearchExceptionTranslator.java @@ -40,6 +40,7 @@ * appropriate: any other exception may have resulted from user code, and should not be translated. * * @author Peter-Josef Meisch + * @author Junghoon Ban * @since 4.4 */ public class ElasticsearchExceptionTranslator implements PersistenceExceptionTranslator { @@ -59,7 +60,7 @@ public ElasticsearchExceptionTranslator(JsonpMapper jsonpMapper) { */ public RuntimeException translateException(Throwable throwable) { - RuntimeException runtimeException = throwable instanceof RuntimeException ? (RuntimeException) throwable + RuntimeException runtimeException = throwable instanceof RuntimeException ex ? 
ex : new RuntimeException(throwable.getMessage(), throwable); RuntimeException potentiallyTranslatedException = translateExceptionIfPossible(runtimeException); diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java index 06b177b51..1d60ab94a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java @@ -79,6 +79,7 @@ * * @author Peter-Josef Meisch * @author Illia Ulianov + * @author Junghoon Ban * @since 4.4 */ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearchTemplate { @@ -645,7 +646,7 @@ public Publisher execute(ReactiveElasticsearchTemplate.ClientCallback getCollectionComponentType(TypeInformation type) { private Object propertyConverterRead(ElasticsearchPersistentProperty property, Object source) { PropertyValueConverter propertyValueConverter = Objects.requireNonNull(property.getPropertyValueConverter()); - if (source instanceof String[]) { + if (source instanceof String[] strings) { // convert to a List - source = Arrays.asList((String[]) source); + source = Arrays.asList(strings); } - if (source instanceof List) { - source = ((List) source).stream().map(it -> convertOnRead(propertyValueConverter, it)) + if (source instanceof List list) { + source = list.stream().map(it -> convertOnRead(propertyValueConverter, it)) .collect(Collectors.toList()); - } else if (source instanceof Set) { - source = ((Set) source).stream().map(it -> convertOnRead(propertyValueConverter, it)) + } else if (source instanceof Set set) { + source = set.stream().map(it -> convertOnRead(propertyValueConverter, it)) .collect(Collectors.toSet()); } else { source = convertOnRead(propertyValueConverter, source); diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java index c8079922b..600e02a8a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java @@ -17,7 +17,6 @@ import java.util.Collection; import java.util.Iterator; - import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.domain.Sort; import org.springframework.data.elasticsearch.core.geo.GeoBox; @@ -44,6 +43,7 @@ * @author Franck Marchand * @author Artur Konczak * @author Peter-Josef Meisch + * @author Junghoon Ban */ public class ElasticsearchQueryCreator extends AbstractQueryCreator { @@ -62,8 +62,8 @@ public ElasticsearchQueryCreator(PartTree tree, MappingContext iterator) { - PersistentPropertyPath path = context - .getPersistentPropertyPath(part.getProperty()); + PersistentPropertyPath path = context.getPersistentPropertyPath( + part.getProperty()); return new CriteriaQuery(from(part, new Criteria(path.toDotPath(ElasticsearchPersistentProperty.QueryPropertyToFieldNameConverter.INSTANCE)), iterator)); @@ -74,8 +74,8 @@ protected CriteriaQuery and(Part part, CriteriaQuery base, Iterator iter if (base == null) { return create(part, iterator); } - PersistentPropertyPath path = context - 
.getPersistentPropertyPath(part.getProperty()); + PersistentPropertyPath path = context.getPersistentPropertyPath( + part.getProperty()); return base.addCriteria(from(part, new Criteria(path.toDotPath(ElasticsearchPersistentProperty.QueryPropertyToFieldNameConverter.INSTANCE)), iterator)); @@ -109,8 +109,7 @@ private Criteria from(Part part, Criteria criteria, Iterator parameters) { return criteria.is(parameters.next()).not(); case REGEX: return criteria.expression(parameters.next().toString()); - case LIKE: - case STARTING_WITH: + case LIKE, STARTING_WITH: return criteria.startsWith(parameters.next().toString()); case ENDING_WITH: return criteria.endsWith(parameters.next().toString()); @@ -118,13 +117,11 @@ private Criteria from(Part part, Criteria criteria, Iterator parameters) { return criteria.contains(parameters.next().toString()); case GREATER_THAN: return criteria.greaterThan(parameters.next()); - case AFTER: - case GREATER_THAN_EQUAL: + case AFTER, GREATER_THAN_EQUAL: return criteria.greaterThanEqual(parameters.next()); case LESS_THAN: return criteria.lessThan(parameters.next()); - case BEFORE: - case LESS_THAN_EQUAL: + case BEFORE, LESS_THAN_EQUAL: return criteria.lessThanEqual(parameters.next()); case BETWEEN: return criteria.between(parameters.next(), parameters.next()); @@ -132,8 +129,7 @@ private Criteria from(Part part, Criteria criteria, Iterator parameters) { return criteria.in(asArray(parameters.next())); case NOT_IN: return criteria.notIn(asArray(parameters.next())); - case SIMPLE_PROPERTY: - case WITHIN: { + case SIMPLE_PROPERTY, WITHIN: { Object firstParameter = parameters.next(); Object secondParameter = null; if (type == Part.Type.SIMPLE_PROPERTY) { @@ -154,40 +150,24 @@ private Criteria from(Part part, Criteria criteria, Iterator parameters) { secondParameter = parameters.next(); } - if (firstParameter instanceof GeoPoint && secondParameter instanceof String) - return criteria.within((GeoPoint) firstParameter, (String) secondParameter); - - if (firstParameter instanceof Point && secondParameter instanceof Distance) - return criteria.within((Point) firstParameter, (Distance) secondParameter); - - if (firstParameter instanceof String && secondParameter instanceof String) - return criteria.within((String) firstParameter, (String) secondParameter); + return doWithinIfPossible(criteria, firstParameter, secondParameter); } case NEAR: { Object firstParameter = parameters.next(); - if (firstParameter instanceof GeoBox) { - return criteria.boundedBy((GeoBox) firstParameter); + if (firstParameter instanceof GeoBox geoBox) { + return criteria.boundedBy(geoBox); } - if (firstParameter instanceof Box) { - return criteria.boundedBy(GeoBox.fromBox((Box) firstParameter)); + if (firstParameter instanceof Box box) { + return criteria.boundedBy(GeoBox.fromBox(box)); } Object secondParameter = parameters.next(); - // "near" query can be the same query as the "within" query - if (firstParameter instanceof GeoPoint && secondParameter instanceof String) - return criteria.within((GeoPoint) firstParameter, (String) secondParameter); - - if (firstParameter instanceof Point && secondParameter instanceof Distance) - return criteria.within((Point) firstParameter, (Distance) secondParameter); - - if (firstParameter instanceof String && secondParameter instanceof String) - return criteria.within((String) firstParameter, (String) secondParameter); + return doWithinIfPossible(criteria, firstParameter, secondParameter); } - case EXISTS: - case IS_NOT_NULL: + case EXISTS, IS_NOT_NULL: return 
criteria.exists(); case IS_NULL: return criteria.not().exists(); @@ -200,6 +180,32 @@ private Criteria from(Part part, Criteria criteria, Iterator parameters) { } } + /** + * Do a within query if possible, otherwise return the criteria unchanged. + * + * @param criteria must not be {@literal null} + * @param firstParameter must not be {@literal null} + * @param secondParameter must not be {@literal null} + * @return the criteria with the within query applied if possible. + * @author Junghoon Ban + */ + private Criteria doWithinIfPossible(Criteria criteria, Object firstParameter, Object secondParameter) { + + if (firstParameter instanceof GeoPoint geoPoint && secondParameter instanceof String string) { + return criteria.within(geoPoint, string); + } + + if (firstParameter instanceof Point point && secondParameter instanceof Distance distance) { + return criteria.within(point, distance); + } + + if (firstParameter instanceof String firstString && secondParameter instanceof String secondString) { + return criteria.within(firstString, secondString); + } + + return criteria; + } + private Object[] asArray(Object o) { if (o instanceof Collection) { return ((Collection) o).toArray(); From 4edf9bee41331ded2cf5ee1d61abcb49c827b9be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9D=8E=E6=BD=87?= <34333488+knightdreams6@users.noreply.github.com> Date: Fri, 8 Dec 2023 17:24:18 +0800 Subject: [PATCH 006/204] Update elasticsearch-repositories.adoc. Original Pull Request #2789 Closes #2791 --- .../repositories/elasticsearch-repositories.adoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc index 8a1f7fe66..1d08868c1 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/repositories/elasticsearch-repositories.adoc @@ -12,10 +12,10 @@ class Book { @Id private String id; - @Field(type = FieldType.text) + @Field(type = FieldType.Text) private String name; - @Field(type = FieldType.text) + @Field(type = FieldType.Text) private String summary; @Field(type = FieldType.Integer) From 72e8f41de5c079ff05474776c7fc342ad324ecfe Mon Sep 17 00:00:00 2001 From: Patrick Baumgartner Date: Fri, 8 Dec 2023 10:27:23 +0100 Subject: [PATCH 007/204] Fixes Typo ElasticsearchHttpClientConfigurationCallback. Original Pull Request #2790 Closes #2792 --- src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc index d7cfa9826..0c2b4a66f 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc @@ -150,7 +150,7 @@ ClientConfiguration clientConfiguration = ClientConfiguration.builder() return headers; }) .withClientConfigurer( <.> - ElasticsearchClientConfigurationCallback.from(clientBuilder -> { + ElasticsearchHttpClientConfigurationCallback.from(clientBuilder -> { // ... return clientBuilder; })) From 1d6a1b0f2fd9f272121cafdac1439a767fdd847d Mon Sep 17 00:00:00 2001 From: Junghoon Ban Date: Wed, 13 Dec 2023 02:45:03 +0900 Subject: [PATCH 008/204] Use switch expressions to simplify case branches. 
Original Pull Request #2795 Closes #2794 --- .../parser/ElasticsearchQueryCreator.java | 128 ++++++++---------- 1 file changed, 58 insertions(+), 70 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java index 600e02a8a..d132dd117 100644 --- a/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java @@ -100,84 +100,72 @@ private Criteria from(Part part, Criteria criteria, Iterator parameters) { Part.Type type = part.getType(); - switch (type) { - case TRUE: - return criteria.is(true); - case FALSE: - return criteria.is(false); - case NEGATING_SIMPLE_PROPERTY: - return criteria.is(parameters.next()).not(); - case REGEX: - return criteria.expression(parameters.next().toString()); - case LIKE, STARTING_WITH: - return criteria.startsWith(parameters.next().toString()); - case ENDING_WITH: - return criteria.endsWith(parameters.next().toString()); - case CONTAINING: - return criteria.contains(parameters.next().toString()); - case GREATER_THAN: - return criteria.greaterThan(parameters.next()); - case AFTER, GREATER_THAN_EQUAL: - return criteria.greaterThanEqual(parameters.next()); - case LESS_THAN: - return criteria.lessThan(parameters.next()); - case BEFORE, LESS_THAN_EQUAL: - return criteria.lessThanEqual(parameters.next()); - case BETWEEN: - return criteria.between(parameters.next(), parameters.next()); - case IN: - return criteria.in(asArray(parameters.next())); - case NOT_IN: - return criteria.notIn(asArray(parameters.next())); - case SIMPLE_PROPERTY, WITHIN: { - Object firstParameter = parameters.next(); - Object secondParameter = null; - if (type == Part.Type.SIMPLE_PROPERTY) { - if (part.getProperty().getType() != GeoPoint.class) { - if (firstParameter != null) { - return criteria.is(firstParameter); - } else { - // searching for null is a must_not (exists) - return criteria.exists().not(); - } - } else { - // it means it's a simple find with exact geopoint matching (e.g. findByLocation) - // and because Elasticsearch does not have any kind of query with just a geopoint - // as argument we use a "geo distance" query with a distance of one meter. 
- secondParameter = ".001km"; - } + return switch (type) { + case TRUE -> criteria.is(true); + case FALSE -> criteria.is(false); + case NEGATING_SIMPLE_PROPERTY -> criteria.is(parameters.next()).not(); + case REGEX -> criteria.expression(parameters.next().toString()); + case LIKE, STARTING_WITH -> criteria.startsWith(parameters.next().toString()); + case ENDING_WITH -> criteria.endsWith(parameters.next().toString()); + case CONTAINING -> criteria.contains(parameters.next().toString()); + case GREATER_THAN -> criteria.greaterThan(parameters.next()); + case AFTER, GREATER_THAN_EQUAL -> criteria.greaterThanEqual(parameters.next()); + case LESS_THAN -> criteria.lessThan(parameters.next()); + case BEFORE, LESS_THAN_EQUAL -> criteria.lessThanEqual(parameters.next()); + case BETWEEN -> criteria.between(parameters.next(), parameters.next()); + case IN -> criteria.in(asArray(parameters.next())); + case NOT_IN -> criteria.notIn(asArray(parameters.next())); + case SIMPLE_PROPERTY, WITHIN -> this.within(part, criteria, parameters); + case NEAR -> this.near(criteria, parameters); + case EXISTS, IS_NOT_NULL -> criteria.exists(); + case IS_NULL -> criteria.not().exists(); + case IS_EMPTY -> criteria.empty(); + case IS_NOT_EMPTY -> criteria.notEmpty(); + default -> throw new InvalidDataAccessApiUsageException("Illegal criteria found '" + type + "'."); + }; + } + + private Criteria within(Part part, Criteria criteria, Iterator parameters) { + + Object firstParameter = parameters.next(); + Object secondParameter; + + if (part.getType() == Part.Type.SIMPLE_PROPERTY) { + if (part.getProperty().getType() != GeoPoint.class) { + if (firstParameter != null) { + return criteria.is(firstParameter); } else { - secondParameter = parameters.next(); + // searching for null is a must_not (exists) + return criteria.exists().not(); } - - return doWithinIfPossible(criteria, firstParameter, secondParameter); + } else { + // it means it's a simple find with exact geopoint matching (e.g. findByLocation) + // and because Elasticsearch does not have any kind of query with just a geopoint + // as argument we use a "geo distance" query with a distance of one meter. 
+ secondParameter = ".001km"; } - case NEAR: { - Object firstParameter = parameters.next(); + } else { + secondParameter = parameters.next(); + } - if (firstParameter instanceof GeoBox geoBox) { - return criteria.boundedBy(geoBox); - } + return doWithinIfPossible(criteria, firstParameter, secondParameter); + } - if (firstParameter instanceof Box box) { - return criteria.boundedBy(GeoBox.fromBox(box)); - } + private Criteria near(Criteria criteria, Iterator parameters) { - Object secondParameter = parameters.next(); + Object firstParameter = parameters.next(); - return doWithinIfPossible(criteria, firstParameter, secondParameter); - } - case EXISTS, IS_NOT_NULL: - return criteria.exists(); - case IS_NULL: - return criteria.not().exists(); - case IS_EMPTY: - return criteria.empty(); - case IS_NOT_EMPTY: - return criteria.notEmpty(); - default: - throw new InvalidDataAccessApiUsageException("Illegal criteria found '" + type + "'."); + if (firstParameter instanceof GeoBox geoBox) { + return criteria.boundedBy(geoBox); } + + if (firstParameter instanceof Box box) { + return criteria.boundedBy(GeoBox.fromBox(box)); + } + + Object secondParameter = parameters.next(); + + return doWithinIfPossible(criteria, firstParameter, secondParameter); } /** From fb9ccf7b449836b40d64c8576507111bd87b3803 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Tue, 12 Dec 2023 18:58:34 +0100 Subject: [PATCH 009/204] Polishing. --- .../parser/ElasticsearchQueryCreator.java | 315 +++++++++--------- 1 file changed, 158 insertions(+), 157 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java index d132dd117..d83020873 100644 --- a/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/parser/ElasticsearchQueryCreator.java @@ -15,8 +15,6 @@ */ package org.springframework.data.elasticsearch.repository.query.parser; -import java.util.Collection; -import java.util.Iterator; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.domain.Sort; import org.springframework.data.elasticsearch.core.geo.GeoBox; @@ -35,6 +33,9 @@ import org.springframework.data.repository.query.parser.PartTree; import org.springframework.lang.Nullable; +import java.util.Collection; +import java.util.Iterator; + /** * ElasticsearchQueryCreator * @@ -47,159 +48,159 @@ */ public class ElasticsearchQueryCreator extends AbstractQueryCreator { - private final MappingContext context; - - public ElasticsearchQueryCreator(PartTree tree, ParameterAccessor parameters, - MappingContext context) { - super(tree, parameters); - this.context = context; - } - - public ElasticsearchQueryCreator(PartTree tree, MappingContext context) { - super(tree); - this.context = context; - } - - @Override - protected CriteriaQuery create(Part part, Iterator iterator) { - PersistentPropertyPath path = context.getPersistentPropertyPath( - part.getProperty()); - return new CriteriaQuery(from(part, - new Criteria(path.toDotPath(ElasticsearchPersistentProperty.QueryPropertyToFieldNameConverter.INSTANCE)), - iterator)); - } - - @Override - protected CriteriaQuery and(Part part, CriteriaQuery base, Iterator iterator) { - if (base == null) { - return create(part, iterator); - } - PersistentPropertyPath path = 
context.getPersistentPropertyPath( - part.getProperty()); - return base.addCriteria(from(part, - new Criteria(path.toDotPath(ElasticsearchPersistentProperty.QueryPropertyToFieldNameConverter.INSTANCE)), - iterator)); - } - - @Override - protected CriteriaQuery or(CriteriaQuery base, CriteriaQuery query) { - return new CriteriaQuery(base.getCriteria().or(query.getCriteria())); - } - - @Override - protected CriteriaQuery complete(@Nullable CriteriaQuery query, Sort sort) { - - if (query == null) { - // this is the case in a findAllByOrderByField method, add empty criteria - query = new CriteriaQuery(new Criteria()); - } - return query.addSort(sort); - } - - private Criteria from(Part part, Criteria criteria, Iterator parameters) { - - Part.Type type = part.getType(); - - return switch (type) { - case TRUE -> criteria.is(true); - case FALSE -> criteria.is(false); - case NEGATING_SIMPLE_PROPERTY -> criteria.is(parameters.next()).not(); - case REGEX -> criteria.expression(parameters.next().toString()); - case LIKE, STARTING_WITH -> criteria.startsWith(parameters.next().toString()); - case ENDING_WITH -> criteria.endsWith(parameters.next().toString()); - case CONTAINING -> criteria.contains(parameters.next().toString()); - case GREATER_THAN -> criteria.greaterThan(parameters.next()); - case AFTER, GREATER_THAN_EQUAL -> criteria.greaterThanEqual(parameters.next()); - case LESS_THAN -> criteria.lessThan(parameters.next()); - case BEFORE, LESS_THAN_EQUAL -> criteria.lessThanEqual(parameters.next()); - case BETWEEN -> criteria.between(parameters.next(), parameters.next()); - case IN -> criteria.in(asArray(parameters.next())); - case NOT_IN -> criteria.notIn(asArray(parameters.next())); - case SIMPLE_PROPERTY, WITHIN -> this.within(part, criteria, parameters); - case NEAR -> this.near(criteria, parameters); - case EXISTS, IS_NOT_NULL -> criteria.exists(); - case IS_NULL -> criteria.not().exists(); - case IS_EMPTY -> criteria.empty(); - case IS_NOT_EMPTY -> criteria.notEmpty(); - default -> throw new InvalidDataAccessApiUsageException("Illegal criteria found '" + type + "'."); - }; - } - - private Criteria within(Part part, Criteria criteria, Iterator parameters) { - - Object firstParameter = parameters.next(); - Object secondParameter; - - if (part.getType() == Part.Type.SIMPLE_PROPERTY) { - if (part.getProperty().getType() != GeoPoint.class) { - if (firstParameter != null) { - return criteria.is(firstParameter); - } else { - // searching for null is a must_not (exists) - return criteria.exists().not(); - } - } else { - // it means it's a simple find with exact geopoint matching (e.g. findByLocation) - // and because Elasticsearch does not have any kind of query with just a geopoint - // as argument we use a "geo distance" query with a distance of one meter. - secondParameter = ".001km"; - } - } else { - secondParameter = parameters.next(); - } - - return doWithinIfPossible(criteria, firstParameter, secondParameter); - } - - private Criteria near(Criteria criteria, Iterator parameters) { - - Object firstParameter = parameters.next(); - - if (firstParameter instanceof GeoBox geoBox) { - return criteria.boundedBy(geoBox); - } - - if (firstParameter instanceof Box box) { - return criteria.boundedBy(GeoBox.fromBox(box)); - } - - Object secondParameter = parameters.next(); - - return doWithinIfPossible(criteria, firstParameter, secondParameter); - } - - /** - * Do a within query if possible, otherwise return the criteria unchanged. 
- * - * @param criteria must not be {@literal null} - * @param firstParameter must not be {@literal null} - * @param secondParameter must not be {@literal null} - * @return the criteria with the within query applied if possible. - * @author Junghoon Ban - */ - private Criteria doWithinIfPossible(Criteria criteria, Object firstParameter, Object secondParameter) { - - if (firstParameter instanceof GeoPoint geoPoint && secondParameter instanceof String string) { - return criteria.within(geoPoint, string); - } - - if (firstParameter instanceof Point point && secondParameter instanceof Distance distance) { - return criteria.within(point, distance); - } - - if (firstParameter instanceof String firstString && secondParameter instanceof String secondString) { - return criteria.within(firstString, secondString); - } - - return criteria; - } - - private Object[] asArray(Object o) { - if (o instanceof Collection) { - return ((Collection) o).toArray(); - } else if (o.getClass().isArray()) { - return (Object[]) o; - } - return new Object[] { o }; - } + private final MappingContext context; + + public ElasticsearchQueryCreator(PartTree tree, ParameterAccessor parameters, + MappingContext context) { + super(tree, parameters); + this.context = context; + } + + public ElasticsearchQueryCreator(PartTree tree, MappingContext context) { + super(tree); + this.context = context; + } + + @Override + protected CriteriaQuery create(Part part, Iterator iterator) { + PersistentPropertyPath path = context.getPersistentPropertyPath( + part.getProperty()); + return new CriteriaQuery(from(part, + new Criteria(path.toDotPath(ElasticsearchPersistentProperty.QueryPropertyToFieldNameConverter.INSTANCE)), + iterator)); + } + + @Override + protected CriteriaQuery and(Part part, CriteriaQuery base, Iterator iterator) { + if (base == null) { + return create(part, iterator); + } + PersistentPropertyPath path = context.getPersistentPropertyPath( + part.getProperty()); + return base.addCriteria(from(part, + new Criteria(path.toDotPath(ElasticsearchPersistentProperty.QueryPropertyToFieldNameConverter.INSTANCE)), + iterator)); + } + + @Override + protected CriteriaQuery or(CriteriaQuery base, CriteriaQuery query) { + return new CriteriaQuery(base.getCriteria().or(query.getCriteria())); + } + + @Override + protected CriteriaQuery complete(@Nullable CriteriaQuery query, Sort sort) { + + if (query == null) { + // this is the case in a findAllByOrderByField method, add empty criteria + query = new CriteriaQuery(new Criteria()); + } + return query.addSort(sort); + } + + private Criteria from(Part part, Criteria criteria, Iterator parameters) { + + Part.Type type = part.getType(); + + return switch (type) { + case TRUE -> criteria.is(true); + case FALSE -> criteria.is(false); + case NEGATING_SIMPLE_PROPERTY -> criteria.is(parameters.next()).not(); + case REGEX -> criteria.expression(parameters.next().toString()); + case LIKE, STARTING_WITH -> criteria.startsWith(parameters.next().toString()); + case ENDING_WITH -> criteria.endsWith(parameters.next().toString()); + case CONTAINING -> criteria.contains(parameters.next().toString()); + case GREATER_THAN -> criteria.greaterThan(parameters.next()); + case AFTER, GREATER_THAN_EQUAL -> criteria.greaterThanEqual(parameters.next()); + case LESS_THAN -> criteria.lessThan(parameters.next()); + case BEFORE, LESS_THAN_EQUAL -> criteria.lessThanEqual(parameters.next()); + case BETWEEN -> criteria.between(parameters.next(), parameters.next()); + case IN -> criteria.in(asArray(parameters.next())); + case 
NOT_IN -> criteria.notIn(asArray(parameters.next())); + case SIMPLE_PROPERTY, WITHIN -> within(part, criteria, parameters); + case NEAR -> near(criteria, parameters); + case EXISTS, IS_NOT_NULL -> criteria.exists(); + case IS_NULL -> criteria.not().exists(); + case IS_EMPTY -> criteria.empty(); + case IS_NOT_EMPTY -> criteria.notEmpty(); + default -> throw new InvalidDataAccessApiUsageException("Illegal criteria found '" + type + "'."); + }; + } + + private Criteria within(Part part, Criteria criteria, Iterator parameters) { + + Object firstParameter = parameters.next(); + Object secondParameter; + + if (part.getType() == Part.Type.SIMPLE_PROPERTY) { + if (part.getProperty().getType() != GeoPoint.class) { + if (firstParameter != null) { + return criteria.is(firstParameter); + } else { + // searching for null is a must_not (exists) + return criteria.exists().not(); + } + } else { + // it means it's a simple find with exact geopoint matching (e.g. findByLocation) + // and because Elasticsearch does not have any kind of query with just a geopoint + // as argument we use a "geo distance" query with a distance of one meter. + secondParameter = ".001km"; + } + } else { + secondParameter = parameters.next(); + } + + return doWithinIfPossible(criteria, firstParameter, secondParameter); + } + + private Criteria near(Criteria criteria, Iterator parameters) { + + Object firstParameter = parameters.next(); + + if (firstParameter instanceof GeoBox geoBox) { + return criteria.boundedBy(geoBox); + } + + if (firstParameter instanceof Box box) { + return criteria.boundedBy(GeoBox.fromBox(box)); + } + + Object secondParameter = parameters.next(); + + return doWithinIfPossible(criteria, firstParameter, secondParameter); + } + + /** + * Do a within query if possible, otherwise return the criteria unchanged. + * + * @param criteria must not be {@literal null} + * @param firstParameter must not be {@literal null} + * @param secondParameter must not be {@literal null} + * @return the criteria with the within query applied if possible. + * @author Junghoon Ban + */ + private Criteria doWithinIfPossible(Criteria criteria, Object firstParameter, Object secondParameter) { + + if (firstParameter instanceof GeoPoint geoPoint && secondParameter instanceof String string) { + return criteria.within(geoPoint, string); + } + + if (firstParameter instanceof Point point && secondParameter instanceof Distance distance) { + return criteria.within(point, distance); + } + + if (firstParameter instanceof String firstString && secondParameter instanceof String secondString) { + return criteria.within(firstString, secondString); + } + + return criteria; + } + + private Object[] asArray(Object o) { + if (o instanceof Collection) { + return ((Collection) o).toArray(); + } else if (o.getClass().isArray()) { + return (Object[]) o; + } + return new Object[]{o}; + } } From 8a3df63493c1456c58c1018a289969567331ad02 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Tue, 12 Dec 2023 19:27:58 +0100 Subject: [PATCH 010/204] Upgrade to Elasticsearch 8.11.2. 
Original Pull Request #2798 Closes #2797 --- pom.xml | 2 +- .../modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc | 5 +++++ .../antora/modules/ROOT/pages/elasticsearch/versions.adoc | 1 + src/test/resources/testcontainers-elasticsearch.properties | 2 +- 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index c13bab9a2..e45e494d6 100644 --- a/pom.xml +++ b/pom.xml @@ -21,7 +21,7 @@ 3.3.0-SNAPSHOT - 8.11.1 + 8.11.2 1.0.8.RELEASE 0.14.4 diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc index 021161650..e546b0dc7 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/elasticsearch-new.adoc @@ -1,6 +1,11 @@ [[new-features]] = What's new +[[new-features.5-3-0]] +== New in Spring Data Elasticsearch 5.3 + +* Upgrade to Elasticsearch 8.11.2 + [[new-features.5-2-0]] == New in Spring Data Elasticsearch 5.2 diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc index 19d824b00..5b2223b83 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc @@ -7,6 +7,7 @@ The Elasticsearch version given shows with which client libraries Spring Data El [cols="^,^,^,^,^",options="header"] |=== | Spring Data Release Train | Spring Data Elasticsearch | Elasticsearch | Spring Framework | Spring Boot +| 2024.0 (?) | 5.3.x | 8.11.2 | ? | ? | 2023.1 (Vaughan) | 5.2.x | 8.11.1 | 6.1.x | 3.2.x | 2023.0 (Ullmann) | 5.1.x | 8.7.1 | 6.0.x | 3.1.x | 2022.0 (Turing) | 5.0.xfootnote:oom[Out of maintenance] | 8.5.3 | 6.0.x | 3.0.x diff --git a/src/test/resources/testcontainers-elasticsearch.properties b/src/test/resources/testcontainers-elasticsearch.properties index 4ed32f600..569f9644b 100644 --- a/src/test/resources/testcontainers-elasticsearch.properties +++ b/src/test/resources/testcontainers-elasticsearch.properties @@ -15,7 +15,7 @@ # # sde.testcontainers.image-name=docker.elastic.co/elasticsearch/elasticsearch -sde.testcontainers.image-version=8.11.1 +sde.testcontainers.image-version=8.11.2 # # # needed as we do a DELETE /* at the end of the tests, will be required from 8.0 on, produces a warning since 7.13 From 0e419133a2e8077e85f55f51d6a2ee0016cf3d80 Mon Sep 17 00:00:00 2001 From: puppylpg Date: Thu, 14 Dec 2023 04:03:59 +0800 Subject: [PATCH 011/204] support highlight_query (#2793) * support highlight_query * implement highlight query with spring data elasticsearch query * highight query by StringQuery * split highligh fields assertion into different parts --- .../migration-guide-4.4-5.0.adoc | 2 +- .../client/elc/HighlightQueryBuilder.java | 17 ++- .../client/elc/RequestConverter.java | 7 +- .../highlight/HighlightCommonParameters.java | 20 +++ .../core/ElasticsearchIntegrationTests.java | 124 ++++++++++++++++++ 5 files changed, 163 insertions(+), 7 deletions(-) diff --git a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc index eb627f141..e11955fef 100644 --- a/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc +++ b/src/main/antora/modules/ROOT/pages/migration-guides/migration-guide-4.4-5.0.adoc @@ -49,7 +49,7 @@ Also the reactive implementation that was provided up to now has 
been moved here If you are using `ElasticsearchRestTemplate` directly and not the `ElasticsearchOperations` interface you'll need to adjust your imports as well. When working with the `NativeSearchQuery` class, you'll need to switch to the `NativeQuery` class, which can take a -`Query` instance comign from the new Elasticsearch client libraries. +`Query` instance coming from the new Elasticsearch client libraries. You'll find plenty of examples in the test code. [[elasticsearch-migration-guide-4.4-5.0.breaking-changes-records]] diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java index fa7ccc9ec..f75d1ef76 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/HighlightQueryBuilder.java @@ -35,14 +35,17 @@ * {@link co.elastic.clients.elasticsearch.core.search.Highlight}. * * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.4 */ class HighlightQueryBuilder { private final MappingContext, ElasticsearchPersistentProperty> mappingContext; + private final RequestConverter requestConverter; HighlightQueryBuilder( - MappingContext, ElasticsearchPersistentProperty> mappingContext) { + MappingContext, ElasticsearchPersistentProperty> mappingContext, RequestConverter requestConverter) { this.mappingContext = mappingContext; + this.requestConverter = requestConverter; } public co.elastic.clients.elasticsearch.core.search.Highlight getHighlight(Highlight highlight, @@ -52,7 +55,7 @@ public co.elastic.clients.elasticsearch.core.search.Highlight getHighlight(Highl // in the old implementation we could use one addParameters method, but in the new Elasticsearch client // the builder for highlight and highlightfield share no code - addParameters(highlight.getParameters(), highlightBuilder); + addParameters(highlight.getParameters(), highlightBuilder, type); for (HighlightField highlightField : highlight.getFields()) { String mappedName = mapFieldName(highlightField.getName(), type); @@ -69,7 +72,7 @@ public co.elastic.clients.elasticsearch.core.search.Highlight getHighlight(Highl * the builder for highlight and highlight fields don't share code, so we have these two methods here that basically are almost copies */ private void addParameters(HighlightParameters parameters, - co.elastic.clients.elasticsearch.core.search.Highlight.Builder builder) { + co.elastic.clients.elasticsearch.core.search.Highlight.Builder builder, @Nullable Class type) { if (StringUtils.hasLength(parameters.getBoundaryChars())) { builder.boundaryChars(parameters.getBoundaryChars()); @@ -103,6 +106,10 @@ private void addParameters(HighlightParameters parameters, builder.numberOfFragments(parameters.getNumberOfFragments()); } + if (parameters.getHighlightQuery() != null) { + builder.highlightQuery(requestConverter.getQuery(parameters.getHighlightQuery(), type)); + } + if (StringUtils.hasLength(parameters.getOrder())) { builder.order(highlighterOrder(parameters.getOrder())); } @@ -174,6 +181,10 @@ private void addParameters(HighlightFieldParameters parameters, builder.numberOfFragments(parameters.getNumberOfFragments()); } + if (parameters.getHighlightQuery() != null) { + builder.highlightQuery(requestConverter.getQuery(parameters.getHighlightQuery(), type)); + } + if (StringUtils.hasLength(parameters.getOrder())) { 
builder.order(highlighterOrder(parameters.getOrder())); } diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/RequestConverter.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/RequestConverter.java index bb9d0dcec..607929696 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/RequestConverter.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/RequestConverter.java @@ -105,6 +105,7 @@ * @author Sascha Woo * @author cdalxndr * @author scoobyzhang + * @author Haibo Liu * @since 4.4 */ @SuppressWarnings("ClassCanBeRecord") @@ -1494,7 +1495,7 @@ private Rescore getRescore(RescorerQuery rescorerQuery) { private void addHighlight(Query query, SearchRequest.Builder builder) { Highlight highlight = query.getHighlightQuery() - .map(highlightQuery -> new HighlightQueryBuilder(elasticsearchConverter.getMappingContext()) + .map(highlightQuery -> new HighlightQueryBuilder(elasticsearchConverter.getMappingContext(), this) .getHighlight(highlightQuery.getHighlight(), highlightQuery.getType())) .orElse(null); @@ -1504,7 +1505,7 @@ private void addHighlight(Query query, SearchRequest.Builder builder) { private void addHighlight(Query query, MultisearchBody.Builder builder) { Highlight highlight = query.getHighlightQuery() - .map(highlightQuery -> new HighlightQueryBuilder(elasticsearchConverter.getMappingContext()) + .map(highlightQuery -> new HighlightQueryBuilder(elasticsearchConverter.getMappingContext(), this) .getHighlight(highlightQuery.getHighlight(), highlightQuery.getType())) .orElse(null); @@ -1646,7 +1647,7 @@ private void prepareNativeSearch(NativeQuery query, MultisearchBody.Builder buil } @Nullable - private co.elastic.clients.elasticsearch._types.query_dsl.Query getQuery(@Nullable Query query, + co.elastic.clients.elasticsearch._types.query_dsl.Query getQuery(@Nullable Query query, @Nullable Class clazz) { if (query == null) { diff --git a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java index 6261fb1de..84b15423e 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java @@ -15,10 +15,13 @@ */ package org.springframework.data.elasticsearch.core.query.highlight; +import org.springframework.data.elasticsearch.core.query.Query; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.3 */ public abstract class HighlightCommonParameters { @@ -31,6 +34,7 @@ public abstract class HighlightCommonParameters { private final int fragmentSize; private final int noMatchSize; private final int numberOfFragments; + @Nullable private final Query highlightQuery; private final String order; private final int phraseLimit; private final String[] preTags; @@ -51,6 +55,7 @@ protected HighlightCommonParameters(HighlightCommonParametersBuilder builder) fragmentSize = builder.fragmentSize; noMatchSize = builder.noMatchSize; numberOfFragments = builder.numberOfFragments; + highlightQuery = builder.highlightQuery; order = builder.order; phraseLimit = builder.phraseLimit; preTags = builder.preTags; @@ -95,6 +100,11 @@ public int getNumberOfFragments() { return numberOfFragments; } + @Nullable + public Query 
getHighlightQuery() { + return highlightQuery; + } + public String getOrder() { return order; } @@ -130,6 +140,11 @@ public static abstract class HighlightCommonParametersBuildermessage"); } + @Test // #2636 + void shouldReturnHighlightFieldsWithHighlightQueryInSearchHit() { + IndexCoordinates index = createIndexCoordinatesWithHighlightMessage(); + + // a highlight query equals to the search query + var sameHighlightQuery = HighlightFieldParameters.builder() + .withHighlightQuery(getBuilderWithTermQuery("message", "message").build()) + .build(); + Query query = getBuilderWithTermQuery("message", "message") // + .withHighlightQuery( + new HighlightQuery(new Highlight(singletonList(new HighlightField("message", sameHighlightQuery))), HighlightEntity.class) + ) + .build(); + SearchHits searchHits = operations.search(query, HighlightEntity.class, index); + + assertThat(searchHits).isNotNull(); + assertThat(searchHits.getSearchHits()).hasSize(1); + + SearchHit searchHit = searchHits.getSearchHit(0); + List highlightField = searchHit.getHighlightField("message"); + assertThat(highlightField).hasSize(2); + assertThat(highlightField.get(0)).contains("message"); + assertThat(highlightField.get(1)).contains("message"); + } + + @Test // #2636 + void shouldReturnDifferentHighlightFieldsWithDifferentHighlightQueryInSearchHit() { + IndexCoordinates index = createIndexCoordinatesWithHighlightMessage(); + + // a different highlight query from the search query + var differentHighlightQueryInField = HighlightFieldParameters.builder() + .withHighlightQuery(getBuilderWithTermQuery("message", "initial").build()) + .build(); + // highlight_query in field + Query highlightQueryInField = getBuilderWithTermQuery("message", "message") // + .withHighlightQuery( + new HighlightQuery(new Highlight(singletonList(new HighlightField("message", differentHighlightQueryInField))), HighlightEntity.class) + ) + .build(); + assertThatHighlightFieldsIsDifferentFromHighlightQuery(highlightQueryInField, index); + } + + @Test // #2636 + void shouldReturnDifferentHighlightFieldsWithDifferentParamHighlightQueryInSearchHit() { + IndexCoordinates index = createIndexCoordinatesWithHighlightMessage(); + + // a different highlight query from the search query and used in highlight param rather than field + var differentHighlightQueryInParam = HighlightParameters.builder() + .withHighlightQuery(getBuilderWithTermQuery("message", "initial").build()) + .build(); + // highlight_query in param + Query highlightQueryInParam = getBuilderWithTermQuery("message", "message") // + .withHighlightQuery( + new HighlightQuery(new Highlight(differentHighlightQueryInParam, singletonList(new HighlightField("message"))), HighlightEntity.class) + ) + .build(); + assertThatHighlightFieldsIsDifferentFromHighlightQuery(highlightQueryInParam, index); + } + + @Test // #2636 + void shouldReturnDifferentHighlightFieldsWithDifferentHighlightCriteriaQueryInSearchHit() { + IndexCoordinates index = createIndexCoordinatesWithHighlightMessage(); + // a different highlight query from the search query, written by CriteriaQuery rather than NativeQuery + var criteriaHighlightQueryInParam = HighlightParameters.builder() + .withHighlightQuery(new CriteriaQuery(new Criteria("message").is("initial"))) + .build(); + // highlight_query in param + Query differentHighlightQueryUsingCriteria = getBuilderWithTermQuery("message", "message") // + .withHighlightQuery( + new HighlightQuery(new Highlight(criteriaHighlightQueryInParam, singletonList(new HighlightField("message"))), 
HighlightEntity.class) + ) + .build(); + assertThatHighlightFieldsIsDifferentFromHighlightQuery(differentHighlightQueryUsingCriteria, index); + } + + @Test // #2636 + void shouldReturnDifferentHighlightFieldsWithDifferentHighlightStringQueryInSearchHit() { + IndexCoordinates index = createIndexCoordinatesWithHighlightMessage(); + // a different highlight query from the search query, written by StringQuery + var stringHighlightQueryInParam = HighlightParameters.builder() + .withHighlightQuery(new StringQuery( + """ + { + "term": { + "message": { + "value": "initial" + } + } + } + """ + )) + .build(); + // highlight_query in param + Query differentHighlightQueryUsingStringQuery = getBuilderWithTermQuery("message", "message") // + .withHighlightQuery( + new HighlightQuery(new Highlight(stringHighlightQueryInParam, singletonList(new HighlightField("message"))), HighlightEntity.class) + ) + .build(); + assertThatHighlightFieldsIsDifferentFromHighlightQuery(differentHighlightQueryUsingStringQuery, index); + } + + private IndexCoordinates createIndexCoordinatesWithHighlightMessage() { + IndexCoordinates index = IndexCoordinates.of("test-index-highlight-entity-template"); + HighlightEntity entity = new HighlightEntity("1", + "This message is a long text which contains the word to search for " + + "in two places, the first being near the beginning and the second near the end of the message. " + + "However, i'll use a different highlight query from the initial search query"); + IndexQuery indexQuery = new IndexQueryBuilder().withId(entity.getId()).withObject(entity).build(); + operations.index(indexQuery, index); + operations.indexOps(index).refresh(); + return index; + } + + private void assertThatHighlightFieldsIsDifferentFromHighlightQuery(Query query, IndexCoordinates index) { + SearchHits searchHits = operations.search(query, HighlightEntity.class, index); + + SearchHit searchHit = searchHits.getSearchHit(0); + List highlightField = searchHit.getHighlightField("message"); + assertThat(highlightField).hasSize(1); + assertThat(highlightField.get(0)).contains("initial"); + } + @Test // #1686 void shouldRunRescoreQueryInSearchQuery() { IndexCoordinates index = IndexCoordinates.of(indexNameProvider.getPrefix() + "rescore-entity"); From 362126e72db96c6594d00ee5ecc924c0c08f6154 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 14 Dec 2023 08:40:44 +0100 Subject: [PATCH 012/204] Upgrade to Maven Wrapper 3.9.6. See #2801 --- .mvn/wrapper/maven-wrapper.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index 021cf949c..e6686c6c0 100644 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -1,3 +1,3 @@ -#Wed Oct 04 16:58:13 PDT 2023 +#Thu Dec 14 08:40:44 CET 2023 wrapperUrl=https\://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar -distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.5/apache-maven-3.9.5-bin.zip +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.6/apache-maven-3.9.6-bin.zip From d0ed80dfde11cd528f7032c051d05405a3d59359 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 14 Dec 2023 08:50:32 +0100 Subject: [PATCH 013/204] Update CI properties. 
See #2772 --- ci/pipeline.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/pipeline.properties b/ci/pipeline.properties index dc8b6fb0a..60057f265 100644 --- a/ci/pipeline.properties +++ b/ci/pipeline.properties @@ -1,6 +1,6 @@ # Java versions -java.main.tag=17.0.8_7-jdk-focal -java.next.tag=21_35-jdk-jammy +java.main.tag=17.0.9_9-jdk-focal +java.next.tag=21.0.1_12-jdk-jammy # Docker container images - standard docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag} From 96b38652ab6d83cd23b8e010bc6471c5591fbfd5 Mon Sep 17 00:00:00 2001 From: puppylpg Date: Mon, 18 Dec 2023 01:16:18 +0800 Subject: [PATCH 014/204] Support highlight query in @HighlightParameters annotation. Original Pull Request #2802 --- .../annotations/HighlightParameters.java | 3 + .../core/query/highlight/Highlight.java | 41 +------- .../highlight/HighlightCommonParameters.java | 2 +- .../query/ElasticsearchQueryMethod.java | 32 +++---- .../repository/query/HighlightConverter.java | 94 +++++++++++++++++++ ...ustomMethodRepositoryIntegrationTests.java | 59 +++++++++++- 6 files changed, 168 insertions(+), 63 deletions(-) create mode 100644 src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java b/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java index 55cb5ab57..b4e92db4a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java +++ b/src/main/java/org/springframework/data/elasticsearch/annotations/HighlightParameters.java @@ -21,6 +21,7 @@ /** * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.0 */ @Documented @@ -59,6 +60,8 @@ int numberOfFragments() default -1; + Query highlightQuery() default @Query; + String order() default ""; int phraseLimit() default -1; diff --git a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/Highlight.java b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/Highlight.java index 97f6eb5c4..e982c168c 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/Highlight.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/Highlight.java @@ -15,14 +15,13 @@ */ package org.springframework.data.elasticsearch.core.query.highlight; -import java.util.Arrays; import java.util.List; -import java.util.stream.Collectors; import org.springframework.util.Assert; /** * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.3 */ public class Highlight { @@ -57,42 +56,4 @@ public HighlightParameters getParameters() { public List getFields() { return fields; } - - /** - * Creates a {@link Highlight} from an Annotation instance. 
- * - * @param highlight must not be {@literal null} - * @return highlight definition - */ - public static Highlight of(org.springframework.data.elasticsearch.annotations.Highlight highlight) { - - Assert.notNull(highlight, "highlight must not be null"); - - org.springframework.data.elasticsearch.annotations.HighlightParameters parameters = highlight.parameters(); - HighlightParameters highlightParameters = HighlightParameters.builder() // - .withBoundaryChars(parameters.boundaryChars()) // - .withBoundaryMaxScan(parameters.boundaryMaxScan()) // - .withBoundaryScanner(parameters.boundaryScanner()) // - .withBoundaryScannerLocale(parameters.boundaryScannerLocale()) // - .withEncoder(parameters.encoder()) // - .withForceSource(parameters.forceSource()) // - .withFragmenter(parameters.fragmenter()) // - .withFragmentSize(parameters.fragmentSize()) // - .withNoMatchSize(parameters.noMatchSize()) // - .withNumberOfFragments(parameters.numberOfFragments()) // - .withOrder(parameters.order()) // - .withPhraseLimit(parameters.phraseLimit()) // - .withPreTags(parameters.preTags()) // - .withPostTags(parameters.postTags()) // - .withRequireFieldMatch(parameters.requireFieldMatch()) // - .withTagsSchema(parameters.tagsSchema()) // - .withType(parameters.type()) // - .build(); - - List highlightFields = Arrays.stream(highlight.fields()) // - .map(HighlightField::of) // - .collect(Collectors.toList()); - - return new Highlight(highlightParameters, highlightFields); - } } diff --git a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java index 84b15423e..428072ef2 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java @@ -199,7 +199,7 @@ public SELF withNumberOfFragments(int numberOfFragments) { return (SELF) this; } - public SELF withHighlightQuery(Query highlightQuery) { + public SELF withHighlightQuery(@Nullable Query highlightQuery) { this.highlightQuery = highlightQuery; return (SELF) this; } diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/ElasticsearchQueryMethod.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/ElasticsearchQueryMethod.java index 6af2b618e..f5510b32a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/repository/query/ElasticsearchQueryMethod.java +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/ElasticsearchQueryMethod.java @@ -15,14 +15,6 @@ */ package org.springframework.data.elasticsearch.repository.query; -import java.lang.reflect.Method; -import java.lang.reflect.ParameterizedType; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.stream.Stream; - import org.springframework.core.annotation.AnnotatedElementUtils; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.elasticsearch.annotations.Highlight; @@ -50,12 +42,19 @@ import org.springframework.data.repository.query.QueryMethod; import org.springframework.data.repository.util.QueryExecutionConverters; import org.springframework.data.repository.util.ReactiveWrapperConverters; -import org.springframework.data.util.Lazy; import org.springframework.data.util.TypeInformation; 
import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import java.lang.reflect.Method; +import java.lang.reflect.ParameterizedType; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.Stream; + /** * ElasticsearchQueryMethod * @@ -66,6 +65,7 @@ * @author Christoph Strobl * @author Peter-Josef Meisch * @author Alexander Torres + * @author Haibo Liu */ public class ElasticsearchQueryMethod extends QueryMethod { @@ -81,8 +81,6 @@ public class ElasticsearchQueryMethod extends QueryMethod { @Nullable private ElasticsearchEntityMetadata metadata; @Nullable private final Query queryAnnotation; @Nullable private final Highlight highlightAnnotation; - private final Lazy highlightQueryLazy = Lazy.of(this::createAnnotatedHighlightQuery); - @Nullable private final SourceFilters sourceFilters; public ElasticsearchQueryMethod(Method method, RepositoryMetadata repositoryMetadata, ProjectionFactory factory, @@ -143,19 +141,13 @@ public boolean hasAnnotatedHighlight() { * @throws IllegalArgumentException if no {@link Highlight} annotation is present on the method * @see #hasAnnotatedHighlight() */ - public HighlightQuery getAnnotatedHighlightQuery() { + public HighlightQuery getAnnotatedHighlightQuery(HighlightConverter highlightConverter) { Assert.isTrue(hasAnnotatedHighlight(), "no Highlight annotation present on " + getName()); - - return highlightQueryLazy.get(); - } - - private HighlightQuery createAnnotatedHighlightQuery() { - Assert.notNull(highlightAnnotation, "highlightAnnotation must not be null"); return new HighlightQuery( - org.springframework.data.elasticsearch.core.query.highlight.Highlight.of(highlightAnnotation), + highlightConverter.convert(highlightAnnotation), getDomainClass()); } @@ -378,7 +370,7 @@ void addMethodParameter(BaseQuery query, ElasticsearchParametersParameterAccesso ElasticsearchConverter elasticsearchConverter) { if (hasAnnotatedHighlight()) { - query.setHighlightQuery(getAnnotatedHighlightQuery()); + query.setHighlightQuery(getAnnotatedHighlightQuery(new HighlightConverter(parameterAccessor, elasticsearchConverter))); } var sourceFilter = getSourceFilter(parameterAccessor, elasticsearchConverter); diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java new file mode 100644 index 000000000..68b2e9c67 --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java @@ -0,0 +1,94 @@ +/* + * Copyright 2013-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.repository.query; + +import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; +import org.springframework.data.elasticsearch.core.query.Query; +import org.springframework.data.elasticsearch.core.query.StringQuery; +import org.springframework.data.elasticsearch.core.query.highlight.Highlight; +import org.springframework.data.elasticsearch.core.query.highlight.HighlightField; +import org.springframework.data.elasticsearch.core.query.highlight.HighlightParameters; +import org.springframework.data.elasticsearch.repository.support.StringQueryUtil; +import org.springframework.util.Assert; + +import java.util.Arrays; +import java.util.List; + +/** + * Convert {@link org.springframework.data.elasticsearch.annotations.Highlight} to {@link Highlight}. + * + * @author Haibo Liu + */ +public class HighlightConverter { + + private final ElasticsearchParametersParameterAccessor parameterAccessor; + private final ElasticsearchConverter elasticsearchConverter; + + HighlightConverter(ElasticsearchParametersParameterAccessor parameterAccessor, + ElasticsearchConverter elasticsearchConverter) { + this.parameterAccessor = parameterAccessor; + this.elasticsearchConverter = elasticsearchConverter; + } + + /** + * Creates a {@link Highlight} from an Annotation instance. + * + * @param highlight must not be {@literal null} + * @return highlight definition + */ + Highlight convert(org.springframework.data.elasticsearch.annotations.Highlight highlight) { + + Assert.notNull(highlight, "highlight must not be null"); + + org.springframework.data.elasticsearch.annotations.HighlightParameters parameters = highlight.parameters(); + + // replace placeholders in highlight query with actual parameters + Query highlightQuery = null; + if (!parameters.highlightQuery().value().isEmpty()) { + String rawString = parameters.highlightQuery().value(); + String queryString = new StringQueryUtil(elasticsearchConverter.getConversionService()) + .replacePlaceholders(rawString, parameterAccessor); + highlightQuery = new StringQuery(queryString); + } + + HighlightParameters highlightParameters = HighlightParameters.builder() // + .withBoundaryChars(parameters.boundaryChars()) // + .withBoundaryMaxScan(parameters.boundaryMaxScan()) // + .withBoundaryScanner(parameters.boundaryScanner()) // + .withBoundaryScannerLocale(parameters.boundaryScannerLocale()) // + .withEncoder(parameters.encoder()) // + .withForceSource(parameters.forceSource()) // + .withFragmenter(parameters.fragmenter()) // + .withFragmentSize(parameters.fragmentSize()) // + .withNoMatchSize(parameters.noMatchSize()) // + .withNumberOfFragments(parameters.numberOfFragments()) // + .withHighlightQuery(highlightQuery) // + .withOrder(parameters.order()) // + .withPhraseLimit(parameters.phraseLimit()) // + .withPreTags(parameters.preTags()) // + .withPostTags(parameters.postTags()) // + .withRequireFieldMatch(parameters.requireFieldMatch()) // + .withTagsSchema(parameters.tagsSchema()) // + .withType(parameters.type()) // + .build(); + + List highlightFields = Arrays.stream(highlight.fields()) // + .map(HighlightField::of) // + .toList(); + + return new Highlight(highlightParameters, highlightFields); + } +} diff --git a/src/test/java/org/springframework/data/elasticsearch/repositories/custommethod/CustomMethodRepositoryIntegrationTests.java b/src/test/java/org/springframework/data/elasticsearch/repositories/custommethod/CustomMethodRepositoryIntegrationTests.java index c593a42a1..8760c1e25 100644 
--- a/src/test/java/org/springframework/data/elasticsearch/repositories/custommethod/CustomMethodRepositoryIntegrationTests.java +++ b/src/test/java/org/springframework/data/elasticsearch/repositories/custommethod/CustomMethodRepositoryIntegrationTests.java @@ -32,7 +32,6 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.condition.DisabledIf; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Version; @@ -46,9 +45,9 @@ import org.springframework.data.elasticsearch.annotations.Field; import org.springframework.data.elasticsearch.annotations.Highlight; import org.springframework.data.elasticsearch.annotations.HighlightField; +import org.springframework.data.elasticsearch.annotations.HighlightParameters; import org.springframework.data.elasticsearch.annotations.Query; import org.springframework.data.elasticsearch.annotations.SourceFilters; -import org.springframework.data.elasticsearch.core.AbstractElasticsearchTemplate; import org.springframework.data.elasticsearch.core.ElasticsearchOperations; import org.springframework.data.elasticsearch.core.SearchHit; import org.springframework.data.elasticsearch.core.SearchHits; @@ -76,6 +75,7 @@ * @author Peter-Josef Meisch * @author Rasmus Faber-Espensen * @author James Mudd + * @author Haibo Liu */ @SpringIntegrationTest public abstract class CustomMethodRepositoryIntegrationTests { @@ -1548,6 +1548,26 @@ void shouldReturnHighlightsOnAnnotatedStringQueryMethod() { assertThat(searchHit.getHighlightField("type")).hasSize(1).contains("abc"); } + @Test + void shouldReturnDifferentHighlightsOnAnnotatedStringQueryMethod() { + List entities = createSampleEntities("abc xyz", 2); + repository.saveAll(entities); + + // when + SearchHits highlightAbcHits = repository.queryByStringWithSeparateHighlight("abc", "abc"); + + assertThat(highlightAbcHits.getTotalHits()).isEqualTo(2); + SearchHit highlightAbcHit = highlightAbcHits.getSearchHit(0); + assertThat(highlightAbcHit.getHighlightField("type")).hasSize(1).contains("abc xyz"); + + // when + SearchHits highlightXyzHits = repository.queryByStringWithSeparateHighlight("abc", "xyz"); + + assertThat(highlightXyzHits.getTotalHits()).isEqualTo(2); + SearchHit highlightXyzHit = highlightXyzHits.getSearchHit(0); + assertThat(highlightXyzHit.getHighlightField("type")).hasSize(1).contains("abc xyz"); + } + @Test // DATAES-734 void shouldUseGeoSortParameter() { GeoPoint munich = new GeoPoint(48.137154, 11.5761247); @@ -1920,6 +1940,41 @@ public interface SampleCustomMethodRepository extends ElasticsearchRepository queryByString(String type); + @Query(""" + { + "bool":{ + "must":[ + { + "match":{ + "type":"?0" + } + } + ] + } + } + """ + ) + @Highlight( + fields = {@HighlightField(name = "type")}, + parameters = @HighlightParameters( + highlightQuery = @Query(""" + { + "bool":{ + "must":[ + { + "match":{ + "type":"?1" + } + } + ] + } + } + """ + ) + ) + ) + SearchHits queryByStringWithSeparateHighlight(String type, String highlight); + List> queryByMessage(String message); Stream> readByMessage(String message); From 434de11f3d077dd536f0adbf1ebf7b6c0f496866 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Sun, 17 Dec 2023 18:19:51 +0100 Subject: [PATCH 015/204] Polishing. 
--- .../highlight/HighlightCommonParameters.java | 3 +-- .../query/ElasticsearchQueryMethod.java | 19 ++++++++++--------- .../repository/query/HighlightConverter.java | 8 ++++---- ...ustomMethodRepositoryIntegrationTests.java | 10 +++------- 4 files changed, 18 insertions(+), 22 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java index 428072ef2..db94150b5 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/query/highlight/HighlightCommonParameters.java @@ -141,8 +141,7 @@ public static abstract class HighlightCommonParametersBuilder queryByStringWithSeparateHighlight(String type, String highlight); List> queryByMessage(String message); From 21a1fbca0fb58ef19605fc8b1ee84dd5d4d0f7a5 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Mon, 18 Dec 2023 15:18:37 +0100 Subject: [PATCH 016/204] Clarified dependencies in the documentation --- .../antora/modules/ROOT/pages/elasticsearch/versions.adoc | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc index 5b2223b83..c00aab9f2 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc @@ -2,11 +2,14 @@ = Versions The following table shows the Elasticsearch versions that are used by Spring Data release trains and version of Spring Data Elasticsearch included in that, as well as the Spring Boot versions referring to that particular Spring Data release train. -The Elasticsearch version given shows with which client libraries Spring Data Elasticsearch was built and tested. + +The Elasticsearch version given shows with which client libraries Spring Data Elasticsearch was built with and tested. + +Note that Spring Data Elasticsearch **does not** depend on Spring Boot, the version given here shows the Sprig Boot version that uses Spring Data Elasticsearch. [cols="^,^,^,^,^",options="header"] |=== -| Spring Data Release Train | Spring Data Elasticsearch | Elasticsearch | Spring Framework | Spring Boot +| Spring Data Release Train | Spring Data Elasticsearch | Elasticsearch | Spring Framework | used by Spring Boot | 2024.0 (?) | 5.3.x | 8.11.2 | ? | ? | 2023.1 (Vaughan) | 5.2.x | 8.11.1 | 6.1.x | 3.2.x | 2023.0 (Ullmann) | 5.1.x | 8.7.1 | 6.0.x | 3.1.x From 02bd3e60f854360de90e867bb87baef874546edf Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 21 Dec 2023 14:50:18 +0100 Subject: [PATCH 017/204] ClientConfigurer is only available in MaybeSecureClientConfigurationBuilder. The documentation code does not compile and I'm not sure if it's a bug in the code or a miss in the documentation. When you want to configure a client, the doc says to use: ```java ClientConfiguration.builder().withClientConfigurer( // ... ``` But `withClientConfigurer(ClientConfiguration.ClientConfigurationCallback clientConfigurer)` is only available in `TerminalClientConfigurationBuilder` interface. And `ClientConfiguration.builder()` returns a `ClientConfigurationBuilderWithRequiredEndpoint` interface. 
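Assembled, the corrected documentation snippet therefore supplies the required endpoint first and only then configures the client. A minimal sketch of the full chain (the host names are the placeholder values already used in the docs, and the terminal `build()` call is assumed):

```java
ClientConfiguration clientConfiguration = ClientConfiguration.builder()
        .connectedTo("localhost:9200", "localhost:9291")   // required endpoint, returns the terminal builder
        .withClientConfigurer(                              // only available on the terminal builder
                ElasticsearchClients.ElasticsearchRestClientConfigurationCallback.from(restClientBuilder -> {
                    // configure the Elasticsearch RestClient
                    return restClientBuilder;
                }))
        .build();
```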
--- src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc index 0c2b4a66f..0f8d8c102 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/clients.adoc @@ -192,6 +192,7 @@ This callback provides a `org.elasticsearch.client.RestClientBuilder` that can b [source,java] ---- ClientConfiguration.builder() + .connectedTo("localhost:9200", "localhost:9291") .withClientConfigurer(ElasticsearchClients.ElasticsearchRestClientConfigurationCallback.from(restClientBuilder -> { // configure the Elasticsearch RestClient return restClientBuilder; @@ -210,6 +211,7 @@ used by the `RestClient`. [source,java] ---- ClientConfiguration.builder() + .connectedTo("localhost:9200", "localhost:9291") .withClientConfigurer(ElasticsearchClients.ElasticsearchHttpClientConfigurationCallback.from(httpAsyncClientBuilder -> { // configure the HttpAsyncClient return httpAsyncClientBuilder; From 6350514e7ee6b897d0dda126f159c8d3412f7eab Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Sun, 24 Dec 2023 14:47:14 +0100 Subject: [PATCH 018/204] Update documentation. Original Pull Request #2811 Closes #2810 --- .../ROOT/pages/elasticsearch/versions.adoc | 36 +++++++++---------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc index c00aab9f2..6845d6363 100644 --- a/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc +++ b/src/main/antora/modules/ROOT/pages/elasticsearch/versions.adoc @@ -1,28 +1,24 @@ [[preface.versions]] = Versions -The following table shows the Elasticsearch versions that are used by Spring Data release trains and version of Spring Data Elasticsearch included in that, as well as the Spring Boot versions referring to that particular Spring Data release train. +The following table shows the Elasticsearch and Spring versions that are used by Spring Data release trains and the version of Spring Data Elasticsearch included in that. -The Elasticsearch version given shows with which client libraries Spring Data Elasticsearch was built with and tested. - -Note that Spring Data Elasticsearch **does not** depend on Spring Boot, the version given here shows the Sprig Boot version that uses Spring Data Elasticsearch. - -[cols="^,^,^,^,^",options="header"] +[cols="^,^,^,^",options="header"] |=== -| Spring Data Release Train | Spring Data Elasticsearch | Elasticsearch | Spring Framework | used by Spring Boot -| 2024.0 (?) | 5.3.x | 8.11.2 | ? | ? 
-| 2023.1 (Vaughan) | 5.2.x | 8.11.1 | 6.1.x | 3.2.x -| 2023.0 (Ullmann) | 5.1.x | 8.7.1 | 6.0.x | 3.1.x -| 2022.0 (Turing) | 5.0.xfootnote:oom[Out of maintenance] | 8.5.3 | 6.0.x | 3.0.x -| 2021.2 (Raj) | 4.4.xfootnote:oom[] | 7.17.3 | 5.3.x | 2.7.x -| 2021.1 (Q) | 4.3.xfootnote:oom[] | 7.15.2 | 5.3.x | 2.6.x -| 2021.0 (Pascal) | 4.2.xfootnote:oom[] | 7.12.0 | 5.3.x | 2.5.x -| 2020.0 (Ockham) | 4.1.xfootnote:oom[] | 7.9.3 | 5.3.2 | 2.4.x -| Neumann | 4.0.xfootnote:oom[] | 7.6.2 | 5.2.12 |2.3.x -| Moore | 3.2.xfootnote:oom[] |6.8.12 | 5.2.12| 2.2.x -| Lovelace | 3.1.xfootnote:oom[] | 6.2.2 | 5.1.19 |2.1.x -| Kay | 3.0.xfootnote:oom[] | 5.5.0 | 5.0.13 | 2.0.x -| Ingalls | 2.1.xfootnote:oom[] | 2.4.0 | 4.3.25 | 1.5.x +| Spring Data Release Train | Spring Data Elasticsearch | Elasticsearch | Spring Framework +| 2024.0 (?) | 5.3.x | 8.11.2 | ? +| 2023.1 (Vaughan) | 5.2.x | 8.11.1 | 6.1.x +| 2023.0 (Ullmann) | 5.1.x | 8.7.1 | 6.0.x +| 2022.0 (Turing) | 5.0.xfootnote:oom[Out of maintenance] | 8.5.3 | 6.0.x +| 2021.2 (Raj) | 4.4.xfootnote:oom[] | 7.17.3 | 5.3.x +| 2021.1 (Q) | 4.3.xfootnote:oom[] | 7.15.2 | 5.3.x +| 2021.0 (Pascal) | 4.2.xfootnote:oom[] | 7.12.0 | 5.3.x +| 2020.0 (Ockham) | 4.1.xfootnote:oom[] | 7.9.3 | 5.3.2 +| Neumann | 4.0.xfootnote:oom[] | 7.6.2 | 5.2.12 +| Moore | 3.2.xfootnote:oom[] |6.8.12 | 5.2.12 +| Lovelace | 3.1.xfootnote:oom[] | 6.2.2 | 5.1.19 +| Kay | 3.0.xfootnote:oom[] | 5.5.0 | 5.0.13 +| Ingalls | 2.1.xfootnote:oom[] | 2.4.0 | 4.3.25 |=== Support for upcoming versions of Elasticsearch is being tracked and general compatibility should be given assuming the usage of the xref:elasticsearch/template.adoc[ElasticsearchOperations interface]. From 433d52981e598ed6e44bdcdae5df11e28d5a1e1d Mon Sep 17 00:00:00 2001 From: puppylpg Date: Thu, 28 Dec 2023 19:57:44 +0800 Subject: [PATCH 019/204] Expose search shard statistics in search hits. 
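The new statistics are exposed on `SearchHits`; a rough usage sketch follows (the `Product` entity, the `operations` instance and the `query` are placeholders, not part of this change):

```java
SearchHits<Product> hits = operations.search(query, Product.class);
SearchShardStatistics stats = hits.getSearchShardStatistics();

if (stats != null && stats.isFailed()) {
    // the search returned partial results; inspect which shards failed and why
    for (SearchShardStatistics.Failure failure : stats.getFailures()) {
        ElasticsearchErrorCause cause = failure.getElasticsearchErrorCause();
        System.err.println("shard " + failure.getShard() + " of index " + failure.getIndex()
                + " failed: " + (cause != null ? cause.getReason() : "unknown"));
    }
}
```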
Original Pull Request #2806 Closes #2605 --- .../client/elc/DocumentAdapters.java | 3 +- .../client/elc/ResponseConverter.java | 2 +- .../elc/SearchDocumentResponseBuilder.java | 27 +++- .../elasticsearch/core/SearchHitMapping.java | 7 +- .../data/elasticsearch/core/SearchHits.java | 7 + .../elasticsearch/core/SearchHitsImpl.java | 12 +- .../core/SearchShardStatistics.java | 130 ++++++++++++++++++ .../ElasticsearchCustomConversions.java | 2 +- .../core/document/SearchDocumentResponse.java | 12 +- .../AbstractElasticsearchRepositoryQuery.java | 20 +-- .../repository/query/HighlightConverter.java | 2 +- ...earchDocumentResponseBuilderUnitTests.java | 69 +++++++++- .../core/SearchHitSupportTest.java | 3 +- .../elasticsearch/core/StreamQueriesTest.java | 3 +- 14 files changed, 266 insertions(+), 33 deletions(-) create mode 100644 src/main/java/org/springframework/data/elasticsearch/core/SearchShardStatistics.java diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java index fbd3b622a..151d090ea 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/DocumentAdapters.java @@ -49,6 +49,7 @@ * {@link org.springframework.data.elasticsearch.core.document.Document} * * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.4 */ final class DocumentAdapters { @@ -73,7 +74,7 @@ public static SearchDocument from(Hit hit, JsonpMapper jsonpMapper) { Map innerHits = new LinkedHashMap<>(); hit.innerHits().forEach((name, innerHitsResult) -> { // noinspection ReturnOfNull - innerHits.put(name, SearchDocumentResponseBuilder.from(innerHitsResult.hits(), null, null, null, null, + innerHits.put(name, SearchDocumentResponseBuilder.from(innerHitsResult.hits(), null, null, null, null, null, searchDocument -> null, jsonpMapper)); }); diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java index 9083fc8ae..7ad69c2aa 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java @@ -541,7 +541,7 @@ private long timeToLong(Time time) { } @Nullable - private static ElasticsearchErrorCause toErrorCause(@Nullable ErrorCause errorCause) { + static ElasticsearchErrorCause toErrorCause(@Nullable ErrorCause errorCause) { if (errorCause != null) { return new ElasticsearchErrorCause( // diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java index 00293b407..68148ef21 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java @@ -15,6 +15,8 @@ */ package org.springframework.data.elasticsearch.client.elc; +import co.elastic.clients.elasticsearch._types.ShardFailure; +import co.elastic.clients.elasticsearch._types.ShardStatistics; import co.elastic.clients.elasticsearch._types.aggregations.Aggregate; import co.elastic.clients.elasticsearch.core.SearchResponse; import 
co.elastic.clients.elasticsearch.core.SearchTemplateResponse; @@ -36,6 +38,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.data.elasticsearch.core.SearchShardStatistics; import org.springframework.data.elasticsearch.core.TotalHitsRelation; import org.springframework.data.elasticsearch.core.document.SearchDocument; import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse; @@ -52,6 +55,7 @@ * Factory class to create {@link SearchDocumentResponse} instances. * * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.4 */ class SearchDocumentResponseBuilder { @@ -78,8 +82,9 @@ public static SearchDocumentResponse from(ResponseBody response Map aggregations = responseBody.aggregations(); Map>> suggest = responseBody.suggest(); var pointInTimeId = responseBody.pitId(); + var shards = responseBody.shards(); - return from(hitsMetadata, scrollId, pointInTimeId, aggregations, suggest, entityCreator, jsonpMapper); + return from(hitsMetadata, shards, scrollId, pointInTimeId, aggregations, suggest, entityCreator, jsonpMapper); } /** @@ -98,13 +103,14 @@ public static SearchDocumentResponse from(SearchTemplateResponse SearchDocumentResponse from(SearchTemplateResponse SearchDocumentResponse from(HitsMetadata hitsMetadata, @Nullable String scrollId, - @Nullable String pointInTimeId, @Nullable Map aggregations, + public static SearchDocumentResponse from(HitsMetadata hitsMetadata, @Nullable ShardStatistics shards, + @Nullable String scrollId, @Nullable String pointInTimeId, @Nullable Map aggregations, Map>> suggestES, SearchDocumentResponse.EntityCreator entityCreator, JsonpMapper jsonpMapper) { @@ -155,8 +161,19 @@ public static SearchDocumentResponse from(HitsMetadata hitsMetadata, @Nul Suggest suggest = suggestFrom(suggestES, entityCreator); + SearchShardStatistics shardStatistics = shards != null ? 
shardsFrom(shards) : null; + return new SearchDocumentResponse(totalHits, totalHitsRelation, maxScore, scrollId, pointInTimeId, searchDocuments, - aggregationsContainer, suggest); + aggregationsContainer, suggest, shardStatistics); + } + + private static SearchShardStatistics shardsFrom(ShardStatistics shards) { + List failures = shards.failures(); + List searchFailures = failures.stream() + .map(f -> SearchShardStatistics.Failure.of(f.index(), f.node(), f.status(), f.shard(), null, + ResponseConverter.toErrorCause(f.reason()))) + .toList(); + return SearchShardStatistics.of(shards.failed(), shards.successful(), shards.total(), shards.skipped(), searchFailures); } @Nullable diff --git a/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java b/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java index 357ec3ed1..e6dc1ba7a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java @@ -46,6 +46,7 @@ * @author Matt Gilene * @author Sascha Woo * @author Jakob Hoeper + * @author Haibo Liu * @since 4.0 */ public class SearchHitMapping { @@ -84,6 +85,7 @@ private SearchHitsImpl mapHitsFromResponse(SearchDocumentResponse searchDocum "Count of documents must match the count of entities"); long totalHits = searchDocumentResponse.getTotalHits(); + SearchShardStatistics shardStatistics = searchDocumentResponse.getSearchShardStatistics(); float maxScore = searchDocumentResponse.getMaxScore(); String scrollId = searchDocumentResponse.getScrollId(); String pointInTimeId = searchDocumentResponse.getPointInTimeId(); @@ -103,7 +105,7 @@ private SearchHitsImpl mapHitsFromResponse(SearchDocumentResponse searchDocum mapHitsInCompletionSuggestion(suggest); return new SearchHitsImpl<>(totalHits, totalHitsRelation, maxScore, scrollId, pointInTimeId, searchHits, - aggregations, suggest); + aggregations, suggest, shardStatistics); } @SuppressWarnings("unchecked") @@ -240,7 +242,8 @@ private SearchHits mapInnerDocuments(SearchHits searchHits, C searchHits.getPointInTimeId(), // convertedSearchHits, // searchHits.getAggregations(), // - searchHits.getSuggest()); + searchHits.getSuggest(), + searchHits.getSearchShardStatistics()); } } catch (Exception e) { throw new UncategorizedElasticsearchException("Unable to convert inner hits.", e); diff --git a/src/main/java/org/springframework/data/elasticsearch/core/SearchHits.java b/src/main/java/org/springframework/data/elasticsearch/core/SearchHits.java index 935680879..a42ddd18b 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/SearchHits.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/SearchHits.java @@ -27,6 +27,7 @@ * * @param the result data class. * @author Sascha Woo + * @author Haibo Liu * @since 4.0 */ public interface SearchHits extends Streamable> { @@ -108,4 +109,10 @@ default Iterator> iterator() { */ @Nullable String getPointInTimeId(); + + /** + * @return shard statistics for the search hit. 
+ */ + @Nullable + SearchShardStatistics getSearchShardStatistics(); } diff --git a/src/main/java/org/springframework/data/elasticsearch/core/SearchHitsImpl.java b/src/main/java/org/springframework/data/elasticsearch/core/SearchHitsImpl.java index e80f1e3c7..e03307bb8 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/SearchHitsImpl.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/SearchHitsImpl.java @@ -29,6 +29,7 @@ * @param the result data class. * @author Peter-Josef Meisch * @author Sascha Woo + * @author Haibo Liu * @since 4.0 */ public class SearchHitsImpl implements SearchScrollHits { @@ -42,6 +43,7 @@ public class SearchHitsImpl implements SearchScrollHits { @Nullable private final AggregationsContainer aggregations; @Nullable private final Suggest suggest; @Nullable private String pointInTimeId; + @Nullable private final SearchShardStatistics searchShardStatistics; /** * @param totalHits the number of total hits for the search @@ -53,7 +55,8 @@ public class SearchHitsImpl implements SearchScrollHits { */ public SearchHitsImpl(long totalHits, TotalHitsRelation totalHitsRelation, float maxScore, @Nullable String scrollId, @Nullable String pointInTimeId, List> searchHits, - @Nullable AggregationsContainer aggregations, @Nullable Suggest suggest) { + @Nullable AggregationsContainer aggregations, @Nullable Suggest suggest, + @Nullable SearchShardStatistics searchShardStatistics) { Assert.notNull(searchHits, "searchHits must not be null"); @@ -66,6 +69,7 @@ public SearchHitsImpl(long totalHits, TotalHitsRelation totalHitsRelation, float this.aggregations = aggregations; this.suggest = suggest; this.unmodifiableSearchHits = Lazy.of(() -> Collections.unmodifiableList(searchHits)); + this.searchShardStatistics = searchShardStatistics; } // region getter @@ -118,6 +122,11 @@ public String getPointInTimeId() { return pointInTimeId; } + @Override + public SearchShardStatistics getSearchShardStatistics() { + return searchShardStatistics; + } + @Override public String toString() { return "SearchHits{" + // @@ -128,6 +137,7 @@ public String toString() { ", pointInTimeId='" + pointInTimeId + '\'' + // ", searchHits={" + searchHits.size() + " elements}" + // ", aggregations=" + aggregations + // + ", shardStatistics=" + searchShardStatistics + // '}'; } } diff --git a/src/main/java/org/springframework/data/elasticsearch/core/SearchShardStatistics.java b/src/main/java/org/springframework/data/elasticsearch/core/SearchShardStatistics.java new file mode 100644 index 000000000..c623408ad --- /dev/null +++ b/src/main/java/org/springframework/data/elasticsearch/core/SearchShardStatistics.java @@ -0,0 +1,130 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.elasticsearch.core; + +import java.util.List; + +import org.springframework.data.elasticsearch.ElasticsearchErrorCause; +import org.springframework.lang.Nullable; + +/** + * @author Haibo Liu + * @since 5.3 + */ +public class SearchShardStatistics { + private final Number failed; + + private final Number successful; + + private final Number total; + + @Nullable private final Number skipped; + + private final List failures; + + private SearchShardStatistics(Number failed, Number successful, Number total, @Nullable Number skipped, + List failures) { + this.failed = failed; + this.successful = successful; + this.total = total; + this.skipped = skipped; + this.failures = failures; + } + + public static SearchShardStatistics of(Number failed, Number successful, Number total, @Nullable Number skipped, + List failures) { + return new SearchShardStatistics(failed, successful, total, skipped, failures); + } + + public Number getFailed() { + return failed; + } + + public Number getSuccessful() { + return successful; + } + + public Number getTotal() { + return total; + } + + @Nullable + public Number getSkipped() { + return skipped; + } + + public boolean isFailed() { + return failed.intValue() > 0; + } + + public List getFailures() { + return failures; + } + + public static class Failure { + @Nullable private final String index; + @Nullable private final String node; + @Nullable private final String status; + private final int shard; + @Nullable private final Exception exception; + @Nullable private final ElasticsearchErrorCause elasticsearchErrorCause; + + private Failure(@Nullable String index, @Nullable String node, @Nullable String status, int shard, + @Nullable Exception exception, @Nullable ElasticsearchErrorCause elasticsearchErrorCause) { + this.index = index; + this.node = node; + this.status = status; + this.shard = shard; + this.exception = exception; + this.elasticsearchErrorCause = elasticsearchErrorCause; + } + + public static SearchShardStatistics.Failure of(@Nullable String index, @Nullable String node, + @Nullable String status, int shard, @Nullable Exception exception, + @Nullable ElasticsearchErrorCause elasticsearchErrorCause) { + return new SearchShardStatistics.Failure(index, node, status, shard, exception, elasticsearchErrorCause); + } + + @Nullable + public String getIndex() { + return index; + } + + @Nullable + public String getNode() { + return node; + } + + @Nullable + public String getStatus() { + return status; + } + + @Nullable + public Exception getException() { + return exception; + } + + public int getShard() { + return shard; + } + + @Nullable + public ElasticsearchErrorCause getElasticsearchErrorCause() { + return elasticsearchErrorCause; + } + } +} diff --git a/src/main/java/org/springframework/data/elasticsearch/core/convert/ElasticsearchCustomConversions.java b/src/main/java/org/springframework/data/elasticsearch/core/convert/ElasticsearchCustomConversions.java index dbf197f69..66a4262cd 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/convert/ElasticsearchCustomConversions.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/convert/ElasticsearchCustomConversions.java @@ -129,7 +129,7 @@ public Double convert(BigDecimal source) { @WritingConverter enum ByteArrayToBase64Converter implements Converter { - INSTANCE,; + INSTANCE; @Override public String convert(byte[] source) { diff --git a/src/main/java/org/springframework/data/elasticsearch/core/document/SearchDocumentResponse.java 
b/src/main/java/org/springframework/data/elasticsearch/core/document/SearchDocumentResponse.java index adac6231e..7fcc1db94 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/document/SearchDocumentResponse.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/document/SearchDocumentResponse.java @@ -20,6 +20,7 @@ import java.util.function.Function; import org.springframework.data.elasticsearch.core.AggregationsContainer; +import org.springframework.data.elasticsearch.core.SearchShardStatistics; import org.springframework.data.elasticsearch.core.suggest.response.Suggest; import org.springframework.lang.Nullable; @@ -27,6 +28,7 @@ * This represents the complete search response from Elasticsearch, including the returned documents. * * @author Peter-Josef Meisch + * @author Haibo Liu * @since 4.0 */ public class SearchDocumentResponse { @@ -40,10 +42,12 @@ public class SearchDocumentResponse { @Nullable private final Suggest suggest; @Nullable String pointInTimeId; + @Nullable private final SearchShardStatistics searchShardStatistics; public SearchDocumentResponse(long totalHits, String totalHitsRelation, float maxScore, @Nullable String scrollId, @Nullable String pointInTimeId, List searchDocuments, - @Nullable AggregationsContainer aggregationsContainer, @Nullable Suggest suggest) { + @Nullable AggregationsContainer aggregationsContainer, @Nullable Suggest suggest, + @Nullable SearchShardStatistics searchShardStatistics) { this.totalHits = totalHits; this.totalHitsRelation = totalHitsRelation; this.maxScore = maxScore; @@ -52,6 +56,7 @@ public SearchDocumentResponse(long totalHits, String totalHitsRelation, float ma this.searchDocuments = searchDocuments; this.aggregations = aggregationsContainer; this.suggest = suggest; + this.searchShardStatistics = searchShardStatistics; } public long getTotalHits() { @@ -93,6 +98,11 @@ public String getPointInTimeId() { return pointInTimeId; } + @Nullable + public SearchShardStatistics getSearchShardStatistics() { + return searchShardStatistics; + } + /** * A function to convert a {@link SearchDocument} async into an entity. Asynchronous so that it can be used from the * imperative and the reactive code. 
diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/AbstractElasticsearchRepositoryQuery.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/AbstractElasticsearchRepositoryQuery.java index b5814169c..d139068b5 100644 --- a/src/main/java/org/springframework/data/elasticsearch/repository/query/AbstractElasticsearchRepositoryQuery.java +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/AbstractElasticsearchRepositoryQuery.java @@ -15,14 +15,10 @@ */ package org.springframework.data.elasticsearch.repository.query; -import java.util.Collections; - import org.springframework.data.domain.PageRequest; import org.springframework.data.elasticsearch.core.ElasticsearchOperations; import org.springframework.data.elasticsearch.core.SearchHitSupport; import org.springframework.data.elasticsearch.core.SearchHits; -import org.springframework.data.elasticsearch.core.SearchHitsImpl; -import org.springframework.data.elasticsearch.core.TotalHitsRelation; import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates; import org.springframework.data.elasticsearch.core.query.BaseQuery; @@ -42,6 +38,7 @@ * @author Rizwan Idrees * @author Mohsin Husen * @author Peter-Josef Meisch + * @author Haibo Liu */ public abstract class AbstractElasticsearchRepositoryQuery implements RepositoryQuery { @@ -107,24 +104,13 @@ public Object execute(Object[] parameters) { : PageRequest.of(0, DEFAULT_STREAM_BATCH_SIZE)); result = StreamUtils.createStreamFromIterator(elasticsearchOperations.searchForStream(query, clazz, index)); } else if (queryMethod.isCollectionQuery()) { - if (parameterAccessor.getPageable().isUnpaged()) { int itemCount = (int) elasticsearchOperations.count(query, clazz, index); - - if (itemCount == 0) { - result = new SearchHitsImpl<>(0, TotalHitsRelation.EQUAL_TO, Float.NaN, null, - query.getPointInTime() != null ? query.getPointInTime().id() : null, Collections.emptyList(), null, null); - } else { - query.setPageable(PageRequest.of(0, Math.max(1, itemCount))); - } + query.setPageable(PageRequest.of(0, Math.max(1, itemCount))); } else { query.setPageable(parameterAccessor.getPageable()); } - - if (result == null) { - result = elasticsearchOperations.search(query, clazz, index); - } - + result = elasticsearchOperations.search(query, clazz, index); } else { result = elasticsearchOperations.searchOne(query, clazz, index); } diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java b/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java index 06334192f..8e4f80a45 100644 --- a/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java +++ b/src/main/java/org/springframework/data/elasticsearch/repository/query/HighlightConverter.java @@ -1,5 +1,5 @@ /* - * Copyright 2013-2023 the original author or authors. + * Copyright 2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java b/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java index d53e54079..e1980a06c 100644 --- a/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java +++ b/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java @@ -15,26 +15,35 @@ */ package org.springframework.data.elasticsearch.client.elc; +import co.elastic.clients.elasticsearch._types.ShardFailure; +import co.elastic.clients.elasticsearch._types.ShardStatistics; import co.elastic.clients.elasticsearch.core.search.HitsMetadata; import co.elastic.clients.elasticsearch.core.search.Suggestion; import co.elastic.clients.elasticsearch.core.search.TotalHitsRelation; +import co.elastic.clients.json.JsonData; import co.elastic.clients.json.jackson.JacksonJsonpMapper; import java.util.ArrayList; import java.util.List; +import java.util.Map; import org.assertj.core.api.SoftAssertions; import org.json.JSONException; import org.junit.jupiter.api.Test; +import org.springframework.data.elasticsearch.ElasticsearchErrorCause; +import org.springframework.data.elasticsearch.core.SearchShardStatistics; import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import static org.assertj.core.api.Assertions.*; + /** * Tests for the factory class to create {@link SearchDocumentResponse} instances. * * @author Sébastien Comeau + * @author Haibo Liu * @since 5.2 */ class SearchDocumentResponseBuilderUnitTests { @@ -73,7 +82,7 @@ void shouldGetPhraseSuggestion() throws JSONException { .build(); // act - final var actual = SearchDocumentResponseBuilder.from(hitsMetadata, null, null, null, sortProperties, null, + final var actual = SearchDocumentResponseBuilder.from(hitsMetadata, null, null, null, null, sortProperties, null, jsonpMapper); // assert @@ -108,4 +117,62 @@ void shouldGetPhraseSuggestion() throws JSONException { softly.assertAll(); } + + @Test // #2605 + void shouldGetShardStatisticsInfo() { + // arrange + HitsMetadata hitsMetadata = new HitsMetadata.Builder() + .total(t -> t + .value(0) + .relation(TotalHitsRelation.Eq)) + .hits(new ArrayList<>()) + .build(); + + ShardStatistics shards = new ShardStatistics.Builder() + .total(15) + .successful(14) + .skipped(0) + .failed(1) + .failures(List.of( + ShardFailure.of(sfb -> sfb + .index("test-index") + .node("test-node") + .shard(1) + .reason(rb -> rb + .reason("this is a mock failure in shards") + .causedBy(cbb -> + cbb.reason("inner reason") + .metadata(Map.of("hello", JsonData.of("world"))) + ) + .type("reason-type") + + ) + .status("fail") + ) + )) + .build(); + + // act + SearchDocumentResponse response = SearchDocumentResponseBuilder.from(hitsMetadata, shards, null, null, + null, null, null, jsonpMapper); + + // assert + SearchShardStatistics shardStatistics = response.getSearchShardStatistics(); + assertThat(shardStatistics).isNotNull(); + assertThat(shardStatistics.getTotal()).isEqualTo(15); + assertThat(shardStatistics.getSuccessful()).isEqualTo(14); + assertThat(shardStatistics.getSkipped()).isEqualTo(0); + assertThat(shardStatistics.getFailed()).isEqualTo(1); + // assert failure + List failures = shardStatistics.getFailures(); + assertThat(failures.size()).isEqualTo(1); + 
assertThat(failures).extracting(SearchShardStatistics.Failure::getIndex).containsExactly("test-index"); + assertThat(failures).extracting(SearchShardStatistics.Failure::getElasticsearchErrorCause) + .extracting(ElasticsearchErrorCause::getReason) + .containsExactly("this is a mock failure in shards"); + assertThat(failures).extracting(SearchShardStatistics.Failure::getElasticsearchErrorCause) + .extracting(ElasticsearchErrorCause::getCausedBy) + .extracting(ElasticsearchErrorCause::getReason) + .containsExactly("inner reason"); + } } diff --git a/src/test/java/org/springframework/data/elasticsearch/core/SearchHitSupportTest.java b/src/test/java/org/springframework/data/elasticsearch/core/SearchHitSupportTest.java index fcbef6b9a..ae95cc0b4 100644 --- a/src/test/java/org/springframework/data/elasticsearch/core/SearchHitSupportTest.java +++ b/src/test/java/org/springframework/data/elasticsearch/core/SearchHitSupportTest.java @@ -32,6 +32,7 @@ /** * @author Roman Puchkovskiy * @author Peter-Josef Meisch + * @author Haibo Liu */ class SearchHitSupportTest { @@ -65,7 +66,7 @@ void shouldReturnTheSameListInstanceInSearchHitsAndGetContent() { hits.add(new SearchHit<>(null, null, null, 0, null, null, null, null, null, null, "five")); SearchHits originalSearchHits = new SearchHitsImpl<>(hits.size(), TotalHitsRelation.EQUAL_TO, 0, "scroll", - null, hits, null, null); + null, hits, null, null, null); SearchPage searchPage = SearchHitSupport.searchPageFor(originalSearchHits, PageRequest.of(0, 3)); SearchHits searchHits = searchPage.getSearchHits(); diff --git a/src/test/java/org/springframework/data/elasticsearch/core/StreamQueriesTest.java b/src/test/java/org/springframework/data/elasticsearch/core/StreamQueriesTest.java index 7bcbd93cf..3580f6c80 100644 --- a/src/test/java/org/springframework/data/elasticsearch/core/StreamQueriesTest.java +++ b/src/test/java/org/springframework/data/elasticsearch/core/StreamQueriesTest.java @@ -31,6 +31,7 @@ /** * @author Sascha Woo * @author Peter-Josef Meisch + * @author Haibo Liu */ public class StreamQueriesTest { @@ -180,6 +181,6 @@ void shouldOnlyReturnRequestedCount() { } private SearchScrollHits newSearchScrollHits(List> hits, String scrollId) { - return new SearchHitsImpl<>(hits.size(), TotalHitsRelation.EQUAL_TO, 0, scrollId, null, hits, null, null); + return new SearchHitsImpl<>(hits.size(), TotalHitsRelation.EQUAL_TO, 0, scrollId, null, hits, null, null, null); } } From b0c97ccf27e3592dbe3bc9800afb4951fd1fdab5 Mon Sep 17 00:00:00 2001 From: Peter-Josef Meisch Date: Thu, 28 Dec 2023 13:53:42 +0100 Subject: [PATCH 020/204] Polishing --- .../client/elc/ResponseConverter.java | 5 ++--- .../client/elc/SearchDocumentResponseBuilder.java | 14 ++++++-------- .../data/elasticsearch/core/SearchHitMapping.java | 14 +++++++------- .../SearchDocumentResponseBuilderUnitTests.java | 10 +++------- 4 files changed, 18 insertions(+), 25 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java index 7ad69c2aa..9449682c1 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ResponseConverter.java @@ -121,8 +121,7 @@ private TemplateResponse clusterGetComponentTemplate( .build(); } - private TemplateResponseData clusterGetComponentTemplateData( - ComponentTemplateSummary componentTemplateSummary) { + private 
TemplateResponseData clusterGetComponentTemplateData(ComponentTemplateSummary componentTemplateSummary) { var mapping = typeMapping(componentTemplateSummary.mappings()); var settings = new Settings(); @@ -326,7 +325,7 @@ private TemplateResponse indexGetComponentTemplate(IndexTemplateItem indexTempla } private TemplateResponseData indexGetComponentTemplateData(IndexTemplateSummary indexTemplateSummary, - List composedOf) { + List composedOf) { var mapping = typeMapping(indexTemplateSummary.mappings()); Function indexSettingsToSettings = indexSettings -> { diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java index 68148ef21..0eddc80d8 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilder.java @@ -169,11 +169,10 @@ public static SearchDocumentResponse from(HitsMetadata hitsMetadata, @Nul private static SearchShardStatistics shardsFrom(ShardStatistics shards) { List failures = shards.failures(); - List searchFailures = failures.stream() - .map(f -> SearchShardStatistics.Failure.of(f.index(), f.node(), f.status(), f.shard(), null, - ResponseConverter.toErrorCause(f.reason()))) - .toList(); - return SearchShardStatistics.of(shards.failed(), shards.successful(), shards.total(), shards.skipped(), searchFailures); + List searchFailures = failures.stream().map(f -> SearchShardStatistics.Failure + .of(f.index(), f.node(), f.status(), f.shard(), null, ResponseConverter.toErrorCause(f.reason()))).toList(); + return SearchShardStatistics.of(shards.failed(), shards.successful(), shards.total(), shards.skipped(), + searchFailures); } @Nullable @@ -235,9 +234,8 @@ private static PhraseSuggestion getPhraseSuggestion(String name, List options = new ArrayList<>(); - phraseSuggestOptions.forEach(optionES -> options - .add(new PhraseSuggestion.Entry.Option(optionES.text(), optionES.highlighted(), optionES.score(), - optionES.collateMatch()))); + phraseSuggestOptions.forEach(optionES -> options.add(new PhraseSuggestion.Entry.Option(optionES.text(), + optionES.highlighted(), optionES.score(), optionES.collateMatch()))); entries.add(new PhraseSuggestion.Entry(phraseSuggest.text(), phraseSuggest.offset(), phraseSuggest.length(), options, null)); }); diff --git a/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java b/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java index e6dc1ba7a..e46e644dc 100644 --- a/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java +++ b/src/main/java/org/springframework/data/elasticsearch/core/SearchHitMapping.java @@ -235,13 +235,13 @@ private SearchHits mapInnerDocuments(SearchHits searchHits, C scrollId = searchHitsImpl.getScrollId(); } - return new SearchHitsImpl<>(searchHits.getTotalHits(), // - searchHits.getTotalHitsRelation(), // - searchHits.getMaxScore(), // - scrollId, // - searchHits.getPointInTimeId(), // - convertedSearchHits, // - searchHits.getAggregations(), // + return new SearchHitsImpl<>(searchHits.getTotalHits(), + searchHits.getTotalHitsRelation(), + searchHits.getMaxScore(), + scrollId, + searchHits.getPointInTimeId(), + convertedSearchHits, + searchHits.getAggregations(), searchHits.getSuggest(), searchHits.getSearchShardStatistics()); } diff --git 
a/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java b/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java
index e1980a06c..8e2363b83 100644
--- a/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java
+++ b/src/test/java/org/springframework/data/elasticsearch/client/elc/SearchDocumentResponseBuilderUnitTests.java
@@ -140,16 +140,12 @@ void shouldGetShardStatisticsInfo() {
 						.shard(1)
 						.reason(rb -> rb
 								.reason("this is a mock failure in shards")
-								.causedBy(cbb ->
-										cbb.reason("inner reason")
-												.metadata(Map.of("hello", JsonData.of("world")))
-								)
+								.causedBy(cbb -> cbb.reason("inner reason")
+										.metadata(Map.of("hello", JsonData.of("world"))))
 								.type("reason-type")
 
 						)
-						.status("fail")
-						)
-				))
+						.status("fail"))))
 				.build();
 
 		// act

From b78588eec58a9adb38a4c08664284d682b003620 Mon Sep 17 00:00:00 2001
From: Junghoon Ban
Date: Thu, 28 Dec 2023 21:59:25 +0900
Subject: [PATCH 021/204] Remove duplicate declaration of identifying type for repository.

Original Pull Request #2813
Closes #2812
---
 .../config/ElasticsearchRepositoryConfigExtension.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/main/java/org/springframework/data/elasticsearch/repository/config/ElasticsearchRepositoryConfigExtension.java b/src/main/java/org/springframework/data/elasticsearch/repository/config/ElasticsearchRepositoryConfigExtension.java
index fa7b429ce..05bd8eb74 100644
--- a/src/main/java/org/springframework/data/elasticsearch/repository/config/ElasticsearchRepositoryConfigExtension.java
+++ b/src/main/java/org/springframework/data/elasticsearch/repository/config/ElasticsearchRepositoryConfigExtension.java
@@ -19,6 +19,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
 import java.util.Locale;
 
 import org.springframework.beans.factory.support.BeanDefinitionBuilder;
@@ -41,6 +42,7 @@
 * @author Mohsin Husen
 * @author Mark Paluch
 * @author Christoph Strobl
+ * @author Junghoon Ban
 */
public class ElasticsearchRepositoryConfigExtension extends RepositoryConfigurationExtensionSupport {

@@ -106,7 +108,7 @@ protected Collection<Class<? extends Annotation>> getIdentifyingAnnotations() {
	 */
	@Override
	protected Collection<Class<?>> getIdentifyingTypes() {
-		return Arrays.asList(ElasticsearchRepository.class, ElasticsearchRepository.class);
+		return List.of(ElasticsearchRepository.class);
	}

	/*

From 260dadd4d648b87dda5d18cc90ae457cce497735 Mon Sep 17 00:00:00 2001
From: Peter-Josef Meisch
Date: Fri, 29 Dec 2023 12:44:54 +0100
Subject: [PATCH 022/204] Make org.springframework.data.elasticsearch.client.elc.ReactiveElasticsearchTemplate.ClientCallback public.

Original Pull Request #2815 Closes #2814 --- .../elc/ReactiveElasticsearchTemplate.java | 1109 +++++++++-------- 1 file changed, 555 insertions(+), 554 deletions(-) diff --git a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java index 1d60ab94a..099c2ba7a 100644 --- a/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java +++ b/src/main/java/org/springframework/data/elasticsearch/client/elc/ReactiveElasticsearchTemplate.java @@ -84,574 +84,575 @@ */ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearchTemplate { - private static final Log LOGGER = LogFactory.getLog(ReactiveElasticsearchTemplate.class); - - private final ReactiveElasticsearchClient client; - private final RequestConverter requestConverter; - private final ResponseConverter responseConverter; - private final JsonpMapper jsonpMapper; - private final ElasticsearchExceptionTranslator exceptionTranslator; - - public ReactiveElasticsearchTemplate(ReactiveElasticsearchClient client, ElasticsearchConverter converter) { - super(converter); - - Assert.notNull(client, "client must not be null"); - - this.client = client; - this.jsonpMapper = client._transport().jsonpMapper(); - requestConverter = new RequestConverter(converter, jsonpMapper); - responseConverter = new ResponseConverter(jsonpMapper); - exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); - } - - // region Document operations - @Override - protected Mono> doIndex(T entity, IndexCoordinates index) { - - IndexRequest indexRequest = requestConverter.documentIndexRequest(getIndexQuery(entity), index, - getRefreshPolicy()); - return Mono.just(entity) // - .zipWith(// - Mono.from(execute(client -> client.index(indexRequest))) // - .map(indexResponse -> new IndexResponseMetaData(indexResponse.id(), // - indexResponse.index(), // - indexResponse.seqNo(), // - indexResponse.primaryTerm(), // - indexResponse.version() // - ))); - } - - @Override - public Flux saveAll(Mono> entitiesPublisher, IndexCoordinates index) { - - Assert.notNull(entitiesPublisher, "entitiesPublisher must not be null!"); - - return entitiesPublisher // - .flatMapMany(entities -> Flux.fromIterable(entities) // - .concatMap(entity -> maybeCallbackBeforeConvert(entity, index)) // - ).collectList() // - .map(Entities::new) // - .flatMapMany(entities -> { - - if (entities.isEmpty()) { - return Flux.empty(); - } - - return doBulkOperation(entities.indexQueries(), BulkOptions.defaultOptions(), index)// - .index() // - .flatMap(indexAndResponse -> { - T savedEntity = entities.entityAt(indexAndResponse.getT1()); - BulkResponseItem response = indexAndResponse.getT2(); - var updatedEntity = entityOperations.updateIndexedObject( - savedEntity, new IndexedObjectInformation( // - response.id(), // - response.index(), // - response.seqNo(), // - response.primaryTerm(), // - response.version()), - converter, - routingResolver); - return maybeCallbackAfterSave(updatedEntity, index); - }); - }); - } - - @Override - protected Mono doExists(String id, IndexCoordinates index) { - - Assert.notNull(id, "id must not be null"); - Assert.notNull(index, "index must not be null"); - - ExistsRequest existsRequest = requestConverter.documentExistsRequest(id, routingResolver.getRouting(), index); - - return Mono.from(execute( - ((ClientCallback>) client -> client.exists(existsRequest)))) - 
.map(BooleanResponse::value) // - .onErrorReturn(NoSuchIndexException.class, false); - } - - @Override - public Mono delete(Query query, Class entityType, IndexCoordinates index) { - - Assert.notNull(query, "query must not be null"); - - DeleteByQueryRequest request = requestConverter.documentDeleteByQueryRequest(query, routingResolver.getRouting(), - entityType, index, getRefreshPolicy()); - return Mono.from(execute(client -> client.deleteByQuery(request))).map(responseConverter::byQueryResponse); - } - - @Override - public Mono get(String id, Class entityType, IndexCoordinates index) { - - Assert.notNull(id, "id must not be null"); - Assert.notNull(entityType, "entityType must not be null"); - Assert.notNull(index, "index must not be null"); - - GetRequest getRequest = requestConverter.documentGetRequest(id, routingResolver.getRouting(), index); - - Mono> getResponse = Mono - .from(execute(client -> client.get(getRequest, EntityAsMap.class))); - - ReadDocumentCallback callback = new ReadDocumentCallback<>(converter, entityType, index); - return getResponse.flatMap(response -> callback.toEntity(DocumentAdapters.from(response))); - } - - @Override - public Mono reindex(ReindexRequest reindexRequest) { - - Assert.notNull(reindexRequest, "reindexRequest must not be null"); - - co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, - true); - - return Mono.from(execute( // - client -> client.reindex(reindexRequestES))).map(responseConverter::reindexResponse); - } - - @Override - public Mono submitReindex(ReindexRequest reindexRequest) { - - Assert.notNull(reindexRequest, "reindexRequest must not be null"); - - co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, - false); - - return Mono.from(execute( // - client -> client.reindex(reindexRequestES))) - .flatMap(response -> (response.task() == null) - ? 
Mono.error( - new UnsupportedBackendOperation("ElasticsearchClient did not return a task id on submit request")) - : Mono.just(response.task())); - } + private static final Log LOGGER = LogFactory.getLog(ReactiveElasticsearchTemplate.class); + + private final ReactiveElasticsearchClient client; + private final RequestConverter requestConverter; + private final ResponseConverter responseConverter; + private final JsonpMapper jsonpMapper; + private final ElasticsearchExceptionTranslator exceptionTranslator; + + public ReactiveElasticsearchTemplate(ReactiveElasticsearchClient client, ElasticsearchConverter converter) { + super(converter); + + Assert.notNull(client, "client must not be null"); + + this.client = client; + this.jsonpMapper = client._transport().jsonpMapper(); + requestConverter = new RequestConverter(converter, jsonpMapper); + responseConverter = new ResponseConverter(jsonpMapper); + exceptionTranslator = new ElasticsearchExceptionTranslator(jsonpMapper); + } + + // region Document operations + @Override + protected Mono> doIndex(T entity, IndexCoordinates index) { + + IndexRequest indexRequest = requestConverter.documentIndexRequest(getIndexQuery(entity), index, + getRefreshPolicy()); + return Mono.just(entity) // + .zipWith(// + Mono.from(execute(client -> client.index(indexRequest))) // + .map(indexResponse -> new IndexResponseMetaData(indexResponse.id(), // + indexResponse.index(), // + indexResponse.seqNo(), // + indexResponse.primaryTerm(), // + indexResponse.version() // + ))); + } + + @Override + public Flux saveAll(Mono> entitiesPublisher, IndexCoordinates index) { + + Assert.notNull(entitiesPublisher, "entitiesPublisher must not be null!"); + + return entitiesPublisher // + .flatMapMany(entities -> Flux.fromIterable(entities) // + .concatMap(entity -> maybeCallbackBeforeConvert(entity, index)) // + ).collectList() // + .map(Entities::new) // + .flatMapMany(entities -> { + + if (entities.isEmpty()) { + return Flux.empty(); + } + + return doBulkOperation(entities.indexQueries(), BulkOptions.defaultOptions(), index)// + .index() // + .flatMap(indexAndResponse -> { + T savedEntity = entities.entityAt(indexAndResponse.getT1()); + BulkResponseItem response = indexAndResponse.getT2(); + var updatedEntity = entityOperations.updateIndexedObject( + savedEntity, new IndexedObjectInformation( // + response.id(), // + response.index(), // + response.seqNo(), // + response.primaryTerm(), // + response.version()), + converter, + routingResolver); + return maybeCallbackAfterSave(updatedEntity, index); + }); + }); + } + + @Override + protected Mono doExists(String id, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(index, "index must not be null"); + + ExistsRequest existsRequest = requestConverter.documentExistsRequest(id, routingResolver.getRouting(), index); + + return Mono.from(execute( + ((ClientCallback>) client -> client.exists(existsRequest)))) + .map(BooleanResponse::value) // + .onErrorReturn(NoSuchIndexException.class, false); + } + + @Override + public Mono delete(Query query, Class entityType, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + + DeleteByQueryRequest request = requestConverter.documentDeleteByQueryRequest(query, routingResolver.getRouting(), + entityType, index, getRefreshPolicy()); + return Mono.from(execute(client -> client.deleteByQuery(request))).map(responseConverter::byQueryResponse); + } + + @Override + public Mono get(String id, Class entityType, IndexCoordinates index) { + + 
Assert.notNull(id, "id must not be null"); + Assert.notNull(entityType, "entityType must not be null"); + Assert.notNull(index, "index must not be null"); + + GetRequest getRequest = requestConverter.documentGetRequest(id, routingResolver.getRouting(), index); + + Mono> getResponse = Mono + .from(execute(client -> client.get(getRequest, EntityAsMap.class))); + + ReadDocumentCallback callback = new ReadDocumentCallback<>(converter, entityType, index); + return getResponse.flatMap(response -> callback.toEntity(DocumentAdapters.from(response))); + } + + @Override + public Mono reindex(ReindexRequest reindexRequest) { + + Assert.notNull(reindexRequest, "reindexRequest must not be null"); + + co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, + true); + + return Mono.from(execute( // + client -> client.reindex(reindexRequestES))).map(responseConverter::reindexResponse); + } + + @Override + public Mono submitReindex(ReindexRequest reindexRequest) { + + Assert.notNull(reindexRequest, "reindexRequest must not be null"); + + co.elastic.clients.elasticsearch.core.ReindexRequest reindexRequestES = requestConverter.reindex(reindexRequest, + false); + + return Mono.from(execute( // + client -> client.reindex(reindexRequestES))) + .flatMap(response -> (response.task() == null) + ? Mono.error( + new UnsupportedBackendOperation("ElasticsearchClient did not return a task id on submit request")) + : Mono.just(response.task())); + } - @Override - public Mono update(UpdateQuery updateQuery, IndexCoordinates index) { - - Assert.notNull(updateQuery, "UpdateQuery must not be null"); - Assert.notNull(index, "Index must not be null"); - - UpdateRequest request = requestConverter.documentUpdateRequest(updateQuery, index, getRefreshPolicy(), - routingResolver.getRouting()); - - return Mono.from(execute(client -> client.update(request, Document.class))).flatMap(response -> { - UpdateResponse.Result result = result(response.result()); - return result == null ? Mono.empty() : Mono.just(UpdateResponse.of(result)); - }); - } - - @Override - public Mono updateByQuery(UpdateQuery updateQuery, IndexCoordinates index) { - throw new UnsupportedOperationException("not implemented"); - } + @Override + public Mono update(UpdateQuery updateQuery, IndexCoordinates index) { + + Assert.notNull(updateQuery, "UpdateQuery must not be null"); + Assert.notNull(index, "Index must not be null"); + + UpdateRequest request = requestConverter.documentUpdateRequest(updateQuery, index, getRefreshPolicy(), + routingResolver.getRouting()); + + return Mono.from(execute(client -> client.update(request, Document.class))).flatMap(response -> { + UpdateResponse.Result result = result(response.result()); + return result == null ? 
Mono.empty() : Mono.just(UpdateResponse.of(result)); + }); + } + + @Override + public Mono updateByQuery(UpdateQuery updateQuery, IndexCoordinates index) { + throw new UnsupportedOperationException("not implemented"); + } - @Override - public Mono bulkUpdate(List queries, BulkOptions bulkOptions, IndexCoordinates index) { + @Override + public Mono bulkUpdate(List queries, BulkOptions bulkOptions, IndexCoordinates index) { - Assert.notNull(queries, "List of UpdateQuery must not be null"); - Assert.notNull(bulkOptions, "BulkOptions must not be null"); - Assert.notNull(index, "Index must not be null"); + Assert.notNull(queries, "List of UpdateQuery must not be null"); + Assert.notNull(bulkOptions, "BulkOptions must not be null"); + Assert.notNull(index, "Index must not be null"); - return doBulkOperation(queries, bulkOptions, index).then(); - } + return doBulkOperation(queries, bulkOptions, index).then(); + } - private Flux doBulkOperation(List queries, BulkOptions bulkOptions, IndexCoordinates index) { + private Flux doBulkOperation(List queries, BulkOptions bulkOptions, IndexCoordinates index) { - BulkRequest bulkRequest = requestConverter.documentBulkRequest(queries, bulkOptions, index, getRefreshPolicy()); - return client.bulk(bulkRequest) - .onErrorMap(e -> new UncategorizedElasticsearchException("Error executing bulk request", e)) - .flatMap(this::checkForBulkOperationFailure) // - .flatMapMany(response -> Flux.fromIterable(response.items())); + BulkRequest bulkRequest = requestConverter.documentBulkRequest(queries, bulkOptions, index, getRefreshPolicy()); + return client.bulk(bulkRequest) + .onErrorMap(e -> new UncategorizedElasticsearchException("Error executing bulk request", e)) + .flatMap(this::checkForBulkOperationFailure) // + .flatMapMany(response -> Flux.fromIterable(response.items())); - } + } - private Mono checkForBulkOperationFailure(BulkResponse bulkResponse) { + private Mono checkForBulkOperationFailure(BulkResponse bulkResponse) { - if (bulkResponse.errors()) { - Map failedDocuments = new HashMap<>(); + if (bulkResponse.errors()) { + Map failedDocuments = new HashMap<>(); - for (BulkResponseItem item : bulkResponse.items()) { + for (BulkResponseItem item : bulkResponse.items()) { - if (item.error() != null) { - failedDocuments.put(item.id(), new BulkFailureException.FailureDetails(item.status(), item.error().reason())); - } - } - BulkFailureException exception = new BulkFailureException( - "Bulk operation has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages [" - + failedDocuments + ']', - failedDocuments); - return Mono.error(exception); - } else { - return Mono.just(bulkResponse); - } - } + if (item.error() != null) { + failedDocuments.put(item.id(), new BulkFailureException.FailureDetails(item.status(), item.error().reason())); + } + } + BulkFailureException exception = new BulkFailureException( + "Bulk operation has failures. 
Use ElasticsearchException.getFailedDocuments() for detailed messages [" + + failedDocuments + ']', + failedDocuments); + return Mono.error(exception); + } else { + return Mono.just(bulkResponse); + } + } - @Override - protected Mono doDeleteById(String id, @Nullable String routing, IndexCoordinates index) { - - Assert.notNull(id, "id must not be null"); - Assert.notNull(index, "index must not be null"); - - return Mono.defer(() -> { - DeleteRequest deleteRequest = requestConverter.documentDeleteRequest(id, routing, index, getRefreshPolicy()); - return doDelete(deleteRequest); - }); - } - - private Mono doDelete(DeleteRequest request) { - - return Mono.from(execute(client -> client.delete(request))) // - .flatMap(deleteResponse -> { - if (deleteResponse.result() == Result.NotFound) { - return Mono.empty(); - } - return Mono.just(deleteResponse.id()); - }).onErrorResume(NoSuchIndexException.class, it -> Mono.empty()); - } - - @Override - public Flux> multiGet(Query query, Class clazz, IndexCoordinates index) { - - Assert.notNull(query, "query must not be null"); - Assert.notNull(clazz, "clazz must not be null"); - - MgetRequest request = requestConverter.documentMgetRequest(query, clazz, index); - - ReadDocumentCallback callback = new ReadDocumentCallback<>(converter, clazz, index); - - Publisher> response = execute(client -> client.mget(request, EntityAsMap.class)); - - return Mono.from(response)// - .flatMapMany(it -> Flux.fromIterable(DocumentAdapters.from(it))) // - .flatMap(multiGetItem -> { - if (multiGetItem.isFailed()) { - return Mono.just(MultiGetItem.of(null, multiGetItem.getFailure())); - } else { - return callback.toEntity(multiGetItem.getItem()) // - .map(t -> MultiGetItem.of(t, multiGetItem.getFailure())); - } - }); - } - - // endregion - - @Override - protected ReactiveElasticsearchTemplate doCopy() { - return new ReactiveElasticsearchTemplate(client, converter); - } - - // region search operations - - @Override - protected Flux doFind(Query query, Class clazz, IndexCoordinates index) { - - Assert.notNull(query, "query must not be null"); - Assert.notNull(clazz, "clazz must not be null"); - Assert.notNull(index, "index must not be null"); - - if (query instanceof SearchTemplateQuery searchTemplateQuery) { - return Flux.defer(() -> doSearch(searchTemplateQuery, clazz, index)); - } else { - return Flux.defer(() -> { - boolean queryIsUnbounded = !(query.getPageable().isPaged() || query.isLimiting()); - return queryIsUnbounded ? 
doFindUnbounded(query, clazz, index) : doFindBounded(query, clazz, index); - }); - } - } - - private Flux doFindUnbounded(Query query, Class clazz, IndexCoordinates index) { - - if (query instanceof BaseQuery baseQuery) { - var pitKeepAlive = Duration.ofMinutes(5); - // setup functions for Flux.usingWhen() - Mono resourceSupplier = openPointInTime(index, pitKeepAlive, true) - .map(pit -> new PitSearchAfter(baseQuery, pit)); - - Function> asyncComplete = this::cleanupPit; - - BiFunction> asyncError = (psa, ex) -> { - if (LOGGER.isErrorEnabled()) { - LOGGER.error("Error during pit/search_after", ex); - } - return cleanupPit(psa); - }; - - Function> asyncCancel = psa -> { - if (LOGGER.isWarnEnabled()) { - LOGGER.warn("pit/search_after was cancelled"); - } - return cleanupPit(psa); - }; - - Function>> resourceClosure = psa -> { - - baseQuery.setPointInTime(new Query.PointInTime(psa.getPit(), pitKeepAlive)); - baseQuery.addSort(Sort.by("_shard_doc")); - SearchRequest firstSearchRequest = requestConverter.searchRequest(baseQuery, routingResolver.getRouting(), - clazz, index, false, true); - - return Mono.from(execute(client -> client.search(firstSearchRequest, EntityAsMap.class))) - .expand(entityAsMapSearchResponse -> { - - var hits = entityAsMapSearchResponse.hits().hits(); - if (CollectionUtils.isEmpty(hits)) { - return Mono.empty(); - } - - List sortOptions = hits.get(hits.size() - 1).sort().stream().map(TypeUtils::toObject) - .collect(Collectors.toList()); - baseQuery.setSearchAfter(sortOptions); - SearchRequest followSearchRequest = requestConverter.searchRequest(baseQuery, - routingResolver.getRouting(), clazz, index, false, true); - return Mono.from(execute(client -> client.search(followSearchRequest, EntityAsMap.class))); - }); - - }; - - Flux> searchResponses = Flux.usingWhen(resourceSupplier, resourceClosure, asyncComplete, - asyncError, asyncCancel); - return searchResponses.flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) - .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); - } else { - return Flux.error(new IllegalArgumentException("Query must be derived from BaseQuery")); - } - } - - private Publisher cleanupPit(PitSearchAfter psa) { - var baseQuery = psa.getBaseQuery(); - baseQuery.setPointInTime(null); - baseQuery.setSearchAfter(null); - baseQuery.setSort(psa.getSort()); - var pit = psa.getPit(); - return StringUtils.hasText(pit) ? 
closePointInTime(pit) : Mono.empty(); - } - - static private class PitSearchAfter { - private final BaseQuery baseQuery; - @Nullable private final Sort sort; - private final String pit; - - PitSearchAfter(BaseQuery baseQuery, String pit) { - this.baseQuery = baseQuery; - this.sort = baseQuery.getSort(); - this.pit = pit; - } - - public BaseQuery getBaseQuery() { - return baseQuery; - } - - @Nullable - public Sort getSort() { - return sort; - } - - public String getPit() { - return pit; - } - } - - @Override - protected Mono doCount(Query query, Class entityType, IndexCoordinates index) { - - Assert.notNull(query, "query must not be null"); - Assert.notNull(index, "index must not be null"); + @Override + protected Mono doDeleteById(String id, @Nullable String routing, IndexCoordinates index) { + + Assert.notNull(id, "id must not be null"); + Assert.notNull(index, "index must not be null"); + + return Mono.defer(() -> { + DeleteRequest deleteRequest = requestConverter.documentDeleteRequest(id, routing, index, getRefreshPolicy()); + return doDelete(deleteRequest); + }); + } + + private Mono doDelete(DeleteRequest request) { + + return Mono.from(execute(client -> client.delete(request))) // + .flatMap(deleteResponse -> { + if (deleteResponse.result() == Result.NotFound) { + return Mono.empty(); + } + return Mono.just(deleteResponse.id()); + }).onErrorResume(NoSuchIndexException.class, it -> Mono.empty()); + } + + @Override + public Flux> multiGet(Query query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + + MgetRequest request = requestConverter.documentMgetRequest(query, clazz, index); + + ReadDocumentCallback callback = new ReadDocumentCallback<>(converter, clazz, index); + + Publisher> response = execute(client -> client.mget(request, EntityAsMap.class)); + + return Mono.from(response)// + .flatMapMany(it -> Flux.fromIterable(DocumentAdapters.from(it))) // + .flatMap(multiGetItem -> { + if (multiGetItem.isFailed()) { + return Mono.just(MultiGetItem.of(null, multiGetItem.getFailure())); + } else { + return callback.toEntity(multiGetItem.getItem()) // + .map(t -> MultiGetItem.of(t, multiGetItem.getFailure())); + } + }); + } + + // endregion + + @Override + protected ReactiveElasticsearchTemplate doCopy() { + return new ReactiveElasticsearchTemplate(client, converter); + } + + // region search operations + + @Override + protected Flux doFind(Query query, Class clazz, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(clazz, "clazz must not be null"); + Assert.notNull(index, "index must not be null"); + + if (query instanceof SearchTemplateQuery searchTemplateQuery) { + return Flux.defer(() -> doSearch(searchTemplateQuery, clazz, index)); + } else { + return Flux.defer(() -> { + boolean queryIsUnbounded = !(query.getPageable().isPaged() || query.isLimiting()); + return queryIsUnbounded ? 
doFindUnbounded(query, clazz, index) : doFindBounded(query, clazz, index); + }); + } + } + + private Flux doFindUnbounded(Query query, Class clazz, IndexCoordinates index) { + + if (query instanceof BaseQuery baseQuery) { + var pitKeepAlive = Duration.ofMinutes(5); + // setup functions for Flux.usingWhen() + Mono resourceSupplier = openPointInTime(index, pitKeepAlive, true) + .map(pit -> new PitSearchAfter(baseQuery, pit)); + + Function> asyncComplete = this::cleanupPit; + + BiFunction> asyncError = (psa, ex) -> { + if (LOGGER.isErrorEnabled()) { + LOGGER.error("Error during pit/search_after", ex); + } + return cleanupPit(psa); + }; + + Function> asyncCancel = psa -> { + if (LOGGER.isWarnEnabled()) { + LOGGER.warn("pit/search_after was cancelled"); + } + return cleanupPit(psa); + }; + + Function>> resourceClosure = psa -> { + + baseQuery.setPointInTime(new Query.PointInTime(psa.getPit(), pitKeepAlive)); + baseQuery.addSort(Sort.by("_shard_doc")); + SearchRequest firstSearchRequest = requestConverter.searchRequest(baseQuery, routingResolver.getRouting(), + clazz, index, false, true); + + return Mono.from(execute(client -> client.search(firstSearchRequest, EntityAsMap.class))) + .expand(entityAsMapSearchResponse -> { + + var hits = entityAsMapSearchResponse.hits().hits(); + if (CollectionUtils.isEmpty(hits)) { + return Mono.empty(); + } + + List sortOptions = hits.get(hits.size() - 1).sort().stream().map(TypeUtils::toObject) + .collect(Collectors.toList()); + baseQuery.setSearchAfter(sortOptions); + SearchRequest followSearchRequest = requestConverter.searchRequest(baseQuery, + routingResolver.getRouting(), clazz, index, false, true); + return Mono.from(execute(client -> client.search(followSearchRequest, EntityAsMap.class))); + }); + + }; + + Flux> searchResponses = Flux.usingWhen(resourceSupplier, resourceClosure, asyncComplete, + asyncError, asyncCancel); + return searchResponses.flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) + .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); + } else { + return Flux.error(new IllegalArgumentException("Query must be derived from BaseQuery")); + } + } + + private Publisher cleanupPit(PitSearchAfter psa) { + var baseQuery = psa.getBaseQuery(); + baseQuery.setPointInTime(null); + baseQuery.setSearchAfter(null); + baseQuery.setSort(psa.getSort()); + var pit = psa.getPit(); + return StringUtils.hasText(pit) ? 
closePointInTime(pit) : Mono.empty(); + } + + static private class PitSearchAfter { + private final BaseQuery baseQuery; + @Nullable + private final Sort sort; + private final String pit; + + PitSearchAfter(BaseQuery baseQuery, String pit) { + this.baseQuery = baseQuery; + this.sort = baseQuery.getSort(); + this.pit = pit; + } + + public BaseQuery getBaseQuery() { + return baseQuery; + } + + @Nullable + public Sort getSort() { + return sort; + } + + public String getPit() { + return pit; + } + } + + @Override + protected Mono doCount(Query query, Class entityType, IndexCoordinates index) { + + Assert.notNull(query, "query must not be null"); + Assert.notNull(index, "index must not be null"); - SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), entityType, index, - true); + SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), entityType, index, + true); - return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) - .map(searchResponse -> searchResponse.hits().total() != null ? searchResponse.hits().total().value() : 0L); - } - - private Flux doFindBounded(Query query, Class clazz, IndexCoordinates index) { + return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) + .map(searchResponse -> searchResponse.hits().total() != null ? searchResponse.hits().total().value() : 0L); + } + + private Flux doFindBounded(Query query, Class clazz, IndexCoordinates index) { - SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, - false, false); - - return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) // - .flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) // - .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); - } - - private Flux doSearch(SearchTemplateQuery query, Class clazz, IndexCoordinates index) { - - var request = requestConverter.searchTemplate(query, routingResolver.getRouting(), index); - - return Mono.from(execute(client -> client.searchTemplate(request, EntityAsMap.class))) // - .flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits()) // - .map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper)); - } - - @Override - protected Mono doFindForResponse(Query query, Class clazz, IndexCoordinates index) { - - Assert.notNull(query, "query must not be null"); - Assert.notNull(index, "index must not be null"); - - SearchRequest searchRequest = requestConverter.searchRequest(query, routingResolver.getRouting(), clazz, index, - false); - - // noinspection unchecked - SearchDocumentCallback callback = new ReadSearchDocumentCallback<>((Class) clazz, index); - SearchDocumentResponse.EntityCreator entityCreator = searchDocument -> callback.toEntity(searchDocument) - .toFuture(); - - return Mono.from(execute(client -> client.search(searchRequest, EntityAsMap.class))) - .map(searchResponse -> SearchDocumentResponseBuilder.from(searchResponse, entityCreator, jsonpMapper)); - } - - @Override - public Flux> aggregate(Query query, Class entityType, IndexCoordinates index) { - - return doFindForResponse(query, entityType, index).flatMapMany(searchDocumentResponse -> { - ElasticsearchAggregations aggregations = (ElasticsearchAggregations) searchDocumentResponse.getAggregations(); - return aggregations == null ? 
Flux.empty() : Flux.fromIterable(aggregations.aggregations()); - }); - } - - @Override - public Mono openPointInTime(IndexCoordinates index, Duration keepAlive, Boolean ignoreUnavailable) { - - Assert.notNull(index, "index must not be null"); - Assert.notNull(keepAlive, "keepAlive must not be null"); - Assert.notNull(ignoreUnavailable, "ignoreUnavailable must not be null"); - - var request = requestConverter.searchOpenPointInTimeRequest(index, keepAlive, ignoreUnavailable); - return Mono.from(execute(client -> client.openPointInTime(request))).map(OpenPointInTimeResponse::id); - } - - @Override - public Mono closePointInTime(String pit) { - - Assert.notNull(pit, "pit must not be null"); - - ClosePointInTimeRequest request = requestConverter.searchClosePointInTime(pit); - return Mono.from(execute(client -> client.closePointInTime(request))).map(ClosePointInTimeResponse::succeeded); - } - - // endregion - - // region script operations - @Override - public Mono putScript(Script script) { - - Assert.notNull(script, "script must not be null"); - - var request = requestConverter.scriptPut(script); - return Mono.from(execute(client -> client.putScript(request))).map(PutScriptResponse::acknowledged); - } - - @Override - public Mono