diff --git a/.evergreen/.evg.yml b/.evergreen/.evg.yml index a57f6473b6f..66b809e38a0 100644 --- a/.evergreen/.evg.yml +++ b/.evergreen/.evg.yml @@ -509,24 +509,6 @@ functions: mongo --nodb setup.js aws_e2e_ecs.js cd - - "run atlas data lake test": - - command: shell.exec - type: test - params: - working_dir: "src" - script: | - ${PREPARE_SHELL} - JAVA_VERSION=${JAVA_VERSION} .evergreen/run-atlas-data-lake-test.sh - - "run atlas search test": - - command: shell.exec - type: test - params: - working_dir: "src" - script: | - ${PREPARE_SHELL} - MONGODB_URI="${atlas_search_uri}" .evergreen/run-atlas-search-tests.sh - "run-ocsp-test": - command: shell.exec type: test @@ -627,19 +609,6 @@ functions: ${PREPARE_SHELL} PROJECT_DIRECTORY=${PROJECT_DIRECTORY} JAVA_VERSION=${JAVA_VERSION} TOPOLOGY=${TOPOLOGY} STORAGE_ENGINE=${STORAGE_ENGINE} MONGODB_URI="${MONGODB_URI}" .evergreen/run-mmapv1-storage-test.sh - "run atlas test": - - command: shell.exec - type: test - params: - silent: true - working_dir: "src" - script: | - # DO NOT ECHO WITH XTRACE (which PREPARE_SHELL does) - # The connection strings are pipe-delimited - JAVA_VERSION="8" \ - MONGODB_URIS="${atlas_free_tier_uri}|${atlas_replica_set_uri}|${atlas_sharded_uri}|${atlas_tls_v11_uri}|${atlas_tls_v12_uri}|${atlas_free_tier_uri_srv}|${atlas_replica_set_uri_srv}|${atlas_sharded_uri_srv}|${atlas_tls_v11_uri_srv}|${atlas_tls_v12_uri_srv}|${atlas_serverless_uri}|${atlas_serverless_uri_srv}" \ - .evergreen/run-connectivity-tests.sh - run socks5 tests: - command: shell.exec type: test @@ -794,15 +763,7 @@ functions: shell: bash script: | ${PREPARE_SHELL} - cd "$MONGO_ORCHESTRATION_HOME" - # source the mongo-orchestration virtualenv if it exists - if [ -f venv/bin/activate ]; then - . venv/bin/activate - elif [ -f venv/Scripts/activate ]; then - . venv/Scripts/activate - fi - mongo-orchestration stop || true - cd - + bash ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh || true rm -rf $DRIVERS_TOOLS || true "fix absolute paths": @@ -837,6 +798,30 @@ functions: ${PREPARE_SHELL} MONGODB_URI="${MONGODB_URI}" JAVA_VERSION="${JAVA_VERSION}" .evergreen/run-graalvm-native-image-app.sh + "oidc-auth-test-k8s-func": + - command: shell.exec + type: test + params: + shell: bash + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] + script: |- + set -o errexit + ${PREPARE_SHELL} + export K8S_VARIANT=${VARIANT} + cd src + git add . 
+ git commit --allow-empty -m "add files" + # uncompressed tar used to allow appending .git folder + export K8S_DRIVERS_TAR_FILE=/tmp/mongo-java-driver.tar + git archive -o $K8S_DRIVERS_TAR_FILE HEAD + tar -rf $K8S_DRIVERS_TAR_FILE .git + export K8S_TEST_CMD="OIDC_ENV=k8s VARIANT=${VARIANT} ./.evergreen/run-mongodb-oidc-test.sh" + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/setup-pod.sh + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/run-self-test.sh + source $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/secrets-export.sh + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/run-driver-test.sh + bash $DRIVERS_TOOLS/.evergreen/auth_oidc/k8s/teardown-pod.sh + # Anchors pre: @@ -960,6 +945,22 @@ tasks: export GCPOIDC_TEST_CMD="OIDC_ENV=gcp ./.evergreen/run-mongodb-oidc-test.sh" bash $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/run-driver-test.sh + - name: "oidc-auth-test-k8s" + commands: + - command: ec2.assume_role + params: + role_arn: ${aws_test_secrets_role} + duration_seconds: 1800 + - func: "oidc-auth-test-k8s-func" + vars: + VARIANT: eks + - func: "oidc-auth-test-k8s-func" + vars: + VARIANT: aks + - func: "oidc-auth-test-k8s-func" + vars: + VARIANT: gke + - name: serverless-test commands: - func: "run serverless" @@ -1462,14 +1463,50 @@ tasks: OCSP_MUST_STAPLE: "false" OCSP_TLS_SHOULD_SUCCEED: "0" - - name: "atlas-data-lake-test" + - name: "atlas-data-lake-task" commands: - func: "bootstrap mongohoused" - - func: "run atlas data lake test" + - command: shell.exec + type: test + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + JAVA_VERSION=${JAVA_VERSION} .evergreen/run-atlas-data-lake-test.sh + + - name: "atlas-search-task" + commands: + - command: shell.exec + type: test + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + MONGODB_URI="${atlas_search_uri}" .evergreen/run-atlas-search-tests.sh + + - name: "atlas-connectivity-task" + commands: + - command: shell.exec + type: test + params: + silent: true + working_dir: "src" + script: | + # DO NOT ECHO WITH XTRACE (which PREPARE_SHELL does) + # The connection strings are pipe-delimited + MONGODB_URIS="${atlas_free_tier_uri}|${atlas_replica_set_uri}|${atlas_sharded_uri}|${atlas_tls_v11_uri}|${atlas_tls_v12_uri}|${atlas_free_tier_uri_srv}|${atlas_replica_set_uri_srv}|${atlas_sharded_uri_srv}|${atlas_tls_v11_uri_srv}|${atlas_tls_v12_uri_srv}|${atlas_serverless_uri}|${atlas_serverless_uri_srv}" \ + JAVA_VERSION="8" \ + .evergreen/run-connectivity-tests.sh - - name: "atlas-search-test" + - name: "atlas-search-index-management-task" commands: - - func: "run atlas search test" + - command: subprocess.exec + params: + working_dir: src + binary: bash + add_expansions_to_env: true + args: + - .evergreen/run-atlas-search-index-management-tests.sh - name: "gssapi-auth-test" commands: @@ -1490,20 +1527,6 @@ tasks: - func: "bootstrap mongo-orchestration" - func: "run netty tests" - - name: "atlas-test" - commands: - - func: "run atlas test" - - - name: "test-atlas-search-index-helpers" - commands: - - command: subprocess.exec - params: - working_dir: src - binary: bash - add_expansions_to_env: true - args: - - .evergreen/run-atlas-search-index-management-tests.sh - - name: publish-snapshot depends_on: - variant: "static-checks" @@ -1536,7 +1559,7 @@ tasks: - func: "run perf tests" - func: "send dashboard data" - - name: "test-aws-lambda-deployed" + - name: "aws-lambda-deployed-task" commands: - command: ec2.assume_role params: @@ -1630,7 +1653,6 @@ tasks: echo "Untarring file ... 
begin" GCPKMS_CMD="tar xf mongo-java-driver.tgz" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh echo "Untarring file ... end" - - command: shell.exec type: test params: @@ -1918,11 +1940,15 @@ axes: batchtime: 10080 # 7 days task_groups: - - name: test_atlas_task_group_search_indexes + - name: "atlas-deployed-task-group" + max_hosts: -1 + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 setup_group: - func: fetch source - func: prepare resources - command: subprocess.exec + type: setup params: working_dir: src binary: bash @@ -1934,16 +1960,17 @@ task_groups: file: src/atlas-expansion.yml teardown_group: - command: subprocess.exec + type: setup params: working_dir: src binary: bash add_expansions_to_env: true args: - ${DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 tasks: - - test-atlas-search-index-helpers + - "atlas-search-index-management-task" + - "aws-lambda-deployed-task" + - name: testgcpkms_task_group setup_group_can_fail_task: true setup_group_timeout_secs: 1800 # 30 minutes @@ -1979,6 +2006,7 @@ task_groups: $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/delete-instance.sh tasks: - testgcpkms-task + - name: testazurekms_task_group setup_group_can_fail_task: true setup_group_timeout_secs: 1800 # 30 minutes @@ -2023,34 +2051,8 @@ task_groups: $DRIVERS_TOOLS/.evergreen/csfle/azurekms/delete-vm.sh tasks: - testazurekms-task - - name: test_atlas_task_group - setup_group: - - func: fetch source - - func: prepare resources - - command: subprocess.exec - params: - working_dir: src - binary: bash - add_expansions_to_env: true - args: - - ${DRIVERS_TOOLS}/.evergreen/atlas/setup-atlas-cluster.sh - - command: expansions.update - params: - file: src/atlas-expansion.yml - teardown_group: - - command: subprocess.exec - params: - working_dir: src - binary: bash - add_expansions_to_env: true - args: - - ${DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - test-aws-lambda-deployed - - name: testoidc_task_group + - name: test-oidc-task-group setup_group: - func: fetch source - func: prepare resources @@ -2075,7 +2077,7 @@ task_groups: tasks: - oidc-auth-test - - name: testazureoidc_task_group + - name: test-oidc-azure-task-group setup_group: - func: fetch source - func: prepare resources @@ -2098,7 +2100,7 @@ task_groups: tasks: - oidc-auth-test-azure - - name: testgcpoidc_task_group + - name: test-oidc-gcp-task-group setup_group: - func: fetch source - func: prepare resources @@ -2122,6 +2124,33 @@ task_groups: tasks: - oidc-auth-test-gcp + - name: test-oidc-k8s-task-group + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + teardown_task_can_fail_task: true + teardown_group_timeout_secs: 180 + setup_group: + - func: fetch source + - func: prepare resources + - func: fix absolute paths + - command: ec2.assume_role + params: + role_arn: ${aws_test_secrets_role} + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/setup.sh + teardown_group: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/teardown.sh + tasks: + - oidc-auth-test-k8s + buildvariants: # Test packaging and other release related routines @@ -2283,43 +2312,38 @@ buildvariants: tasks: - name: "perf" -- name: rhel8-test-atlas - display_name: 
Atlas Cluster Tests - run_on: rhel80-large - tasks: - - test_atlas_task_group - - name: plain-auth-test display_name: "PLAIN (LDAP) Auth test" run_on: rhel80-small tasks: - name: "plain-auth-test" -- name: rhel80-test-search-indexes - display_name: Atlas Search Index Management Tests - run_on: rhel80-small - tasks: - - name: "test_atlas_task_group_search_indexes" - - name: "oidc-auth-test" display_name: "OIDC Auth" run_on: ubuntu2204-small tasks: - - name: testoidc_task_group + - name: test-oidc-task-group batchtime: 20160 # 14 days -- name: testazureoidc-variant +- name: test-oidc-azure-variant display_name: "OIDC Auth Azure" run_on: ubuntu2204-small tasks: - - name: testazureoidc_task_group + - name: test-oidc-azure-task-group batchtime: 20160 # 14 days -- name: testgcpoidc-variant +- name: test-oidc-gcp-variant display_name: "OIDC Auth GCP" run_on: ubuntu2204-small tasks: - - name: testgcpoidc_task_group + - name: test-oidc-gcp-task-group + batchtime: 20160 # 14 days + +- name: test-oidc-k8s-variant + display_name: "OIDC Auth K8S" + run_on: ubuntu2204-small + tasks: + - name: test-oidc-k8s-task-group batchtime: 20160 # 14 days - matrix_name: "aws-auth-test" @@ -2355,23 +2379,19 @@ buildvariants: tasks: - name: ".ocsp" -- name: atlas-data-lake-test - display_name: "Atlas Data Lake test" - run_on: ubuntu2004-small - tasks: - - name: "atlas-data-lake-test" - -- name: atlas-test - display_name: "Atlas test" +- name: "atlas-search-variant" + display_name: "Atlas Tests" run_on: rhel80-small tasks: - - name: "atlas-test" + - name: "atlas-deployed-task-group" + - name: "atlas-search-task" + - name: "atlas-connectivity-task" -- name: atlas-search-test - display_name: "Atlas Search test" - run_on: rhel80-small +- name: atlas-data-lake-test + display_name: "Atlas Data Lake test" + run_on: ubuntu2004-small tasks: - - name: "atlas-search-test" + - name: "atlas-data-lake-task" - name: "reactive-streams-tck-test" display_name: "Reactive Streams TCK tests" diff --git a/.evergreen/publish.sh b/.evergreen/publish.sh index bfecc0ae865..e3f9f365d42 100755 --- a/.evergreen/publish.sh +++ b/.evergreen/publish.sh @@ -26,6 +26,6 @@ fi SYSTEM_PROPERTIES="-Dorg.gradle.internal.publish.checksums.insecure=true -Dorg.gradle.internal.http.connectionTimeout=120000 -Dorg.gradle.internal.http.socketTimeout=120000" ./gradlew -version -./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info ${TASK} +./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info ${TASK} # Scala 2.13 is published as result of this gradle execution. 
./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info :bson-scala:${TASK} :driver-scala:${TASK} -PdefaultScalaVersions=2.12.12 ./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info :bson-scala:${TASK} :driver-scala:${TASK} -PdefaultScalaVersions=2.11.12 diff --git a/.evergreen/run-atlas-search-tests.sh b/.evergreen/run-atlas-search-tests.sh index 36cc981b3f4..f207647825f 100755 --- a/.evergreen/run-atlas-search-tests.sh +++ b/.evergreen/run-atlas-search-tests.sh @@ -16,4 +16,6 @@ echo "Running Atlas Search tests" ./gradlew --stacktrace --info \ -Dorg.mongodb.test.atlas.search=true \ -Dorg.mongodb.test.uri=${MONGODB_URI} \ - driver-core:test --tests AggregatesSearchIntegrationTest --tests AggregatesVectorSearchIntegrationTest + driver-core:test --tests AggregatesSearchIntegrationTest \ + --tests AggregatesBinaryVectorSearchIntegrationTest \ + --tests AggregatesSearchTest \ diff --git a/.evergreen/run-mongodb-oidc-test.sh b/.evergreen/run-mongodb-oidc-test.sh index ec2b2c19610..55b0599fd02 100755 --- a/.evergreen/run-mongodb-oidc-test.sh +++ b/.evergreen/run-mongodb-oidc-test.sh @@ -19,6 +19,16 @@ elif [ $OIDC_ENV == "azure" ]; then source ./env.sh elif [ $OIDC_ENV == "gcp" ]; then source ./secrets-export.sh +elif [ $OIDC_ENV == "k8s" ]; then + # Make sure K8S_VARIANT is set. + if [ -z "$K8S_VARIANT" ]; then + echo "Must specify K8S_VARIANT" + popd + exit 1 + fi + + # fix for git permissions issue: + git config --global --add safe.directory /tmp/test else echo "Unrecognized OIDC_ENV $OIDC_ENV" exit 1 diff --git a/THIRD-PARTY-NOTICES b/THIRD-PARTY-NOTICES index f881b103544..acca60ca973 100644 --- a/THIRD-PARTY-NOTICES +++ b/THIRD-PARTY-NOTICES @@ -184,3 +184,19 @@ https://github.com/mongodb/mongo-java-driver. See the License for the specific language governing permissions and limitations under the License. +9) The following files: BsonCodecUtils.kt + + Copyright 2008-present MongoDB, Inc. + Copyright 2017-2021 JetBrains s.r.o. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/bom/build.gradle.kts b/bom/build.gradle.kts new file mode 100644 index 00000000000..5d1fb81c384 --- /dev/null +++ b/bom/build.gradle.kts @@ -0,0 +1,23 @@ +group = "org.mongodb" +description = "This Bill of Materials POM simplifies dependency management when referencing multiple" + + " MongoDB Java Driver artifacts in projects using Gradle or Maven." 
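For context, a consuming build would typically import this Bill of Materials as a Gradle platform (or as a Maven dependencyManagement import) so that individual driver artifacts can be declared without explicit versions. A minimal sketch in Gradle Kotlin DSL, assuming the BOM is published as org.mongodb:bom (matching this project's group and name; the published coordinates are not shown in this diff) and using the 5.4.0 version set elsewhere in this change:

    // build.gradle.kts of a hypothetical consuming project
    dependencies {
        // Import the BOM so its constraints control the driver artifact versions.
        implementation(platform("org.mongodb:bom:5.4.0"))

        // Driver artifacts can now be declared without versions; the BOM supplies them.
        implementation("org.mongodb:mongodb-driver-sync")
        implementation("org.mongodb:bson-kotlinx")
    }
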
+ +dependencies { + constraints { + api(project(":mongodb-crypt")) + api(project(":driver-core")) + api(project(":bson")) + api(project(":bson-record-codec")) + + api(project(":driver-sync")) + api(project(":driver-reactive-streams")) + + api(project(":bson-kotlin")) + api(project(":bson-kotlinx")) + api(project(":driver-kotlin-coroutine")) + api(project(":driver-kotlin-sync")) + + api(project(":bson-scala")) + api(project(":driver-scala")) + } +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt index 027fe8925da..8a163f42f83 100644 --- a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonConfiguration.kt @@ -31,4 +31,19 @@ public data class BsonConfiguration( val encodeDefaults: Boolean = true, val explicitNulls: Boolean = false, val classDiscriminator: String = "_t", + val bsonNamingStrategy: BsonNamingStrategy? = null ) + +/** + * Optional BSON naming strategy for a field. + * + * @since 5.4 + */ +public enum class BsonNamingStrategy { + + /** + * A strategy that transforms serial names from camel case to snake case — lowercase characters with words separated + * by underscores. + */ + SNAKE_CASE, +} diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt index 99e5d2acb17..c00d09345d0 100644 --- a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonDecoder.kt @@ -37,11 +37,13 @@ import org.bson.BsonType import org.bson.BsonValue import org.bson.codecs.BsonValueCodec import org.bson.codecs.DecoderContext +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.cacheElementNamesByDescriptor import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonArrayDecoder import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonDecoder import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonDocumentDecoder import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonMapDecoder import org.bson.codecs.kotlinx.utils.BsonCodecUtils.createBsonPolymorphicDecoder +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.getCachedElementNamesByDescriptor import org.bson.internal.NumberCodecHelper import org.bson.internal.StringCodecHelper import org.bson.types.ObjectId @@ -102,6 +104,7 @@ internal sealed class AbstractBsonDecoder( elementDescriptor.serialName, elementDescriptor.isNullable && !descriptor.isElementOptional(it)) } this.elementsMetadata = elementsMetadata + cacheElementNamesByDescriptor(descriptor, configuration) } override fun decodeElementIndex(descriptor: SerialDescriptor): Int { @@ -129,7 +132,13 @@ internal sealed class AbstractBsonDecoder( } return name?.let { - val index = descriptor.getElementIndex(it) + val index = + if (configuration.bsonNamingStrategy == BsonNamingStrategy.SNAKE_CASE) { + getCachedElementNamesByDescriptor(descriptor)[it]?.let { name -> descriptor.getElementIndex(name) } + ?: UNKNOWN_NAME + } else { + descriptor.getElementIndex(it) + } return if (index == UNKNOWN_NAME) { reader.skipValue() decodeElementIndexImpl(descriptor) diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt index 1470bbb76a5..8a34bccdb36 100644 --- a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt +++ 
b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/BsonEncoder.kt @@ -31,6 +31,7 @@ import org.bson.BsonValue import org.bson.BsonWriter import org.bson.codecs.BsonValueCodec import org.bson.codecs.EncoderContext +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.convertCamelCase import org.bson.types.ObjectId /** @@ -203,7 +204,15 @@ internal open class BsonEncoderImpl( } internal fun encodeName(value: Any) { - writer.writeName(value.toString()) + val name = + value.toString().let { + if (configuration.bsonNamingStrategy == BsonNamingStrategy.SNAKE_CASE) { + convertCamelCase(it, '_') + } else { + it + } + } + writer.writeName(name) state = STATE.VALUE } diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt index 4b0eee8213a..bd8b6739958 100644 --- a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonDecoder.kt @@ -31,6 +31,7 @@ import org.bson.AbstractBsonReader import org.bson.BsonBinarySubType import org.bson.BsonType import org.bson.UuidRepresentation +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.toJsonNamingStrategy import org.bson.internal.UuidHelper @OptIn(ExperimentalSerializationApi::class) @@ -42,6 +43,7 @@ internal interface JsonBsonDecoder : BsonDecoder, JsonDecoder { explicitNulls = configuration.explicitNulls encodeDefaults = configuration.encodeDefaults classDiscriminator = configuration.classDiscriminator + namingStrategy = configuration.bsonNamingStrategy.toJsonNamingStrategy() serializersModule = this@JsonBsonDecoder.serializersModule } diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt index 6cff36a0909..4a754834e6d 100644 --- a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/JsonBsonEncoder.kt @@ -30,6 +30,7 @@ import kotlinx.serialization.json.int import kotlinx.serialization.json.long import kotlinx.serialization.modules.SerializersModule import org.bson.BsonWriter +import org.bson.codecs.kotlinx.utils.BsonCodecUtils.toJsonNamingStrategy import org.bson.types.Decimal128 @OptIn(ExperimentalSerializationApi::class) @@ -52,6 +53,7 @@ internal class JsonBsonEncoder( explicitNulls = configuration.explicitNulls encodeDefaults = configuration.encodeDefaults classDiscriminator = configuration.classDiscriminator + namingStrategy = configuration.bsonNamingStrategy.toJsonNamingStrategy() serializersModule = this@JsonBsonEncoder.serializersModule } diff --git a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt index eabfebc5833..daf6c7df6f9 100644 --- a/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt +++ b/bson-kotlinx/src/main/kotlin/org/bson/codecs/kotlinx/utils/BsonCodecUtils.kt @@ -16,7 +16,10 @@ package org.bson.codecs.kotlinx.utils import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.SerializationException import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.elementNames +import kotlinx.serialization.json.JsonNamingStrategy import kotlinx.serialization.modules.SerializersModule import org.bson.AbstractBsonReader import org.bson.BsonWriter @@ -28,6 +31,7 @@ import 
org.bson.codecs.kotlinx.BsonDocumentDecoder import org.bson.codecs.kotlinx.BsonEncoder import org.bson.codecs.kotlinx.BsonEncoderImpl import org.bson.codecs.kotlinx.BsonMapDecoder +import org.bson.codecs.kotlinx.BsonNamingStrategy import org.bson.codecs.kotlinx.BsonPolymorphicDecoder import org.bson.codecs.kotlinx.JsonBsonArrayDecoder import org.bson.codecs.kotlinx.JsonBsonDecoderImpl @@ -59,6 +63,8 @@ internal object BsonCodecUtils { } } + private val cachedElementNamesByDescriptor: MutableMap> = mutableMapOf() + internal fun createBsonEncoder( writer: BsonWriter, serializersModule: SerializersModule, @@ -116,4 +122,73 @@ internal object BsonCodecUtils { return if (hasJsonDecoder) JsonBsonMapDecoder(descriptor, reader, serializersModule, configuration) else BsonMapDecoder(descriptor, reader, serializersModule, configuration) } + + internal fun cacheElementNamesByDescriptor(descriptor: SerialDescriptor, configuration: BsonConfiguration) { + val convertedNameMap = + when (configuration.bsonNamingStrategy) { + BsonNamingStrategy.SNAKE_CASE -> { + val snakeCasedNames = descriptor.elementNames.associateWith { name -> convertCamelCase(name, '_') } + + snakeCasedNames.entries + .groupBy { entry -> entry.value } + .filter { group -> group.value.size > 1 } + .entries + .fold(StringBuilder("")) { acc, group -> + val keys = group.value.joinToString(", ") { entry -> entry.key } + acc.append("$keys in ${descriptor.serialName} generate same name: ${group.key}.\n") + } + .toString() + .takeIf { it.trim().isNotEmpty() } + ?.let { errorMessage: String -> throw SerializationException(errorMessage) } + + snakeCasedNames.entries.associate { it.value to it.key } + } + else -> emptyMap() + } + + cachedElementNamesByDescriptor[descriptor.serialName] = convertedNameMap + } + + internal fun getCachedElementNamesByDescriptor(descriptor: SerialDescriptor): Map { + return cachedElementNamesByDescriptor[descriptor.serialName] ?: emptyMap() + } + + // https://github.com/Kotlin/kotlinx.serialization/blob/f9f160a680da9f92c3bb121ae3644c96e57ba42e/formats/json/commonMain/src/kotlinx/serialization/json/JsonNamingStrategy.kt#L142-L174 + internal fun convertCamelCase(value: String, delimiter: Char) = + buildString(value.length * 2) { + var bufferedChar: Char? = null + var previousUpperCharsCount = 0 + + value.forEach { c -> + if (c.isUpperCase()) { + if (previousUpperCharsCount == 0 && isNotEmpty() && last() != delimiter) append(delimiter) + + bufferedChar?.let(::append) + + previousUpperCharsCount++ + bufferedChar = c.lowercaseChar() + } else { + if (bufferedChar != null) { + if (previousUpperCharsCount > 1 && c.isLetter()) { + append(delimiter) + } + append(bufferedChar) + previousUpperCharsCount = 0 + bufferedChar = null + } + append(c) + } + } + + if (bufferedChar != null) { + append(bufferedChar) + } + } + + internal fun BsonNamingStrategy?.toJsonNamingStrategy(): JsonNamingStrategy? 
{ + return when (this) { + BsonNamingStrategy.SNAKE_CASE -> JsonNamingStrategy.SnakeCase + else -> null + } + } } diff --git a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt index aa749368e04..f9b3eb753c5 100644 --- a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt +++ b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/KotlinSerializerCodecTest.kt @@ -81,6 +81,7 @@ import org.bson.codecs.kotlinx.samples.DataClassWithBsonId import org.bson.codecs.kotlinx.samples.DataClassWithBsonIgnore import org.bson.codecs.kotlinx.samples.DataClassWithBsonProperty import org.bson.codecs.kotlinx.samples.DataClassWithBsonRepresentation +import org.bson.codecs.kotlinx.samples.DataClassWithCamelCase import org.bson.codecs.kotlinx.samples.DataClassWithCollections import org.bson.codecs.kotlinx.samples.DataClassWithContextualDateValues import org.bson.codecs.kotlinx.samples.DataClassWithDataClassMapKey @@ -94,6 +95,7 @@ import org.bson.codecs.kotlinx.samples.DataClassWithFailingInit import org.bson.codecs.kotlinx.samples.DataClassWithJsonElement import org.bson.codecs.kotlinx.samples.DataClassWithJsonElements import org.bson.codecs.kotlinx.samples.DataClassWithJsonElementsNullable +import org.bson.codecs.kotlinx.samples.DataClassWithKotlinAllowedName import org.bson.codecs.kotlinx.samples.DataClassWithListThatLastItemDefaultsToNull import org.bson.codecs.kotlinx.samples.DataClassWithMutableList import org.bson.codecs.kotlinx.samples.DataClassWithMutableMap @@ -105,6 +107,7 @@ import org.bson.codecs.kotlinx.samples.DataClassWithNulls import org.bson.codecs.kotlinx.samples.DataClassWithPair import org.bson.codecs.kotlinx.samples.DataClassWithParameterizedDataClass import org.bson.codecs.kotlinx.samples.DataClassWithRequired +import org.bson.codecs.kotlinx.samples.DataClassWithSameSnakeCaseName import org.bson.codecs.kotlinx.samples.DataClassWithSequence import org.bson.codecs.kotlinx.samples.DataClassWithSimpleValues import org.bson.codecs.kotlinx.samples.DataClassWithTriple @@ -1126,6 +1129,40 @@ class KotlinSerializerCodecTest { } } + @Test + fun testSnakeCaseNamingStrategy() { + val expected = + """{"two_words": "", "my_property": "", "camel_case_underscores": "", "url_mapping": "", + | "my_http_auth": "", "my_http2_api_key": "", "my_http2fast_api_key": ""}""" + .trimMargin() + val dataClass = DataClassWithCamelCase() + assertRoundTrips(expected, dataClass, BsonConfiguration(bsonNamingStrategy = BsonNamingStrategy.SNAKE_CASE)) + } + + @Test + fun testSameSnakeCaseName() { + val expected = """{"my_http_auth": "", "my_http_auth1": ""}""" + val dataClass = DataClassWithSameSnakeCaseName() + val exception = + assertThrows { + assertRoundTrips( + expected, dataClass, BsonConfiguration(bsonNamingStrategy = BsonNamingStrategy.SNAKE_CASE)) + } + assertEquals( + "myHTTPAuth, myHttpAuth in org.bson.codecs.kotlinx.samples.DataClassWithSameSnakeCaseName" + + " generate same name: my_http_auth.\n" + + "myHTTPAuth1, myHttpAuth1 in org.bson.codecs.kotlinx.samples.DataClassWithSameSnakeCaseName" + + " generate same name: my_http_auth1.\n", + exception.message) + } + + @Test + fun testKotlinAllowedName() { + val expected = """{"имя_переменной": "", "variable _name": ""}""" + val dataClass = DataClassWithKotlinAllowedName() + assertRoundTrips(expected, dataClass, BsonConfiguration(bsonNamingStrategy = BsonNamingStrategy.SNAKE_CASE)) + } + private inline fun 
assertRoundTrips( expected: String, value: T, diff --git a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt index e7a06600d20..773af52cd96 100644 --- a/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt +++ b/bson-kotlinx/src/test/kotlin/org/bson/codecs/kotlinx/samples/DataClasses.kt @@ -102,6 +102,31 @@ data class DataClassWithDefaults( val listSimple: List = listOf("a", "b", "c") ) +@Serializable +data class DataClassWithCamelCase( + val twoWords: String = "", + @Suppress("ConstructorParameterNaming") val MyProperty: String = "", + @Suppress("ConstructorParameterNaming") val camel_Case_Underscores: String = "", + @Suppress("ConstructorParameterNaming") val URLMapping: String = "", + val myHTTPAuth: String = "", + val myHTTP2ApiKey: String = "", + val myHTTP2fastApiKey: String = "", +) + +@Serializable +data class DataClassWithSameSnakeCaseName( + val myHTTPAuth: String = "", + val myHttpAuth: String = "", + val myHTTPAuth1: String = "", + val myHttpAuth1: String = "", +) + +@Serializable +data class DataClassWithKotlinAllowedName( + @Suppress("ConstructorParameterNaming") val имяПеременной: String = "", + @Suppress("ConstructorParameterNaming") val `variable Name`: String = "", +) + @Serializable data class DataClassWithNulls(val boolean: Boolean?, val string: String?, val listSimple: List?) @Serializable diff --git a/bson/src/main/org/bson/codecs/BsonArrayCodec.java b/bson/src/main/org/bson/codecs/BsonArrayCodec.java index 2efe3147d8a..6d16bb7d1b0 100644 --- a/bson/src/main/org/bson/codecs/BsonArrayCodec.java +++ b/bson/src/main/org/bson/codecs/BsonArrayCodec.java @@ -23,10 +23,8 @@ import org.bson.BsonWriter; import org.bson.codecs.configuration.CodecRegistry; -import java.util.ArrayList; -import java.util.List; - import static org.bson.assertions.Assertions.notNull; +import static org.bson.codecs.BsonValueCodecProvider.getBsonTypeClassMap; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; /** @@ -37,8 +35,8 @@ public class BsonArrayCodec implements Codec { private static final CodecRegistry DEFAULT_REGISTRY = fromProviders(new BsonValueCodecProvider()); - - private final CodecRegistry codecRegistry; + private static final BsonTypeCodecMap DEFAULT_BSON_TYPE_CODEC_MAP = new BsonTypeCodecMap(getBsonTypeClassMap(), DEFAULT_REGISTRY); + private final BsonTypeCodecMap bsonTypeCodecMap; /** * Creates a new instance with a default codec registry that uses the {@link BsonValueCodecProvider}. 
@@ -46,7 +44,7 @@ public class BsonArrayCodec implements Codec { * @since 3.4 */ public BsonArrayCodec() { - this(DEFAULT_REGISTRY); + this(DEFAULT_BSON_TYPE_CODEC_MAP); } /** @@ -55,21 +53,22 @@ public BsonArrayCodec() { * @param codecRegistry the codec registry */ public BsonArrayCodec(final CodecRegistry codecRegistry) { - this.codecRegistry = notNull("codecRegistry", codecRegistry); + this(new BsonTypeCodecMap(getBsonTypeClassMap(), codecRegistry)); + } + + private BsonArrayCodec(final BsonTypeCodecMap bsonTypeCodecMap) { + this.bsonTypeCodecMap = notNull("bsonTypeCodecMap", bsonTypeCodecMap); } @Override public BsonArray decode(final BsonReader reader, final DecoderContext decoderContext) { + BsonArray bsonArray = new BsonArray(); reader.readStartArray(); - - List list = new ArrayList<>(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - list.add(readValue(reader, decoderContext)); + bsonArray.add(readValue(reader, decoderContext)); } - reader.readEndArray(); - - return new BsonArray(list); + return bsonArray; } @Override @@ -78,7 +77,7 @@ public void encode(final BsonWriter writer, final BsonArray array, final Encoder writer.writeStartArray(); for (BsonValue value : array) { - Codec codec = codecRegistry.get(value.getClass()); + Codec codec = bsonTypeCodecMap.get(value.getBsonType()); encoderContext.encodeWithChildContext(codec, writer, value); } @@ -99,7 +98,7 @@ public Class getEncoderClass() { * @return the non-null value read from the reader */ protected BsonValue readValue(final BsonReader reader, final DecoderContext decoderContext) { - return codecRegistry.get(BsonValueCodecProvider.getClassForBsonType(reader.getCurrentBsonType())).decode(reader, decoderContext); + BsonType currentBsonType = reader.getCurrentBsonType(); + return (BsonValue) bsonTypeCodecMap.get(currentBsonType).decode(reader, decoderContext); } - } diff --git a/bson/src/main/org/bson/codecs/BsonDocumentCodec.java b/bson/src/main/org/bson/codecs/BsonDocumentCodec.java index 405fd78e117..75bd3b7a2b0 100644 --- a/bson/src/main/org/bson/codecs/BsonDocumentCodec.java +++ b/bson/src/main/org/bson/codecs/BsonDocumentCodec.java @@ -17,7 +17,6 @@ package org.bson.codecs; import org.bson.BsonDocument; -import org.bson.BsonElement; import org.bson.BsonObjectId; import org.bson.BsonReader; import org.bson.BsonType; @@ -26,8 +25,6 @@ import org.bson.codecs.configuration.CodecRegistry; import org.bson.types.ObjectId; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import static org.bson.assertions.Assertions.notNull; @@ -79,17 +76,15 @@ public CodecRegistry getCodecRegistry() { @Override public BsonDocument decode(final BsonReader reader, final DecoderContext decoderContext) { - List keyValuePairs = new ArrayList<>(); - + BsonDocument bsonDocument = new BsonDocument(); reader.readStartDocument(); while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { String fieldName = reader.readName(); - keyValuePairs.add(new BsonElement(fieldName, readValue(reader, decoderContext))); + bsonDocument.append(fieldName, readValue(reader, decoderContext)); } reader.readEndDocument(); - - return new BsonDocument(keyValuePairs); + return bsonDocument; } /** @@ -135,7 +130,7 @@ private boolean skipField(final EncoderContext encoderContext, final String key) @SuppressWarnings({"unchecked", "rawtypes"}) private void writeValue(final BsonWriter writer, final EncoderContext encoderContext, final BsonValue value) { - Codec codec = codecRegistry.get(value.getClass()); + Codec codec = 
bsonTypeCodecMap.get(value.getBsonType()); encoderContext.encodeWithChildContext(codec, writer, value); } diff --git a/bson/src/main/org/bson/codecs/BsonTypeClassMap.java b/bson/src/main/org/bson/codecs/BsonTypeClassMap.java index 82144e9b4aa..32acaeb7f85 100644 --- a/bson/src/main/org/bson/codecs/BsonTypeClassMap.java +++ b/bson/src/main/org/bson/codecs/BsonTypeClassMap.java @@ -31,12 +31,11 @@ import org.bson.types.ObjectId; import org.bson.types.Symbol; +import java.util.Arrays; import java.util.Collections; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; /** *

A map from a BSON types to the Class to which it should be decoded. This class is useful if, for example, @@ -71,7 +70,7 @@ */ public class BsonTypeClassMap { static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(); - private final Map> map = new HashMap<>(); + private final Class[] bsonTypeOrdinalToClassMap = new Class[256]; /** * Construct an instance with the default mapping, but replacing the default mapping with any values contained in the given map. @@ -81,7 +80,7 @@ public class BsonTypeClassMap { */ public BsonTypeClassMap(final Map> replacementsForDefaults) { addDefaults(); - map.putAll(replacementsForDefaults); + replacementsForDefaults.forEach((key, value) -> bsonTypeOrdinalToClassMap[key.getValue()] = value); } /** @@ -91,10 +90,6 @@ public BsonTypeClassMap() { this(Collections.emptyMap()); } - Set keys() { - return map.keySet(); - } - /** * Gets the Class that is mapped to the given BSON type. * @@ -102,30 +97,30 @@ Set keys() { * @return the Class that is mapped to the BSON type */ public Class get(final BsonType bsonType) { - return map.get(bsonType); + return bsonTypeOrdinalToClassMap[bsonType.getValue()]; } private void addDefaults() { - map.put(BsonType.ARRAY, List.class); - map.put(BsonType.BINARY, Binary.class); - map.put(BsonType.BOOLEAN, Boolean.class); - map.put(BsonType.DATE_TIME, Date.class); - map.put(BsonType.DB_POINTER, BsonDbPointer.class); - map.put(BsonType.DOCUMENT, Document.class); - map.put(BsonType.DOUBLE, Double.class); - map.put(BsonType.INT32, Integer.class); - map.put(BsonType.INT64, Long.class); - map.put(BsonType.DECIMAL128, Decimal128.class); - map.put(BsonType.MAX_KEY, MaxKey.class); - map.put(BsonType.MIN_KEY, MinKey.class); - map.put(BsonType.JAVASCRIPT, Code.class); - map.put(BsonType.JAVASCRIPT_WITH_SCOPE, CodeWithScope.class); - map.put(BsonType.OBJECT_ID, ObjectId.class); - map.put(BsonType.REGULAR_EXPRESSION, BsonRegularExpression.class); - map.put(BsonType.STRING, String.class); - map.put(BsonType.SYMBOL, Symbol.class); - map.put(BsonType.TIMESTAMP, BsonTimestamp.class); - map.put(BsonType.UNDEFINED, BsonUndefined.class); + bsonTypeOrdinalToClassMap[BsonType.ARRAY.getValue()] = List.class; + bsonTypeOrdinalToClassMap[BsonType.BINARY.getValue()] = Binary.class; + bsonTypeOrdinalToClassMap[BsonType.BOOLEAN.getValue()] = Boolean.class; + bsonTypeOrdinalToClassMap[BsonType.DATE_TIME.getValue()] = Date.class; + bsonTypeOrdinalToClassMap[BsonType.DB_POINTER.getValue()] = BsonDbPointer.class; + bsonTypeOrdinalToClassMap[BsonType.DOCUMENT.getValue()] = Document.class; + bsonTypeOrdinalToClassMap[BsonType.DOUBLE.getValue()] = Double.class; + bsonTypeOrdinalToClassMap[BsonType.INT32.getValue()] = Integer.class; + bsonTypeOrdinalToClassMap[BsonType.INT64.getValue()] = Long.class; + bsonTypeOrdinalToClassMap[BsonType.DECIMAL128.getValue()] = Decimal128.class; + bsonTypeOrdinalToClassMap[BsonType.MAX_KEY.getValue()] = MaxKey.class; + bsonTypeOrdinalToClassMap[BsonType.MIN_KEY.getValue()] = MinKey.class; + bsonTypeOrdinalToClassMap[BsonType.JAVASCRIPT.getValue()] = Code.class; + bsonTypeOrdinalToClassMap[BsonType.JAVASCRIPT_WITH_SCOPE.getValue()] = CodeWithScope.class; + bsonTypeOrdinalToClassMap[BsonType.OBJECT_ID.getValue()] = ObjectId.class; + bsonTypeOrdinalToClassMap[BsonType.REGULAR_EXPRESSION.getValue()] = BsonRegularExpression.class; + bsonTypeOrdinalToClassMap[BsonType.STRING.getValue()] = String.class; + bsonTypeOrdinalToClassMap[BsonType.SYMBOL.getValue()] = Symbol.class; + 
bsonTypeOrdinalToClassMap[BsonType.TIMESTAMP.getValue()] = BsonTimestamp.class; + bsonTypeOrdinalToClassMap[BsonType.UNDEFINED.getValue()] = BsonUndefined.class; } @Override @@ -139,15 +134,11 @@ public boolean equals(final Object o) { BsonTypeClassMap that = (BsonTypeClassMap) o; - if (!map.equals(that.map)) { - return false; - } - - return true; + return Arrays.equals(bsonTypeOrdinalToClassMap, that.bsonTypeOrdinalToClassMap); } @Override public int hashCode() { - return map.hashCode(); + return Arrays.hashCode(bsonTypeOrdinalToClassMap); } } diff --git a/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java b/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java index 510a6041a0b..3a3def7ca7f 100644 --- a/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java +++ b/bson/src/main/org/bson/codecs/BsonTypeCodecMap.java @@ -40,7 +40,7 @@ public class BsonTypeCodecMap { public BsonTypeCodecMap(final BsonTypeClassMap bsonTypeClassMap, final CodecRegistry codecRegistry) { this.bsonTypeClassMap = notNull("bsonTypeClassMap", bsonTypeClassMap); notNull("codecRegistry", codecRegistry); - for (BsonType cur : bsonTypeClassMap.keys()) { + for (BsonType cur : BsonType.values()) { Class clazz = bsonTypeClassMap.get(cur); if (clazz != null) { try { diff --git a/bson/src/main/org/bson/codecs/DocumentCodec.java b/bson/src/main/org/bson/codecs/DocumentCodec.java index 3559e93fcae..0c4161f53fd 100644 --- a/bson/src/main/org/bson/codecs/DocumentCodec.java +++ b/bson/src/main/org/bson/codecs/DocumentCodec.java @@ -156,7 +156,7 @@ public void encode(final BsonWriter writer, final Document document, final Encod beforeFields(writer, encoderContext, document); - for (final Map.Entry entry : ((Map) document).entrySet()) { + for (final Map.Entry entry : document.entrySet()) { if (skipField(encoderContext, entry.getKey())) { continue; } diff --git a/bson/src/main/org/bson/types/ObjectId.java b/bson/src/main/org/bson/types/ObjectId.java index 7c1b1d29540..927d3ab0c31 100644 --- a/bson/src/main/org/bson/types/ObjectId.java +++ b/bson/src/main/org/bson/types/ObjectId.java @@ -16,17 +16,18 @@ package org.bson.types; +import static org.bson.assertions.Assertions.isTrueArgument; +import static org.bson.assertions.Assertions.notNull; + import java.io.InvalidObjectException; import java.io.ObjectInputStream; import java.io.Serializable; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.security.SecureRandom; import java.util.Date; import java.util.concurrent.atomic.AtomicInteger; -import static org.bson.assertions.Assertions.isTrueArgument; -import static org.bson.assertions.Assertions.notNull; - /** *

<p>A globally unique identifier for objects.</p>

* @@ -53,9 +54,8 @@ public final class ObjectId implements Comparable, Serializable { private static final int OBJECT_ID_LENGTH = 12; private static final int LOW_ORDER_THREE_BYTES = 0x00ffffff; - // Use primitives to represent the 5-byte random value. - private static final int RANDOM_VALUE1; - private static final short RANDOM_VALUE2; + // Use upper bytes of a long to represent the 5-byte random value. + private static final long RANDOM_VALUE; private static final AtomicInteger NEXT_COUNTER; @@ -67,18 +67,12 @@ public final class ObjectId implements Comparable, Serializable { * The timestamp */ private final int timestamp; + /** - * The counter. - */ - private final int counter; - /** - * the first four bits of randomness. - */ - private final int randomValue1; - /** - * The last two bits of randomness. + * The final 8 bytes of the ObjectID are 5 bytes probabilistically unique to the machine and + * process, followed by a 3 byte incrementing counter initialized to a random value. */ - private final short randomValue2; + private final long nonce; /** * Gets a new object id. @@ -101,7 +95,7 @@ public static ObjectId get() { * @since 4.1 */ public static ObjectId getSmallestWithDate(final Date date) { - return new ObjectId(dateToTimestampSeconds(date), 0, (short) 0, 0, false); + return new ObjectId(dateToTimestampSeconds(date), 0L); } /** @@ -152,7 +146,7 @@ public ObjectId() { * @param date the date */ public ObjectId(final Date date) { - this(dateToTimestampSeconds(date), NEXT_COUNTER.getAndIncrement() & LOW_ORDER_THREE_BYTES, false); + this(dateToTimestampSeconds(date), RANDOM_VALUE | (NEXT_COUNTER.getAndIncrement() & LOW_ORDER_THREE_BYTES)); } /** @@ -163,7 +157,7 @@ public ObjectId(final Date date) { * @throws IllegalArgumentException if the high order byte of counter is not zero */ public ObjectId(final Date date, final int counter) { - this(dateToTimestampSeconds(date), counter, true); + this(dateToTimestampSeconds(date), getNonceFromUntrustedCounter(counter)); } /** @@ -174,25 +168,19 @@ public ObjectId(final Date date, final int counter) { * @throws IllegalArgumentException if the high order byte of counter is not zero */ public ObjectId(final int timestamp, final int counter) { - this(timestamp, counter, true); + this(timestamp, getNonceFromUntrustedCounter(counter)); } - private ObjectId(final int timestamp, final int counter, final boolean checkCounter) { - this(timestamp, RANDOM_VALUE1, RANDOM_VALUE2, counter, checkCounter); + private ObjectId(final int timestamp, final long nonce) { + this.timestamp = timestamp; + this.nonce = nonce; } - private ObjectId(final int timestamp, final int randomValue1, final short randomValue2, final int counter, - final boolean checkCounter) { - if ((randomValue1 & 0xff000000) != 0) { - throw new IllegalArgumentException("The random value must be between 0 and 16777215 (it must fit in three bytes)."); - } - if (checkCounter && ((counter & 0xff000000) != 0)) { + private static long getNonceFromUntrustedCounter(final int counter) { + if ((counter & 0xff000000) != 0) { throw new IllegalArgumentException("The counter must be between 0 and 16777215 (it must fit in three bytes)."); } - this.timestamp = timestamp; - this.counter = counter & LOW_ORDER_THREE_BYTES; - this.randomValue1 = randomValue1; - this.randomValue2 = randomValue2; + return RANDOM_VALUE | counter; } /** @@ -226,12 +214,14 @@ public ObjectId(final ByteBuffer buffer) { notNull("buffer", buffer); isTrueArgument("buffer.remaining() >=12", buffer.remaining() >= OBJECT_ID_LENGTH); - // 
Note: Cannot use ByteBuffer.getInt because it depends on tbe buffer's byte order - // and ObjectId's are always in big-endian order. - timestamp = makeInt(buffer.get(), buffer.get(), buffer.get(), buffer.get()); - randomValue1 = makeInt((byte) 0, buffer.get(), buffer.get(), buffer.get()); - randomValue2 = makeShort(buffer.get(), buffer.get()); - counter = makeInt((byte) 0, buffer.get(), buffer.get(), buffer.get()); + ByteOrder originalOrder = buffer.order(); + try { + buffer.order(ByteOrder.BIG_ENDIAN); + this.timestamp = buffer.getInt(); + this.nonce = buffer.getLong(); + } finally { + buffer.order(originalOrder); + } } /** @@ -240,9 +230,11 @@ public ObjectId(final ByteBuffer buffer) { * @return the byte array */ public byte[] toByteArray() { - ByteBuffer buffer = ByteBuffer.allocate(OBJECT_ID_LENGTH); - putToByteBuffer(buffer); - return buffer.array(); // using .allocate ensures there is a backing array that can be returned + // using .allocate ensures there is a backing array that can be returned + return ByteBuffer.allocate(OBJECT_ID_LENGTH) + .putInt(this.timestamp) + .putLong(this.nonce) + .array(); } /** @@ -257,18 +249,14 @@ public void putToByteBuffer(final ByteBuffer buffer) { notNull("buffer", buffer); isTrueArgument("buffer.remaining() >=12", buffer.remaining() >= OBJECT_ID_LENGTH); - buffer.put(int3(timestamp)); - buffer.put(int2(timestamp)); - buffer.put(int1(timestamp)); - buffer.put(int0(timestamp)); - buffer.put(int2(randomValue1)); - buffer.put(int1(randomValue1)); - buffer.put(int0(randomValue1)); - buffer.put(short1(randomValue2)); - buffer.put(short0(randomValue2)); - buffer.put(int2(counter)); - buffer.put(int1(counter)); - buffer.put(int0(counter)); + ByteOrder originalOrder = buffer.order(); + try { + buffer.order(ByteOrder.BIG_ENDIAN); + buffer.putInt(this.timestamp); + buffer.putLong(this.nonce); + } finally { + buffer.order(originalOrder); + } } /** @@ -313,49 +301,26 @@ public boolean equals(final Object o) { return false; } - ObjectId objectId = (ObjectId) o; - - if (counter != objectId.counter) { - return false; - } - if (timestamp != objectId.timestamp) { - return false; - } - - if (randomValue1 != objectId.randomValue1) { + ObjectId other = (ObjectId) o; + if (timestamp != other.timestamp) { return false; } - - if (randomValue2 != objectId.randomValue2) { - return false; - } - - return true; + return nonce == other.nonce; } @Override public int hashCode() { - int result = timestamp; - result = 31 * result + counter; - result = 31 * result + randomValue1; - result = 31 * result + randomValue2; - return result; + return 31 * timestamp + Long.hashCode(nonce); } @Override public int compareTo(final ObjectId other) { - if (other == null) { - throw new NullPointerException(); + int cmp = Integer.compareUnsigned(this.timestamp, other.timestamp); + if (cmp != 0) { + return cmp; } - byte[] byteArray = toByteArray(); - byte[] otherByteArray = other.toByteArray(); - for (int i = 0; i < OBJECT_ID_LENGTH; i++) { - if (byteArray[i] != otherByteArray[i]) { - return ((byteArray[i] & 0xff) < (otherByteArray[i] & 0xff)) ? 
-1 : 1; - } - } - return 0; + return Long.compareUnsigned(nonce, other.nonce); } @Override @@ -407,8 +372,7 @@ private Object readResolve() { static { try { SecureRandom secureRandom = new SecureRandom(); - RANDOM_VALUE1 = secureRandom.nextInt(0x01000000); - RANDOM_VALUE2 = (short) secureRandom.nextInt(0x00008000); + RANDOM_VALUE = secureRandom.nextLong() & ~LOW_ORDER_THREE_BYTES; NEXT_COUNTER = new AtomicInteger(secureRandom.nextInt()); } catch (Exception e) { throw new RuntimeException(e); @@ -443,46 +407,4 @@ private static int hexCharToInt(final char c) { private static int dateToTimestampSeconds(final Date time) { return (int) (time.getTime() / 1000); } - - // Big-Endian helpers, in this class because all other BSON numbers are little-endian - - private static int makeInt(final byte b3, final byte b2, final byte b1, final byte b0) { - // CHECKSTYLE:OFF - return (((b3) << 24) | - ((b2 & 0xff) << 16) | - ((b1 & 0xff) << 8) | - ((b0 & 0xff))); - // CHECKSTYLE:ON - } - - private static short makeShort(final byte b1, final byte b0) { - // CHECKSTYLE:OFF - return (short) (((b1 & 0xff) << 8) | ((b0 & 0xff))); - // CHECKSTYLE:ON - } - - private static byte int3(final int x) { - return (byte) (x >> 24); - } - - private static byte int2(final int x) { - return (byte) (x >> 16); - } - - private static byte int1(final int x) { - return (byte) (x >> 8); - } - - private static byte int0(final int x) { - return (byte) (x); - } - - private static byte short1(final short x) { - return (byte) (x >> 8); - } - - private static byte short0(final short x) { - return (byte) (x); - } - } diff --git a/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy b/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy index b0eae796fc4..b5217676871 100644 --- a/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy +++ b/bson/src/test/unit/org/bson/codecs/IterableCodecProviderSpecification.groovy @@ -16,6 +16,7 @@ package org.bson.codecs +import org.bson.BsonType import spock.lang.Specification import static org.bson.codecs.configuration.CodecRegistries.fromProviders @@ -57,7 +58,7 @@ class IterableCodecProviderSpecification extends Specification { def 'unidentical instances should not be equal'() { given: def first = new IterableCodecProvider() - def second = new IterableCodecProvider(new BsonTypeClassMap([BOOLEAN: String])) + def second = new IterableCodecProvider(new BsonTypeClassMap([(BsonType.BOOLEAN): String])) def third = new IterableCodecProvider(new BsonTypeClassMap(), { Object from -> from }) diff --git a/bson/src/test/unit/org/bson/types/ObjectIdTest.java b/bson/src/test/unit/org/bson/types/ObjectIdTest.java index 14c8241f55a..cfe04623b90 100644 --- a/bson/src/test/unit/org/bson/types/ObjectIdTest.java +++ b/bson/src/test/unit/org/bson/types/ObjectIdTest.java @@ -17,36 +17,72 @@ package org.bson.types; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; +import java.nio.Buffer; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.text.ParseException; import java.text.SimpleDateFormat; +import java.util.ArrayList; import java.util.Date; +import java.util.List; import java.util.Locale; import java.util.Random; +import static org.junit.Assert.assertFalse; import static 
org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; public class ObjectIdTest { - @Test - public void testToBytes() { + + /** Calls the base method of ByteBuffer.position(int) since the override is not available in jdk8. */ + private static ByteBuffer setPosition(final ByteBuffer buf, final int pos) { + ((Buffer) buf).position(pos); + return buf; + } + + /** + * MethodSource for valid ByteBuffers that can hold an ObjectID + */ + public static List validOutputBuffers() { + List result = new ArrayList<>(); + result.add(ByteBuffer.allocate(12)); + result.add(ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN)); + result.add(ByteBuffer.allocate(24).put(new byte[12])); + result.add(ByteBuffer.allocateDirect(12)); + result.add(ByteBuffer.allocateDirect(12).order(ByteOrder.LITTLE_ENDIAN)); + return result; + } + + @MethodSource("validOutputBuffers") + @ParameterizedTest + public void testToBytes(final ByteBuffer output) { + int originalPosition = output.position(); + ByteOrder originalOrder = output.order(); byte[] expectedBytes = {81, 6, -4, -102, -68, -126, 55, 85, -127, 54, -46, -119}; + byte[] result = new byte[12]; ObjectId objectId = new ObjectId(expectedBytes); assertArrayEquals(expectedBytes, objectId.toByteArray()); - ByteBuffer buffer = ByteBuffer.allocate(12); - objectId.putToByteBuffer(buffer); - assertArrayEquals(expectedBytes, buffer.array()); + objectId.putToByteBuffer(output); + ((Buffer) output).position(output.position() - 12); + output.get(result); // read last 12 bytes leaving position intact + + assertArrayEquals(expectedBytes, result); + assertEquals(originalPosition + 12, output.position()); + assertEquals(originalOrder, output.order()); } @Test @@ -136,8 +172,64 @@ public void testTime() { } @Test - public void testDateCons() { + public void testDateConstructor() { assertEquals(new Date().getTime() / 1000, new ObjectId(new Date()).getDate().getTime() / 1000); + assertNotEquals(new ObjectId(new Date(1_000)), new ObjectId(new Date(1_000))); + assertEquals("00000001", new ObjectId(new Date(1_000)).toHexString().substring(0, 8)); + } + + @Test + public void testDateConstructorWithCounter() { + assertEquals(new ObjectId(new Date(1_000), 1), new ObjectId(new Date(1_000), 1)); + assertEquals("00000001", new ObjectId(new Date(1_000), 1).toHexString().substring(0, 8)); + assertThrows(NullPointerException.class, () -> new ObjectId(null, Integer.MAX_VALUE)); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(new Date(1_000), Integer.MAX_VALUE)); + } + + @Test + public void testTimestampConstructor() { + assertEquals(1_000, new ObjectId(1_000, 1).getTimestamp()); + assertEquals(new ObjectId(1_000, 1), new ObjectId(1_000, 1)); + assertEquals("7fffffff", new ObjectId(Integer.MAX_VALUE, 1).toHexString().substring(0, 8)); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(Integer.MAX_VALUE, Integer.MAX_VALUE)); + } + + /** + * MethodSource for valid ByteBuffers containing an ObjectID at the current position. 
+ */ + public static List validInputBuffers() { + byte[] data = new byte[12]; + for (byte i = 0; i < data.length; ++i) { + data[i] = i; + } + + List result = new ArrayList<>(); + result.add(ByteBuffer.wrap(data)); + result.add(ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN)); + result.add(setPosition(ByteBuffer.allocateDirect(data.length).put(data), 0)); + result.add(setPosition(ByteBuffer.allocateDirect(data.length).put(data).order(ByteOrder.LITTLE_ENDIAN), 0)); + result.add(setPosition(ByteBuffer.allocate(2 * data.length).put(data), 0)); + result.add(setPosition(ByteBuffer.allocate(2 * data.length).put(new byte[12]).put(data), 12)); + return result; + } + + @ParameterizedTest + @MethodSource(value = "validInputBuffers") + public void testByteBufferConstructor(final ByteBuffer input) { + ByteOrder order = input.order(); + int position = input.position(); + + byte[] result = new ObjectId(input).toByteArray(); + + assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, result); + assertEquals(order, input.order()); + assertEquals(position + 12, input.position()); + } + + @Test + public void testInvalidByteBufferConstructor() { + assertThrows(IllegalArgumentException.class, () -> new ObjectId((ByteBuffer) null)); + assertThrows(IllegalArgumentException.class, () -> new ObjectId(ByteBuffer.allocate(11))); } @Test @@ -162,6 +254,23 @@ public void testCompareTo() { assertEquals(-1, first.compareTo(third)); assertEquals(1, second.compareTo(first)); assertEquals(1, third.compareTo(first)); + assertThrows(NullPointerException.class, () -> first.compareTo(null)); + } + + @Test + public void testEquals() { + Date dateOne = new Date(); + Date dateTwo = new Date(dateOne.getTime() + 10000); + ObjectId first = new ObjectId(dateOne, 0); + ObjectId second = new ObjectId(dateOne, 1); + ObjectId third = new ObjectId(dateTwo, 0); + ObjectId fourth = new ObjectId(first.toByteArray()); + assertEquals(first, first); + assertEquals(first, fourth); + assertNotEquals(first, second); + assertNotEquals(first, third); + assertNotEquals(second, third); + assertFalse(first.equals(null)); } @Test diff --git a/build.gradle b/build.gradle index df2f70c49de..baf9769015e 100644 --- a/build.gradle +++ b/build.gradle @@ -53,8 +53,8 @@ ext { nettyVersion = '4.1.87.Final' snappyVersion = '1.1.10.3' zstdVersion = '1.5.5-3' - awsSdkV2Version = '2.18.9' - awsSdkV1Version = '1.12.337' + awsSdkV2Version = '2.30.31' + awsSdkV1Version = '1.12.782' projectReactorVersion = '2022.0.0' junitBomVersion = '5.10.2' logbackVersion = '1.3.14' @@ -65,9 +65,10 @@ ext { def configDir = ext.configDir def utilProjects = project(":util").allprojects +def bomProjects = project(":bom") def coreProjects = subprojects - utilProjects -def scalaProjects = subprojects.findAll { it.name.contains('scala') } -def javaProjects = subprojects - scalaProjects +def scalaProjects = subprojects.findAll { it.name.contains('scala') } - bomProjects +def javaProjects = subprojects - scalaProjects - bomProjects def javaMainProjects = javaProjects - utilProjects def javaCodeCheckedProjects = javaMainProjects.findAll { !['driver-benchmarks', 'driver-workload-executor', 'driver-lambda'].contains(it.name) } def javaAndScalaTestedProjects = javaCodeCheckedProjects + scalaProjects @@ -76,7 +77,7 @@ configure(coreProjects) { apply plugin: 'idea' group = 'org.mongodb' - version = '5.3.0-SNAPSHOT' + version = '5.4.0' repositories { mavenLocal() diff --git a/driver-benchmarks/build.gradle b/driver-benchmarks/build.gradle index 91d979cff68..84e32c42e52 100644 --- 
a/driver-benchmarks/build.gradle +++ b/driver-benchmarks/build.gradle @@ -33,6 +33,16 @@ dependencies { api project(':driver-sync') api project(':mongodb-crypt') implementation "ch.qos.logback:logback-classic:$logbackVersion" + + implementation 'org.openjdk.jmh:jmh-core:1.37' + annotationProcessor 'org.openjdk.jmh:jmh-generator-annprocess:1.37' +} + +tasks.register("jmh", JavaExec) { + group = 'benchmark' + description = 'Run JMH benchmarks.' + mainClass = 'org.openjdk.jmh.Main' + classpath = sourceSets.main.runtimeClasspath } javadoc { diff --git a/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonArrayCodecBenchmark.java b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonArrayCodecBenchmark.java new file mode 100644 index 00000000000..75cc9dab337 --- /dev/null +++ b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonArrayCodecBenchmark.java @@ -0,0 +1,99 @@ +/* + * Copyright 2016-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.mongodb.benchmark.jmh.codec; + +import com.mongodb.internal.connection.ByteBufferBsonOutput; +import com.mongodb.internal.connection.PowerOfTwoBufferPool; +import org.bson.BsonArray; +import org.bson.BsonBinaryReader; +import org.bson.BsonBinaryWriter; +import org.bson.BsonDocument; +import org.bson.BsonDouble; +import org.bson.codecs.BsonArrayCodec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import com.mongodb.lang.NonNull; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.TimeUnit; +import static com.mongodb.benchmark.jmh.codec.BsonUtils.getDocumentAsBuffer; + +@BenchmarkMode(Mode.Throughput) +@Warmup(iterations = 20, time = 2, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 20, time = 2, timeUnit = TimeUnit.SECONDS) +@OutputTimeUnit(TimeUnit.SECONDS) +@Fork(3) +public class BsonArrayCodecBenchmark { + + @State(Scope.Benchmark) + public static class Input { + protected final PowerOfTwoBufferPool bufferPool = PowerOfTwoBufferPool.DEFAULT; + protected final BsonArrayCodec bsonArrayCodec = new BsonArrayCodec(); + protected BsonDocument document; + protected byte[] documentBytes; + private BsonBinaryReader reader; + private BsonBinaryWriter writer; + private BsonArray bsonValues; + + @Setup + public void setup() throws IOException { + bsonValues = new BsonArray(); + document = new BsonDocument("array", bsonValues); + + for (int i = 0; i < 1000; i++) { + 
bsonValues.add(new BsonDouble(i)); + } + + documentBytes = getDocumentAsBuffer(document); + } + + @Setup(Level.Invocation) + public void beforeIteration() { + reader = new BsonBinaryReader(ByteBuffer.wrap(documentBytes)); + writer = new BsonBinaryWriter(new ByteBufferBsonOutput(bufferPool)); + + reader.readStartDocument(); + writer.writeStartDocument(); + writer.writeName("array"); + } + } + + @Benchmark + public void decode(@NonNull Input input, @NonNull Blackhole blackhole) { + blackhole.consume(input.bsonArrayCodec.decode(input.reader, DecoderContext.builder().build())); + } + + @Benchmark + public void encode(@NonNull Input input, @NonNull Blackhole blackhole) { + input.bsonArrayCodec.encode(input.writer, input.bsonValues, EncoderContext.builder().build()); + blackhole.consume(input); + } +} diff --git a/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonDocumentBenchmark.java b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonDocumentBenchmark.java new file mode 100644 index 00000000000..b050f19007e --- /dev/null +++ b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonDocumentBenchmark.java @@ -0,0 +1,86 @@ +/* + * Copyright 2016-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.mongodb.benchmark.jmh.codec; + +import com.mongodb.internal.connection.ByteBufferBsonOutput; +import com.mongodb.internal.connection.PowerOfTwoBufferPool; +import org.bson.BsonBinaryReader; +import org.bson.BsonBinaryWriter; +import org.bson.BsonDocument; +import org.bson.BsonInt32; +import org.bson.codecs.BsonDocumentCodec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import com.mongodb.lang.NonNull; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.TimeUnit; +import static com.mongodb.benchmark.jmh.codec.BsonUtils.getDocumentAsBuffer; + +/** + * Benchmark with minimal dependency on other codecs to evaluate BsonDocumentCodec's internal performance. 
+ */ +@BenchmarkMode(Mode.Throughput) +@Warmup(iterations = 20, time = 2, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 20, time = 2, timeUnit = TimeUnit.SECONDS) +@OutputTimeUnit(TimeUnit.SECONDS) +@Fork(3) +public class BsonDocumentBenchmark { + + @State(Scope.Benchmark) + public static class Input { + protected final PowerOfTwoBufferPool bufferPool = PowerOfTwoBufferPool.DEFAULT; + protected final BsonDocumentCodec bsonDocumentCodec = new BsonDocumentCodec(); + protected BsonDocument document; + protected byte[] documentBytes; + + @Setup + public void setup() throws IOException { + document = new BsonDocument(); + + for (int i = 0; i < 500; i++) { + document.append(Integer.toString(i), new BsonInt32(i)); + } + + documentBytes = getDocumentAsBuffer(document); + } + } + + @Benchmark + public void decode(@NonNull Input input, @NonNull Blackhole blackhole) { + blackhole.consume(input.bsonDocumentCodec.decode(new BsonBinaryReader(ByteBuffer.wrap(input.documentBytes)), DecoderContext.builder().build())); + } + + @Benchmark + public void encode(@NonNull Input input, @NonNull Blackhole blackhole) { + input.bsonDocumentCodec.encode(new BsonBinaryWriter(new ByteBufferBsonOutput(input.bufferPool)), input.document, EncoderContext.builder().build()); + blackhole.consume(input); + } +} diff --git a/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonUtils.java b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonUtils.java new file mode 100644 index 00000000000..58ad034788b --- /dev/null +++ b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/BsonUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright 2016-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.mongodb.benchmark.jmh.codec; + +import org.bson.BsonBinaryWriter; +import org.bson.BsonDocument; +import org.bson.codecs.BsonDocumentCodec; +import org.bson.codecs.Codec; +import org.bson.codecs.EncoderContext; +import org.bson.io.BasicOutputBuffer; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +public class BsonUtils { + + private static final Codec BSON_DOCUMENT_CODEC = new BsonDocumentCodec(); + + private BsonUtils(){ + //NOP + } + + public static byte[] getDocumentAsBuffer(final BsonDocument document) throws IOException { + BasicOutputBuffer buffer = new BasicOutputBuffer(); + BSON_DOCUMENT_CODEC.encode(new BsonBinaryWriter(buffer), document, EncoderContext.builder().build()); + + ByteArrayOutputStream baos = new ByteArrayOutputStream(buffer.getSize()); + buffer.pipe(baos); + return baos.toByteArray(); + } +} diff --git a/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/package-info.java b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/package-info.java new file mode 100644 index 00000000000..4c2731a218f --- /dev/null +++ b/driver-benchmarks/src/main/com/mongodb/benchmark/jmh/codec/package-info.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016-present MongoDB, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/** + * Contains JMH benchmarks for targeted components and code paths. + * + *

When changes are made, the existing benchmarks can be quickly executed to assess + * any performance impact. These benchmarks are intended for targeted evaluation in a local environment or on a spawn host + * and are not currently executed in Evergreen. If a benchmark for a particular code path or component does not yet + * exist, this package provides a convenient location to set up a new one + * for performance testing.
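For instance, the benchmarks in this package can be launched locally through the jmh Gradle task registered in driver-benchmarks/build.gradle (roughly ./gradlew :driver-benchmarks:jmh; the exact project path is an assumption), or programmatically through the standard JMH runner API, as in this sketch (the include pattern is only an illustration):

    import org.openjdk.jmh.runner.Runner;
    import org.openjdk.jmh.runner.RunnerException;
    import org.openjdk.jmh.runner.options.Options;
    import org.openjdk.jmh.runner.options.OptionsBuilder;

    public final class CodecBenchmarkLauncher {
        public static void main(final String[] args) throws RunnerException {
            // Restrict the run to the codec benchmarks in this package.
            Options options = new OptionsBuilder()
                    .include("com\\.mongodb\\.benchmark\\.jmh\\.codec\\..*")
                    .build();
            new Runner(options).run();
        }
    }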

+ */ +package com.mongodb.benchmark.jmh.codec; diff --git a/driver-core/src/main/com/mongodb/ConnectionString.java b/driver-core/src/main/com/mongodb/ConnectionString.java index 69db84eb072..062ca1d45c2 100644 --- a/driver-core/src/main/com/mongodb/ConnectionString.java +++ b/driver-core/src/main/com/mongodb/ConnectionString.java @@ -509,7 +509,6 @@ public ConnectionString(final String connectionString, @Nullable final DnsClient credential = createCredentials(combinedOptionsMaps, userName, password); warnOnUnsupportedOptions(combinedOptionsMaps); - warnDeprecatedTimeouts(combinedOptionsMaps); } private static final Set GENERAL_OPTIONS_KEYS = new LinkedHashSet<>(); @@ -518,7 +517,6 @@ public ConnectionString(final String connectionString, @Nullable final DnsClient private static final Set WRITE_CONCERN_KEYS = new HashSet<>(); private static final Set COMPRESSOR_KEYS = new HashSet<>(); private static final Set ALL_KEYS = new HashSet<>(); - private static final Set DEPRECATED_TIMEOUT_KEYS = new HashSet<>(); static { GENERAL_OPTIONS_KEYS.add("minpoolsize"); @@ -592,10 +590,6 @@ public ConnectionString(final String connectionString, @Nullable final DnsClient ALL_KEYS.addAll(READ_PREFERENCE_KEYS); ALL_KEYS.addAll(WRITE_CONCERN_KEYS); ALL_KEYS.addAll(COMPRESSOR_KEYS); - - DEPRECATED_TIMEOUT_KEYS.add("sockettimeoutms"); - DEPRECATED_TIMEOUT_KEYS.add("waitqueuetimeoutms"); - DEPRECATED_TIMEOUT_KEYS.add("wtimeoutms"); } // Any options contained in the connection string completely replace the corresponding options specified in TXT records, @@ -616,15 +610,6 @@ private void warnOnUnsupportedOptions(final Map> optionsMap .forEach(k -> LOGGER.warn(format("Connection string contains unsupported option '%s'.", k))); } } - private void warnDeprecatedTimeouts(final Map> optionsMap) { - if (LOGGER.isWarnEnabled()) { - optionsMap.keySet() - .stream() - .filter(DEPRECATED_TIMEOUT_KEYS::contains) - .forEach(k -> LOGGER.warn(format("Use of deprecated timeout option: '%s'. Prefer 'timeoutMS' instead.", k))); - } - } - private void translateOptions(final Map> optionsMap) { boolean tlsInsecureSet = false; diff --git a/driver-core/src/main/com/mongodb/MongoCredential.java b/driver-core/src/main/com/mongodb/MongoCredential.java index f55251a7603..6e83e54a3cf 100644 --- a/driver-core/src/main/com/mongodb/MongoCredential.java +++ b/driver-core/src/main/com/mongodb/MongoCredential.java @@ -189,7 +189,7 @@ public final class MongoCredential { /** * Mechanism property key for specifying the environment for OIDC, which is * the name of a built-in OIDC application environment integration to use - * to obtain credentials. The value must be either "gcp" or "azure". + * to obtain credentials. The value must be either "k8s", "gcp", or "azure". * This is an alternative to supplying a callback. *

* The "gcp" and "azure" environments require @@ -199,6 +199,11 @@ public final class MongoCredential { * {@link MongoCredential#OIDC_CALLBACK_KEY} and * {@link MongoCredential#OIDC_HUMAN_CALLBACK_KEY} * must not be provided. + *

+ * The "k8s" environment will check the env vars + * {@code AZURE_FEDERATED_TOKEN_FILE}, and then {@code AWS_WEB_IDENTITY_TOKEN_FILE}, + * for the token file path, and if neither is set will then use the path + * {@code /var/run/secrets/kubernetes.io/serviceaccount/token}. * * @see #createOidcCredential(String) * @see MongoCredential#TOKEN_RESOURCE_KEY @@ -265,7 +270,7 @@ public final class MongoCredential { "*.mongodb.net", "*.mongodb-qa.net", "*.mongodb-dev.net", "*.mongodbgov.net", "localhost", "127.0.0.1", "::1")); /** - * Mechanism property key for specifying he URI of the target resource (sometimes called the audience), + * Mechanism property key for specifying the URI of the target resource (sometimes called the audience), * used in some OIDC environments. * *
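A sketch of selecting these built-in OIDC environments through mechanism properties (the host and audience values are placeholders, and the "gcp"/"azure" variant additionally needs the token resource described above):

    import com.mongodb.ConnectionString;
    import com.mongodb.MongoClientSettings;
    import com.mongodb.MongoCredential;

    public final class OidcEnvironmentExamples {
        // "k8s": the driver reads the service-account token file itself, so no callback is supplied.
        static MongoClientSettings k8sSettings() {
            MongoCredential credential = MongoCredential.createOidcCredential(null)
                    .withMechanismProperty(MongoCredential.ENVIRONMENT_KEY, "k8s");
            return MongoClientSettings.builder()
                    .applyConnectionString(new ConnectionString("mongodb://example-host"))
                    .credential(credential)
                    .build();
        }

        // "azure" (and similarly "gcp") also requires the target resource / audience.
        static MongoCredential azureCredential() {
            return MongoCredential.createOidcCredential(null)
                    .withMechanismProperty(MongoCredential.ENVIRONMENT_KEY, "azure")
                    .withMechanismProperty(MongoCredential.TOKEN_RESOURCE_KEY, "<audience>");
        }
    }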

A TOKEN_RESOURCE with a comma character must be given as a `MongoClient` configuration and not as diff --git a/driver-core/src/main/com/mongodb/assertions/Assertions.java b/driver-core/src/main/com/mongodb/assertions/Assertions.java index a40b4e4b7b6..bf38638dc6d 100644 --- a/driver-core/src/main/com/mongodb/assertions/Assertions.java +++ b/driver-core/src/main/com/mongodb/assertions/Assertions.java @@ -179,6 +179,19 @@ public static boolean assertTrue(final boolean value) throws AssertionError { return true; } + /** + * @param value A value to check. + * @param message The message. + * @return {@code true}. + * @throws AssertionError If {@code value} is {@code false}. + */ + public static boolean assertTrue(final boolean value, final String message) throws AssertionError { + if (!value) { + throw new AssertionError(message); + } + return true; + } + /** * @param value A value to check. * @return {@code false}. diff --git a/driver-core/src/main/com/mongodb/client/model/bulk/ClientReplaceOneOptions.java b/driver-core/src/main/com/mongodb/client/model/bulk/ClientReplaceOneOptions.java index 2142d736f60..4de01a94843 100644 --- a/driver-core/src/main/com/mongodb/client/model/bulk/ClientReplaceOneOptions.java +++ b/driver-core/src/main/com/mongodb/client/model/bulk/ClientReplaceOneOptions.java @@ -74,4 +74,19 @@ static ClientReplaceOneOptions clientReplaceOneOptions() { */ @Override ClientReplaceOneOptions upsert(@Nullable Boolean upsert); + + /** + * Sets the sort criteria to apply to the operation. A null value means no sort criteria is set. + * + *

+ * The sort criteria determines which document the operation replaces if the query matches multiple documents. + * The first document matched by the specified sort criteria will be replaced. + * + * @param sort The sort criteria. {@code null} represents the server default. + * @return this + * @mongodb.driver.manual reference/method/db.collection.replaceOne/ Sort + * @mongodb.server.release 8.0 + * @since 5.4 + */ + ClientReplaceOneOptions sort(@Nullable Bson sort); } diff --git a/driver-core/src/main/com/mongodb/client/model/bulk/ClientUpdateOneOptions.java b/driver-core/src/main/com/mongodb/client/model/bulk/ClientUpdateOneOptions.java index 9b04ec6ef15..c5abea43b2a 100644 --- a/driver-core/src/main/com/mongodb/client/model/bulk/ClientUpdateOneOptions.java +++ b/driver-core/src/main/com/mongodb/client/model/bulk/ClientUpdateOneOptions.java @@ -85,4 +85,19 @@ static ClientUpdateOneOptions clientUpdateOneOptions() { */ @Override ClientUpdateOneOptions upsert(@Nullable Boolean upsert); + + /** + * Sets the sort criteria to apply to the operation. A null value means no sort criteria is set. + * + *
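A sketch of setting the sort option introduced here on the replace-one options (the update-one options that follow gain the same method; Sorts is the existing sort builder in com.mongodb.client.model):

    import com.mongodb.client.model.Sorts;
    import com.mongodb.client.model.bulk.ClientReplaceOneOptions;

    import static com.mongodb.client.model.bulk.ClientReplaceOneOptions.clientReplaceOneOptions;

    public final class ReplaceOneSortExample {
        static ClientReplaceOneOptions options() {
            // If the filter matches several documents, the first one in ascending "_id" order is replaced.
            return clientReplaceOneOptions()
                    .sort(Sorts.ascending("_id"))
                    .upsert(true);
        }
    }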

+ * The sort criteria determines which document the operation updates if the query matches multiple documents. + * The first document matched by the specified sort criteria will be updated. + * + * @param sort The sort criteria. {@code null} represents the server default. + * @return this + * @mongodb.driver.manual reference/method/db.collection.updateOne/ Sort + * @mongodb.server.release 8.0 + * @since 5.4 + */ + ClientUpdateOneOptions sort(@Nullable Bson sort); } diff --git a/driver-core/src/main/com/mongodb/client/model/mql/MqlValues.java b/driver-core/src/main/com/mongodb/client/model/mql/MqlValues.java index a2d58fbc02b..e3e2bbd56a2 100644 --- a/driver-core/src/main/com/mongodb/client/model/mql/MqlValues.java +++ b/driver-core/src/main/com/mongodb/client/model/mql/MqlValues.java @@ -74,11 +74,11 @@ public static MqlBoolean of(final boolean of) { */ public static MqlArray ofBooleanArray(final boolean... array) { Assertions.notNull("array", array); - List list = new ArrayList<>(); + BsonArray bsonArray = new BsonArray(); for (boolean b : array) { - list.add(new BsonBoolean(b)); + bsonArray.add(new BsonBoolean(b)); } - return new MqlExpression<>((cr) -> new AstPlaceholder(new BsonArray(list))); + return new MqlExpression<>((cr) -> new AstPlaceholder(bsonArray)); } /** @@ -102,11 +102,11 @@ public static MqlInteger of(final int of) { */ public static MqlArray ofIntegerArray(final int... array) { Assertions.notNull("array", array); - List list = new ArrayList<>(); + BsonArray bsonArray = new BsonArray(); for (int i : array) { - list.add(new BsonInt32(i)); + bsonArray.add(new BsonInt32(i)); } - return new MqlExpression<>((cr) -> new AstPlaceholder(new BsonArray(list))); + return new MqlExpression<>((cr) -> new AstPlaceholder(bsonArray)); } /** @@ -130,11 +130,11 @@ public static MqlInteger of(final long of) { */ public static MqlArray ofIntegerArray(final long... array) { Assertions.notNull("array", array); - List list = new ArrayList<>(); + BsonArray bsonArray = new BsonArray(); for (long i : array) { - list.add(new BsonInt64(i)); + bsonArray.add(new BsonInt64(i)); } - return new MqlExpression<>((cr) -> new AstPlaceholder(new BsonArray(list))); + return new MqlExpression<>((cr) -> new AstPlaceholder(bsonArray)); } /** @@ -158,11 +158,11 @@ public static MqlNumber of(final double of) { */ public static MqlArray ofNumberArray(final double... array) { Assertions.notNull("array", array); - List list = new ArrayList<>(); + BsonArray bsonArray = new BsonArray(); for (double n : array) { - list.add(new BsonDouble(n)); + bsonArray.add(new BsonDouble(n)); } - return new MqlExpression<>((cr) -> new AstPlaceholder(new BsonArray(list))); + return new MqlExpression<>((cr) -> new AstPlaceholder(bsonArray)); } /** @@ -310,12 +310,12 @@ public static MqlDocument current() { public static MqlArray ofArray(final T... 
array) { Assertions.notNull("array", array); return new MqlExpression<>((cr) -> { - List list = new ArrayList<>(); + BsonArray bsonArray = new BsonArray(); for (T v : array) { Assertions.notNull("elements of array", v); - list.add(((MqlExpression) v).toBsonValue(cr)); + bsonArray.add(((MqlExpression) v).toBsonValue(cr)); } - return new AstPlaceholder(new BsonArray(list)); + return new AstPlaceholder(bsonArray); }); } diff --git a/driver-core/src/main/com/mongodb/client/model/search/EqualsSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/EqualsSearchOperator.java new file mode 100644 index 00000000000..b3aa4c278ea --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/EqualsSearchOperator.java @@ -0,0 +1,43 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; + +import java.util.UUID; + +import java.time.Instant; + +import org.bson.types.ObjectId; + +/** + * @see SearchOperator#equals(FieldSearchPath, boolean) + * @see SearchOperator#equals(FieldSearchPath, ObjectId) + * @see SearchOperator#equals(FieldSearchPath, Number) + * @see SearchOperator#equals(FieldSearchPath, Instant) + * @see SearchOperator#equals(FieldSearchPath, String) + * @see SearchOperator#equals(FieldSearchPath, UUID) + * @see SearchOperator#equalsNull(FieldSearchPath) + * @since 5.3 + */ +@Sealed +@Beta(Reason.CLIENT) +public interface EqualsSearchOperator extends SearchOperator { + @Override + EqualsSearchOperator score(SearchScore modifier); +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/InSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/InSearchOperator.java new file mode 100644 index 00000000000..4719d1b0bc6 --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/InSearchOperator.java @@ -0,0 +1,41 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; +import org.bson.types.ObjectId; + +import java.time.Instant; +import java.util.UUID; + +/** + * @see SearchOperator#in(FieldSearchPath, boolean, boolean...) + * @see SearchOperator#in(FieldSearchPath, ObjectId, ObjectId...) 
+ * @see SearchOperator#in(FieldSearchPath, Number, Number...) + * @see SearchOperator#in(FieldSearchPath, Instant, Instant...) + * @see SearchOperator#in(FieldSearchPath, UUID, UUID...) + * @see SearchOperator#in(FieldSearchPath, String, String...) + * @see SearchOperator#in(FieldSearchPath, Iterable) + * @since 5.3 + */ +@Sealed +@Beta(Reason.CLIENT) +public interface InSearchOperator extends SearchOperator { + @Override + InSearchOperator score(SearchScore modifier); +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/MoreLikeThisSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/MoreLikeThisSearchOperator.java new file mode 100644 index 00000000000..b9f9826d858 --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/MoreLikeThisSearchOperator.java @@ -0,0 +1,33 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; +import org.bson.BsonDocument; + +/** + * @see SearchOperator#moreLikeThis(BsonDocument) + * @see SearchOperator#moreLikeThis(Iterable) + * @since 4.7 + */ +@Sealed +@Beta(Reason.CLIENT) +public interface MoreLikeThisSearchOperator extends SearchOperator { + @Override + TextSearchOperator score(SearchScore modifier); +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/PhraseConstructibleBsonElement.java b/driver-core/src/main/com/mongodb/client/model/search/PhraseConstructibleBsonElement.java new file mode 100644 index 00000000000..0f18e2db7a9 --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/PhraseConstructibleBsonElement.java @@ -0,0 +1,53 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.mongodb.client.model.search; + +import com.mongodb.internal.client.model.AbstractConstructibleBsonElement; + +import org.bson.conversions.Bson; + +import static com.mongodb.assertions.Assertions.notNull; + +final class PhraseConstructibleBsonElement extends AbstractConstructibleBsonElement implements + PhraseSearchOperator { + PhraseConstructibleBsonElement(final String name, final Bson value) { + super(name, value); + } + + private PhraseConstructibleBsonElement(final Bson baseElement, final Bson appendedElementValue) { + super(baseElement, appendedElementValue); + } + + @Override + protected PhraseConstructibleBsonElement newSelf(final Bson baseElement, final Bson appendedElementValue) { + return new PhraseConstructibleBsonElement(baseElement, appendedElementValue); + } + + @Override + public PhraseSearchOperator synonyms(final String name) { + return newWithAppendedValue("synonyms", notNull("name", name)); + } + + @Override + public PhraseSearchOperator slop(final int slop) { + return newWithAppendedValue("slop", slop); + } + + @Override + public PhraseConstructibleBsonElement score(final SearchScore modifier) { + return newWithAppendedValue("score", notNull("modifier", modifier)); + } +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/PhraseSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/PhraseSearchOperator.java new file mode 100644 index 00000000000..3ac2abe05ad --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/PhraseSearchOperator.java @@ -0,0 +1,51 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; + +/** + * @see SearchOperator#phrase(SearchPath, String) + * @see SearchOperator#phrase(Iterable, Iterable) + * @since 5.3 + */ + +@Sealed +@Beta(Reason.CLIENT) +public interface PhraseSearchOperator extends SearchOperator { + @Override + PhraseSearchOperator score(SearchScore modifier); + + /** + * Creates a new {@link PhraseSearchOperator} that uses slop. The default value is 0. + * + * @param slop The allowable distance between words in the query phrase. + * @return A new {@link PhraseSearchOperator}. + */ + PhraseSearchOperator slop(int slop); + + /** + * Creates a new {@link PhraseSearchOperator} that uses synonyms. + * + * @param name The name of the synonym mapping. + * @return A new {@link PhraseSearchOperator}. 
+ * + * @mongodb.atlas.manual atlas-search/synonyms/ Synonym mappings + */ + PhraseSearchOperator synonyms(String name); +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/QueryStringSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/QueryStringSearchOperator.java new file mode 100644 index 00000000000..eb32ee8c733 --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/QueryStringSearchOperator.java @@ -0,0 +1,31 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; + +/** + * @see SearchOperator#queryString(FieldSearchPath, String) + * @since 5.3 + */ +@Sealed +@Beta(Reason.CLIENT) +public interface QueryStringSearchOperator extends SearchOperator { + @Override + QueryStringSearchOperator score(SearchScore modifier); +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/RegexSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/RegexSearchOperator.java new file mode 100644 index 00000000000..c0286079714 --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/RegexSearchOperator.java @@ -0,0 +1,33 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; + +/** + * @see SearchOperator#regex(SearchPath, String) + * @see SearchOperator#regex(Iterable, Iterable) + * @since 5.3 + */ + +@Sealed +@Beta(Reason.CLIENT) +public interface RegexSearchOperator extends SearchOperator { + @Override + RegexSearchOperator score(SearchScore modifier); +} diff --git a/driver-core/src/main/com/mongodb/client/model/search/SearchConstructibleBsonElement.java b/driver-core/src/main/com/mongodb/client/model/search/SearchConstructibleBsonElement.java index 8f0b1e510c5..c1a37597c1a 100644 --- a/driver-core/src/main/com/mongodb/client/model/search/SearchConstructibleBsonElement.java +++ b/driver-core/src/main/com/mongodb/client/model/search/SearchConstructibleBsonElement.java @@ -32,6 +32,8 @@ final class SearchConstructibleBsonElement extends AbstractConstructibleBsonElem MustCompoundSearchOperator, MustNotCompoundSearchOperator, ShouldCompoundSearchOperator, FilterCompoundSearchOperator, ExistsSearchOperator, TextSearchOperator, AutocompleteSearchOperator, NumberNearSearchOperator, DateNearSearchOperator, GeoNearSearchOperator, + EqualsSearchOperator, InSearchOperator, MoreLikeThisSearchOperator, + RegexSearchOperator, QueryStringSearchOperator, WildcardSearchOperator, ValueBoostSearchScore, PathBoostSearchScore, ConstantSearchScore, FunctionSearchScore, GaussSearchScoreExpression, PathSearchScoreExpression, FacetSearchCollector, diff --git a/driver-core/src/main/com/mongodb/client/model/search/SearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/SearchOperator.java index 9234db91c51..aa8b01b29d4 100644 --- a/driver-core/src/main/com/mongodb/client/model/search/SearchOperator.java +++ b/driver-core/src/main/com/mongodb/client/model/search/SearchOperator.java @@ -20,19 +20,26 @@ import com.mongodb.annotations.Sealed; import com.mongodb.client.model.Aggregates; import com.mongodb.client.model.geojson.Point; +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonNull; +import org.bson.BsonBoolean; import org.bson.BsonType; import org.bson.Document; import org.bson.conversions.Bson; +import org.bson.types.ObjectId; import java.time.Duration; import java.time.Instant; import java.util.Iterator; +import java.util.UUID; import static com.mongodb.assertions.Assertions.isTrueArgument; +import static com.mongodb.assertions.Assertions.notNull; import static com.mongodb.internal.Iterables.concat; import static com.mongodb.internal.client.model.Util.combineToBsonValue; import static java.util.Collections.singleton; -import static com.mongodb.assertions.Assertions.notNull; /** * The core part of the {@link Aggregates#search(SearchOperator, SearchOptions) $search} pipeline stage of an aggregation pipeline. @@ -292,6 +299,340 @@ static GeoNearSearchOperator near(final Point origin, final Number pivot, final .append("pivot", notNull("pivot", pivot))); } + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The boolean value to search for. + * @param values More fields to be searched. + * @return The requested {@link SearchOperator}. 
+ * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final boolean value, final boolean... values) { + notNull("values", values); + BsonArray bsonArray = new BsonArray(); + bsonArray.add(new BsonBoolean(value)); + for (boolean v : values) { + bsonArray.add(new BsonBoolean(v)); + } + return in(notNull("path", path), bsonArray); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The objectId value to search for. + * @param values More fields to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final ObjectId value, final ObjectId... values) { + return in(notNull("path", path), concat(notNull("value", value), values)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The number value to search for. + * @param values More fields to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final Number value, final Number... values) { + return in(notNull("path", path), concat(notNull("value", value), values)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The instant date value to search for. + * @param values More fields to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final Instant value, final Instant... values) { + return in(notNull("path", path), concat(notNull("value", value), values)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The uuid value to search for. + * @param values More fields to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final UUID value, final UUID... values) { + return in(notNull("path", path), concat(notNull("value", value), values)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The string value to search for. + * @param values More fields to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final String value, final String... 
values) { + return in(notNull("path", path), concat(notNull("value", value), values)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where the + * value or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param values The non-empty values to search for. Value can be either a single value or an array of values of only one of the supported BSON types and can't be a mix of different types. + * @param the type of elements in {@code values}. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/in/ in operator + */ + static InSearchOperator in(final FieldSearchPath path, final Iterable values) { + notNull("path", path); + Iterator valueIterator = notNull("values", values).iterator(); + isTrueArgument("values must not be empty", valueIterator.hasNext()); + T firstValue = valueIterator.next(); + boolean hasMore = valueIterator.hasNext(); + return new SearchConstructibleBsonElement("in", new Document() + .append("path", path.toValue()) + .append("value", hasMore ? values : firstValue)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The boolean value to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equals(final FieldSearchPath path, final boolean value) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", value)); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The object id value to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equals(final FieldSearchPath path, final ObjectId value) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", notNull("value", value))); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The number value to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equals(final FieldSearchPath path, final Number value) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", notNull("value", value))); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The instant date value to query for. + * @return The requested {@link SearchOperator}. 
+ * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equals(final FieldSearchPath path, final Instant value) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", notNull("value", value))); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The string value to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equals(final FieldSearchPath path, final String value) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", notNull("value", value))); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The uuid value to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equals(final FieldSearchPath path, final UUID value) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", notNull("value", new BsonBinary(value)))); + } + + /** + * Returns a {@link SearchOperator} that searches for documents where a field matches null. + * + * @param path The indexed field to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/equals/ equals operator + */ + static EqualsSearchOperator equalsNull(final FieldSearchPath path) { + return new SearchConstructibleBsonElement("equals", new Document("path", notNull("path", path).toValue()) + .append("value", BsonNull.VALUE)); + } + + /** + * Returns a {@link SearchOperator} that returns documents similar to input document. + * + * @param like The BSON document that is used to extract representative terms to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/morelikethis/ moreLikeThis operator + */ + static MoreLikeThisSearchOperator moreLikeThis(final BsonDocument like) { + return moreLikeThis(singleton(notNull("like", like))); + } + + /** + * Returns a {@link SearchOperator} that returns documents similar to input documents. + * + * @param likes The BSON documents that are used to extract representative terms to query for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/morelikethis/ moreLikeThis operator + */ + static MoreLikeThisSearchOperator moreLikeThis(final Iterable likes) { + Iterator likesIterator = notNull("likes", likes).iterator(); + isTrueArgument("likes must not be empty", likesIterator.hasNext()); + BsonDocument firstLike = likesIterator.next(); + return new SearchConstructibleBsonElement("moreLikeThis", new Document("like", likesIterator.hasNext() ? likes : firstLike)); + } + + /** + * Returns a {@link SearchOperator} that supports querying a combination of indexed fields and values. + * + * @param defaultPath The field to be searched by default. + * @param query One or more indexed fields and values to search. + * @return The requested {@link SearchOperator}. 
+ * @mongodb.atlas.manual atlas-search/queryString/ queryString operator + */ + static QueryStringSearchOperator queryString(final FieldSearchPath defaultPath, final String query) { + isTrueArgument("path must not be empty", defaultPath != null); + isTrueArgument("query must not be empty", query != null); + + return new SearchConstructibleBsonElement("queryString", + new Document("defaultPath", defaultPath.toBsonValue()) + .append("query", query)); + } + + /** + * Returns a {@link SearchOperator} that performs a search for documents containing an ordered sequence of terms. + * + * @param path The field to be searched. + * @param query The string to search for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/phrase/ phrase operator + */ + static PhraseSearchOperator phrase(final SearchPath path, final String query) { + return phrase(singleton(notNull("path", path)), singleton(notNull("query", query))); + } + + /** + * Returns a {@link SearchOperator} that performs a search for documents containing an ordered sequence of terms. + * + * @param paths The non-empty fields to be searched. + * @param queries The non-empty strings to search for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/phrase/ phrase operator + */ + static PhraseSearchOperator phrase(final Iterable paths, final Iterable queries) { + Iterator pathIterator = notNull("paths", paths).iterator(); + isTrueArgument("paths must not be empty", pathIterator.hasNext()); + Iterator queryIterator = notNull("queries", queries).iterator(); + isTrueArgument("queries must not be empty", queryIterator.hasNext()); + String firstQuery = queryIterator.next(); + return new PhraseConstructibleBsonElement("phrase", new Document("path", combineToBsonValue(pathIterator, false)) + .append("query", queryIterator.hasNext() ? queries : firstQuery)); + } + + /** + * Returns a {@link SearchOperator} that performs a search using a special + * characters in the search string that can match any character. + * + * @param path The indexed field to be searched. + * @param query The string to search for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/wildcard/ wildcard operator + */ + static WildcardSearchOperator wildcard(final SearchPath path, final String query) { + return wildcard(singleton(notNull("query", query)), singleton(notNull("path", path))); + } + + /** + * Returns a {@link SearchOperator} that performs a search using a special characters in the search string that can match any character. + * + * @param queries The non-empty strings to search for. + * @param paths The non-empty index fields to be searched. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/wildcard/ wildcard operator + */ + static WildcardSearchOperator wildcard(final Iterable queries, final Iterable paths) { + Iterator queryIterator = notNull("queries", queries).iterator(); + isTrueArgument("queries must not be empty", queryIterator.hasNext()); + String firstQuery = queryIterator.next(); + Iterator pathIterator = notNull("paths", paths).iterator(); + isTrueArgument("paths must not be empty", pathIterator.hasNext()); + return new SearchConstructibleBsonElement("wildcard", new Document("query", queryIterator.hasNext() ? queries : firstQuery) + .append("path", combineToBsonValue(pathIterator, false))); + } + + /** + * Returns a {@link SearchOperator} that performs a search using a regular expression. 
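Taken together with the regex variants completed just below, a $search stage combining several of the operators added in this change might look like the following sketch (field names and query values are illustrative; Aggregates.search, SearchOperator.compound and SearchPath.fieldPath are the existing builders):

    import com.mongodb.client.model.Aggregates;
    import com.mongodb.client.model.search.SearchOperator;
    import org.bson.conversions.Bson;

    import java.util.Arrays;

    import static com.mongodb.client.model.search.SearchPath.fieldPath;

    public final class NewSearchOperatorsExample {
        static Bson searchStage() {
            return Aggregates.search(
                    SearchOperator.compound()
                            .must(Arrays.asList(
                                    // ordered sequence of terms, allowing one intervening word
                                    SearchOperator.phrase(fieldPath("title"), "dream machine").slop(1),
                                    // matches documents whose "genre" holds any of the listed values
                                    SearchOperator.in(fieldPath("genre"), "Comedy", "Drama")))
                            .should(Arrays.asList(
                                    SearchOperator.equals(fieldPath("released"), true),
                                    SearchOperator.wildcard(fieldPath("title"), "dream*"))));
        }
    }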
+ * + * @param path The field to be searched. + * @param query The string to search for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/regex/ regex operator + */ + static RegexSearchOperator regex(final SearchPath path, final String query) { + return regex(singleton(notNull("path", path)), singleton(notNull("query", query))); + } + + /** + * Returns a {@link SearchOperator} that performs a search using a regular expression. + * + * @param paths The non-empty fields to be searched. + * @param queries The non-empty strings to search for. + * @return The requested {@link SearchOperator}. + * @mongodb.atlas.manual atlas-search/regex/ regex operator + */ + static RegexSearchOperator regex(final Iterable paths, final Iterable queries) { + Iterator pathIterator = notNull("paths", paths).iterator(); + isTrueArgument("paths must not be empty", pathIterator.hasNext()); + Iterator queryIterator = notNull("queries", queries).iterator(); + isTrueArgument("queries must not be empty", queryIterator.hasNext()); + String firstQuery = queryIterator.next(); + return new SearchConstructibleBsonElement("regex", new Document("path", combineToBsonValue(pathIterator, false)) + .append("query", queryIterator.hasNext() ? queries : firstQuery)); + } + /** * Creates a {@link SearchOperator} from a {@link Bson} in situations when there is no builder method that better satisfies your needs. * This method cannot be used to validate the syntax. diff --git a/driver-core/src/main/com/mongodb/client/model/search/WildcardSearchOperator.java b/driver-core/src/main/com/mongodb/client/model/search/WildcardSearchOperator.java new file mode 100644 index 00000000000..95d4a5caad5 --- /dev/null +++ b/driver-core/src/main/com/mongodb/client/model/search/WildcardSearchOperator.java @@ -0,0 +1,32 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.mongodb.client.model.search; + +import com.mongodb.annotations.Beta; +import com.mongodb.annotations.Reason; +import com.mongodb.annotations.Sealed; + +/** + * @see SearchOperator#wildcard(SearchPath, String) + * @see SearchOperator#wildcard(Iterable, Iterable) + * @since 4.7 + */ +@Sealed +@Beta(Reason.CLIENT) +public interface WildcardSearchOperator extends SearchOperator { + @Override + WildcardSearchOperator score(SearchScore modifier); +} diff --git a/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientReplaceOneOptions.java b/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientReplaceOneOptions.java index 18e9d060763..f7172488bfc 100644 --- a/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientReplaceOneOptions.java +++ b/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientReplaceOneOptions.java @@ -38,6 +38,8 @@ public final class ConcreteClientReplaceOneOptions implements ClientReplaceOneOp private String hintString; @Nullable private Boolean upsert; + @Nullable + private Bson sort; public ConcreteClientReplaceOneOptions() { } @@ -89,6 +91,21 @@ public ClientReplaceOneOptions upsert(@Nullable final Boolean upsert) { return this; } + /** + * @see ClientReplaceOneOptions#sort(Bson) + */ + public ClientReplaceOneOptions sort(final Bson sort) { + this.sort = sort; + return this; + } + + /** + * @see ClientReplaceOneOptions#sort(Bson) + */ + public Optional getSort() { + return ofNullable(sort); + } + /** * @see #upsert(Boolean) */ @@ -103,6 +120,7 @@ public String toString() { + ", hint=" + hint + ", hintString='" + hintString + '\'' + ", upsert=" + upsert + + ", sort=" + sort + '}'; } } diff --git a/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientUpdateOneOptions.java b/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientUpdateOneOptions.java index fdf960ed1df..3bd5f1451d7 100644 --- a/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientUpdateOneOptions.java +++ b/driver-core/src/main/com/mongodb/internal/client/model/bulk/ConcreteClientUpdateOneOptions.java @@ -20,12 +20,19 @@ import com.mongodb.lang.Nullable; import org.bson.conversions.Bson; +import java.util.Optional; + +import static java.util.Optional.ofNullable; + /** * This class is not part of the public API and may be removed or changed at any time. 
*/ public final class ConcreteClientUpdateOneOptions extends AbstractClientUpdateOptions implements ClientUpdateOneOptions { static final ConcreteClientUpdateOneOptions MUTABLE_EMPTY = new ConcreteClientUpdateOneOptions(); + @Nullable + private Bson sort; + public ConcreteClientUpdateOneOptions() { } @@ -54,6 +61,21 @@ public ConcreteClientUpdateOneOptions upsert(@Nullable final Boolean upsert) { return (ConcreteClientUpdateOneOptions) super.upsert(upsert); } + /** + * @see ClientUpdateOneOptions#sort(Bson) + */ + public ConcreteClientUpdateOneOptions sort(final Bson sort) { + this.sort = sort; + return this; + } + + /** + * @see ClientUpdateOneOptions#sort(Bson) + */ + public Optional getSort() { + return ofNullable(sort); + } + @Override public String toString() { return "ClientUpdateOneOptions{" @@ -62,6 +84,7 @@ public String toString() { + ", hint=" + getHint().orElse(null) + ", hintString=" + getHintString().map(s -> '\'' + s + '\'') .orElse(null) + ", upsert=" + isUpsert().orElse(null) + + ", sort=" + getSort().orElse(null) + '}'; } } diff --git a/driver-core/src/main/com/mongodb/internal/connection/ClientMetadataHelper.java b/driver-core/src/main/com/mongodb/internal/connection/ClientMetadataHelper.java index 36d2d891829..825af685c10 100644 --- a/driver-core/src/main/com/mongodb/internal/connection/ClientMetadataHelper.java +++ b/driver-core/src/main/com/mongodb/internal/connection/ClientMetadataHelper.java @@ -224,7 +224,7 @@ private enum Orchestrator { K8S("kubernetes") { @Override boolean isCurrentOrchestrator() { - return System.getenv("KUBERNETES_SERVICE_HOST") != null; + return FaasEnvironment.getEnv("KUBERNETES_SERVICE_HOST") != null; } }, UNKNOWN(null); diff --git a/driver-core/src/main/com/mongodb/internal/connection/DefaultClusterableServerFactory.java b/driver-core/src/main/com/mongodb/internal/connection/DefaultClusterableServerFactory.java index 880e1db8521..aa8973ec092 100644 --- a/driver-core/src/main/com/mongodb/internal/connection/DefaultClusterableServerFactory.java +++ b/driver-core/src/main/com/mongodb/internal/connection/DefaultClusterableServerFactory.java @@ -93,6 +93,7 @@ public ClusterableServer create(final Cluster cluster, final ServerAddress serve new InternalStreamConnectionFactory(clusterMode, true, heartbeatStreamFactory, null, applicationName, mongoDriverInformation, emptyList(), loggerSettings, null, serverApi), clusterMode, serverApi, isFunctionAsAServiceEnvironment, sdamProvider, heartbeatOperationContextFactory); + ConnectionPool connectionPool = new DefaultConnectionPool(serverId, new InternalStreamConnectionFactory(clusterMode, streamFactory, credential, applicationName, mongoDriverInformation, compressorList, loggerSettings, commandListener, serverApi), diff --git a/driver-core/src/main/com/mongodb/internal/connection/DnsMultiServerCluster.java b/driver-core/src/main/com/mongodb/internal/connection/DnsMultiServerCluster.java index 0589d0f7d19..51e28ee5c84 100644 --- a/driver-core/src/main/com/mongodb/internal/connection/DnsMultiServerCluster.java +++ b/driver-core/src/main/com/mongodb/internal/connection/DnsMultiServerCluster.java @@ -21,6 +21,7 @@ import com.mongodb.connection.ClusterId; import com.mongodb.connection.ClusterSettings; import com.mongodb.connection.ClusterType; +import com.mongodb.connection.ServerDescription; import com.mongodb.lang.Nullable; import java.util.ArrayList; @@ -28,6 +29,7 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.Collectors; 
import static com.mongodb.assertions.Assertions.assertNotNull; @@ -38,7 +40,6 @@ public final class DnsMultiServerCluster extends AbstractMultiServerCluster { private final DnsSrvRecordMonitor dnsSrvRecordMonitor; private volatile MongoException srvResolutionException; - public DnsMultiServerCluster(final ClusterId clusterId, final ClusterSettings settings, final ClusterableServerFactory serverFactory, final DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory) { super(clusterId, settings, serverFactory); @@ -57,17 +58,33 @@ public void initialize(final Collection hosts) { } } - private Collection applySrvMaxHosts(final Collection hosts) { - Collection newHosts = hosts; + private Collection applySrvMaxHosts(final Collection latestSrvHosts) { Integer srvMaxHosts = getSettings().getSrvMaxHosts(); - if (srvMaxHosts != null && srvMaxHosts > 0) { - if (srvMaxHosts < hosts.size()) { - List newHostsList = new ArrayList<>(hosts); - Collections.shuffle(newHostsList, ThreadLocalRandom.current()); - newHosts = newHostsList.subList(0, srvMaxHosts); - } + if (srvMaxHosts == null || srvMaxHosts <= 0 || latestSrvHosts.size() <= srvMaxHosts) { + return new ArrayList<>(latestSrvHosts); } - return newHosts; + List activeHosts = getActivePriorHosts(latestSrvHosts); + int numNewHostsToAdd = srvMaxHosts - activeHosts.size(); + activeHosts.addAll(addShuffledHosts(latestSrvHosts, activeHosts, numNewHostsToAdd)); + + return activeHosts; + } + + private List getActivePriorHosts(final Collection latestSrvHosts) { + List priorHosts = DnsMultiServerCluster.this.getCurrentDescription().getServerDescriptions().stream() + .map(ServerDescription::getAddress).collect(Collectors.toList()); + priorHosts.removeIf(host -> !latestSrvHosts.contains(host)); + + return priorHosts; + } + + private List addShuffledHosts(final Collection latestSrvHosts, + final List activePriorHosts, final int numNewHostsToAdd) { + List addedHosts = new ArrayList<>(latestSrvHosts); + addedHosts.removeAll(activePriorHosts); + Collections.shuffle(addedHosts, ThreadLocalRandom.current()); + + return addedHosts.subList(0, numNewHostsToAdd); } @Override diff --git a/driver-core/src/main/com/mongodb/internal/connection/FaasEnvironment.java b/driver-core/src/main/com/mongodb/internal/connection/FaasEnvironment.java index 6627722097b..a54c1efb066 100644 --- a/driver-core/src/main/com/mongodb/internal/connection/FaasEnvironment.java +++ b/driver-core/src/main/com/mongodb/internal/connection/FaasEnvironment.java @@ -20,7 +20,9 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; enum FaasEnvironment { AWS_LAMBDA("aws.lambda"), @@ -29,21 +31,23 @@ enum FaasEnvironment { VERCEL("vercel"), UNKNOWN(null); + static final Map ENV_OVERRIDES_FOR_TESTING = new HashMap<>(); + static FaasEnvironment getFaasEnvironment() { List result = new ArrayList<>(); - String awsExecutionEnv = System.getenv("AWS_EXECUTION_ENV"); + String awsExecutionEnv = getEnv("AWS_EXECUTION_ENV"); - if (System.getenv("VERCEL") != null) { + if (getEnv("VERCEL") != null) { result.add(FaasEnvironment.VERCEL); } if ((awsExecutionEnv != null && awsExecutionEnv.startsWith("AWS_Lambda_")) - || System.getenv("AWS_LAMBDA_RUNTIME_API") != null) { + || getEnv("AWS_LAMBDA_RUNTIME_API") != null) { result.add(FaasEnvironment.AWS_LAMBDA); } - if (System.getenv("FUNCTIONS_WORKER_RUNTIME") != null) { + if (getEnv("FUNCTIONS_WORKER_RUNTIME") != null) { result.add(FaasEnvironment.AZURE_FUNC); } - if (System.getenv("K_SERVICE") != null || 
System.getenv("FUNCTION_NAME") != null) { + if (getEnv("K_SERVICE") != null || getEnv("FUNCTION_NAME") != null) { result.add(FaasEnvironment.GCP_FUNC); } // vercel takes precedence over aws.lambda @@ -56,6 +60,14 @@ static FaasEnvironment getFaasEnvironment() { return result.get(0); } + @Nullable + public static String getEnv(final String key) { + if (ENV_OVERRIDES_FOR_TESTING.containsKey(key)) { + return ENV_OVERRIDES_FOR_TESTING.get(key); + } + return System.getenv(key); + } + @Nullable private final String name; @@ -95,11 +107,11 @@ public Integer getMemoryMb() { public String getRegion() { switch (this) { case AWS_LAMBDA: - return System.getenv("AWS_REGION"); + return getEnv("AWS_REGION"); case GCP_FUNC: - return System.getenv("FUNCTION_REGION"); + return getEnv("FUNCTION_REGION"); case VERCEL: - return System.getenv("VERCEL_REGION"); + return getEnv("VERCEL_REGION"); default: return null; } @@ -108,7 +120,7 @@ public String getRegion() { @Nullable private static Integer getEnvInteger(final String name) { try { - String value = System.getenv(name); + String value = getEnv(name); return Integer.parseInt(value); } catch (NumberFormatException e) { return null; diff --git a/driver-core/src/main/com/mongodb/internal/connection/OidcAuthenticator.java b/driver-core/src/main/com/mongodb/internal/connection/OidcAuthenticator.java index 3d778ae0349..99fcee788ed 100644 --- a/driver-core/src/main/com/mongodb/internal/connection/OidcAuthenticator.java +++ b/driver-core/src/main/com/mongodb/internal/connection/OidcAuthenticator.java @@ -76,10 +76,11 @@ public final class OidcAuthenticator extends SaslAuthenticator { private static final String TEST_ENVIRONMENT = "test"; private static final String AZURE_ENVIRONMENT = "azure"; private static final String GCP_ENVIRONMENT = "gcp"; + private static final String K8S_ENVIRONMENT = "k8s"; private static final List IMPLEMENTED_ENVIRONMENTS = Arrays.asList( - AZURE_ENVIRONMENT, GCP_ENVIRONMENT, TEST_ENVIRONMENT); + AZURE_ENVIRONMENT, GCP_ENVIRONMENT, K8S_ENVIRONMENT, TEST_ENVIRONMENT); private static final List USER_SUPPORTED_ENVIRONMENTS = Arrays.asList( - AZURE_ENVIRONMENT, GCP_ENVIRONMENT); + AZURE_ENVIRONMENT, GCP_ENVIRONMENT, K8S_ENVIRONMENT); private static final List REQUIRES_TOKEN_RESOURCE = Arrays.asList( AZURE_ENVIRONMENT, GCP_ENVIRONMENT); private static final List ALLOWS_USERNAME = Arrays.asList( @@ -90,6 +91,10 @@ public final class OidcAuthenticator extends SaslAuthenticator { public static final String OIDC_TOKEN_FILE = "OIDC_TOKEN_FILE"; + private static final String K8S_FALLBACK_FILE = "/var/run/secrets/kubernetes.io/serviceaccount/token"; + private static final String K8S_AZURE_FILE = "AZURE_FEDERATED_TOKEN_FILE"; + private static final String K8S_AWS_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; + private static final int CALLBACK_API_VERSION_NUMBER = 1; @Nullable @@ -192,6 +197,8 @@ private OidcCallback getRequestCallback() { machine = getAzureCallback(getMongoCredential()); } else if (GCP_ENVIRONMENT.equals(environment)) { machine = getGcpCallback(getMongoCredential()); + } else if (K8S_ENVIRONMENT.equals(environment)) { + machine = getK8sCallback(); } else { machine = getOidcCallbackMechanismProperty(OIDC_CALLBACK_KEY); } @@ -206,6 +213,24 @@ private static OidcCallback getTestCallback() { }; } + @VisibleForTesting(otherwise = VisibleForTesting.AccessModifier.PRIVATE) + static OidcCallback getK8sCallback() { + return (context) -> { + String azure = System.getenv(K8S_AZURE_FILE); + String aws = System.getenv(K8S_AWS_FILE); + String path; + if 
(azure != null) { + path = azure; + } else if (aws != null) { + path = aws; + } else { + path = K8S_FALLBACK_FILE; + } + String accessToken = readTokenFromFile(path); + return new OidcCallbackResult(accessToken); + }; + } + @VisibleForTesting(otherwise = VisibleForTesting.AccessModifier.PRIVATE) static OidcCallback getAzureCallback(final MongoCredential credential) { return (context) -> { @@ -499,6 +524,10 @@ private static String readTokenFromFile() { throw new MongoClientException( format("Environment variable must be specified: %s", OIDC_TOKEN_FILE)); } + return readTokenFromFile(path); + } + + private static String readTokenFromFile(final String path) { try { return new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8); } catch (IOException e) { diff --git a/driver-core/src/main/com/mongodb/internal/operation/ClientBulkWriteOperation.java b/driver-core/src/main/com/mongodb/internal/operation/ClientBulkWriteOperation.java index ccd7f272e95..f6ff7632c8f 100644 --- a/driver-core/src/main/com/mongodb/internal/operation/ClientBulkWriteOperation.java +++ b/driver-core/src/main/com/mongodb/internal/operation/ClientBulkWriteOperation.java @@ -1247,6 +1247,14 @@ private void encodeWriteModelInternals( }); } + private void encodeWriteModelInternals(final BsonWriter writer, final ConcreteClientUpdateOneModel model) { + encodeWriteModelInternals(writer, (AbstractClientUpdateModel) model); + model.getOptions().getSort().ifPresent(value -> { + writer.writeName("sort"); + encodeUsingRegistry(writer, value); + }); + } + private void encodeWriteModelInternals(final BsonWriter writer, final AbstractClientUpdateModel model) { writer.writeName("filter"); encodeUsingRegistry(writer, model.getFilter()); @@ -1294,6 +1302,10 @@ private void encodeWriteModelInternals(final BsonBinaryWriter writer, final Conc }); options.getHintString().ifPresent(value -> writer.writeString("hint", value)); options.isUpsert().ifPresent(value -> writer.writeBoolean("upsert", value)); + options.getSort().ifPresent(value -> { + writer.writeName("sort"); + encodeUsingRegistry(writer, value); + }); } private void encodeWriteModelInternals(final BsonWriter writer, final AbstractClientDeleteModel model) { diff --git a/driver-core/src/main/com/mongodb/internal/time/Timeout.java b/driver-core/src/main/com/mongodb/internal/time/Timeout.java index 85b92d9fde1..3dba42e580f 100644 --- a/driver-core/src/main/com/mongodb/internal/time/Timeout.java +++ b/driver-core/src/main/com/mongodb/internal/time/Timeout.java @@ -22,7 +22,7 @@ import com.mongodb.internal.function.CheckedRunnable; import com.mongodb.internal.function.CheckedSupplier; import com.mongodb.lang.Nullable; -import org.jetbrains.annotations.NotNull; +import com.mongodb.lang.NonNull; import java.util.Arrays; import java.util.Collections; @@ -82,7 +82,7 @@ static Timeout nullAsInfinite(@Nullable final Timeout timeout) { * @param zeroSemantics what to interpret a 0 duration as (infinite or expired) * @return a timeout that expires in the specified duration after now. 
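A hedged connection sketch, not part of the patch, for the new built-in "k8s" OIDC environment: inside a pod the callback above resolves the token path from AZURE_FEDERATED_TOKEN_FILE, AWS_WEB_IDENTITY_TOKEN_FILE, or the default service-account path. The host name below is a placeholder.

import com.mongodb.ConnectionString;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import org.bson.Document;

final class K8sOidcConnectSketch {
    public static void main(String[] args) {
        // ENVIRONMENT:k8s is now accepted in authMechanismProperties (see the
        // connection-string.json additions later in this patch).
        ConnectionString uri = new ConnectionString(
                "mongodb://mongo.example.internal/?authMechanism=MONGODB-OIDC"
                        + "&authMechanismProperties=ENVIRONMENT:k8s");
        try (MongoClient client = MongoClients.create(uri)) {
            client.getDatabase("admin").runCommand(new Document("ping", 1));
        }
    }
}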
*/ - @NotNull + @NonNull static Timeout expiresIn(final long duration, final TimeUnit unit, final ZeroSemantics zeroSemantics) { if (duration < 0) { throw new AssertionError("Timeouts must not be in the past"); diff --git a/driver-core/src/test/functional/com/mongodb/ClusterFixture.java b/driver-core/src/test/functional/com/mongodb/ClusterFixture.java index dde9682de8d..f0004cd9e03 100644 --- a/driver-core/src/test/functional/com/mongodb/ClusterFixture.java +++ b/driver-core/src/test/functional/com/mongodb/ClusterFixture.java @@ -261,7 +261,14 @@ static class ShutdownHook extends Thread { @Override public void run() { if (cluster != null) { - new DropDatabaseOperation(getDefaultDatabaseName(), WriteConcern.ACKNOWLEDGED).execute(getBinding()); + try { + new DropDatabaseOperation(getDefaultDatabaseName(), WriteConcern.ACKNOWLEDGED).execute(getBinding()); + } catch (MongoCommandException e) { + // if we do not have permission to drop the database, assume it is cleaned up in some other way + if (!e.getMessage().contains("Command dropDatabase requires authentication")) { + throw e; + } + } cluster.close(); } } diff --git a/driver-core/src/test/functional/com/mongodb/client/WithWrapper.java b/driver-core/src/test/functional/com/mongodb/client/WithWrapper.java index 6484f642a1a..e610f578112 100644 --- a/driver-core/src/test/functional/com/mongodb/client/WithWrapper.java +++ b/driver-core/src/test/functional/com/mongodb/client/WithWrapper.java @@ -16,13 +16,11 @@ package com.mongodb.client; +import com.mongodb.internal.connection.FaasEnvironmentAccessor; import com.mongodb.lang.Nullable; -import java.lang.reflect.Field; import java.util.Map; -import static java.lang.System.getenv; - @FunctionalInterface public interface WithWrapper { @@ -34,7 +32,7 @@ static WithWrapper withWrapper() { default WithWrapper withEnvironmentVariable(final String name, @Nullable final String value) { return runnable -> { - Map innerMap = getEnvInnerMap(); + Map innerMap = FaasEnvironmentAccessor.getFaasEnvMap(); String original = innerMap.get(name); if (value == null) { innerMap.remove(name); @@ -65,16 +63,4 @@ default WithWrapper withSystemProperty(final String name, final String value) { }; } - static Map getEnvInnerMap() { - try { - Map env = getenv(); - Field field = env.getClass().getDeclaredField("m"); - field.setAccessible(true); - @SuppressWarnings("unchecked") - Map result = (Map) field.get(env); - return result; - } catch (IllegalAccessException | NoSuchFieldException e) { - throw new RuntimeException(e); - } - } } diff --git a/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesBinaryVectorSearchIntegrationTest.java b/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesBinaryVectorSearchIntegrationTest.java index 0d5aad1085a..a242367992f 100644 --- a/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesBinaryVectorSearchIntegrationTest.java +++ b/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesBinaryVectorSearchIntegrationTest.java @@ -22,9 +22,9 @@ import com.mongodb.client.model.SearchIndexType; import com.mongodb.client.test.CollectionHelper; import com.mongodb.internal.operation.SearchIndexRequest; +import org.bson.BinaryVector; import org.bson.BsonDocument; import org.bson.Document; -import org.bson.BinaryVector; import org.bson.codecs.DocumentCodec; import org.bson.conversions.Bson; import org.junit.jupiter.api.AfterAll; diff --git 
a/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesSearchIntegrationTest.java b/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesSearchIntegrationTest.java index 29de80dda32..bc34cb0060c 100644 --- a/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesSearchIntegrationTest.java +++ b/driver-core/src/test/functional/com/mongodb/client/model/search/AggregatesSearchIntegrationTest.java @@ -23,6 +23,7 @@ import com.mongodb.client.model.geojson.Position; import com.mongodb.client.test.CollectionHelper; import org.bson.BsonDocument; +import org.bson.BsonString; import org.bson.codecs.BsonDocumentCodec; import org.bson.conversions.Bson; import org.bson.json.JsonWriterSettings; @@ -79,10 +80,17 @@ import static com.mongodb.client.model.search.SearchOperator.autocomplete; import static com.mongodb.client.model.search.SearchOperator.compound; import static com.mongodb.client.model.search.SearchOperator.dateRange; +import static com.mongodb.client.model.search.SearchOperator.equalsNull; import static com.mongodb.client.model.search.SearchOperator.exists; +import static com.mongodb.client.model.search.SearchOperator.in; +import static com.mongodb.client.model.search.SearchOperator.moreLikeThis; import static com.mongodb.client.model.search.SearchOperator.near; import static com.mongodb.client.model.search.SearchOperator.numberRange; +import static com.mongodb.client.model.search.SearchOperator.queryString; +import static com.mongodb.client.model.search.SearchOperator.regex; +import static com.mongodb.client.model.search.SearchOperator.phrase; import static com.mongodb.client.model.search.SearchOperator.text; +import static com.mongodb.client.model.search.SearchOperator.wildcard; import static com.mongodb.client.model.search.SearchOptions.searchOptions; import static com.mongodb.client.model.search.SearchPath.fieldPath; import static com.mongodb.client.model.search.SearchPath.wildcardPath; @@ -221,6 +229,9 @@ * * * + *

+ * Use this class when needing to test against MFLIX specifically. Otherwise, + * see AggregatesSearchTest. */ final class AggregatesSearchIntegrationTest { private static final MongoNamespace MFLIX_MOVIES_NS = new MongoNamespace("sample_mflix", "movies"); @@ -608,7 +619,18 @@ private static Stream searchAndSearchMetaArgs() { dateRange(fieldPath("fieldName6")) .lte(Instant.ofEpochMilli(1)), near(0, 1.5, fieldPath("fieldName7"), fieldPath("fieldName8")), - near(Instant.ofEpochMilli(1), Duration.ofMillis(3), fieldPath("fieldName9")) + near(Instant.ofEpochMilli(1), Duration.ofMillis(3), fieldPath("fieldName9")), + phrase(fieldPath("fieldName10"), "term6"), + in(fieldPath("fieldName10"), true), + in(fieldPath("fieldName11"), "term4", "term5"), + regex(fieldPath("title").multi("keyword"), "term7"), + queryString(fieldPath("fieldName12"), "term8"), + moreLikeThis(new BsonDocument("like", new BsonDocument("fieldName10", + new BsonString("term6")))), + wildcard(asList("term10", "term11"), asList(wildcardPath("wildc*rd"), fieldPath("title").multi( + "keyword"))), + SearchOperator.equals(fieldPath("fieldName11"), "term7"), + equalsNull(fieldPath("fieldName12")) )) .minimumShouldMatch(1) .mustNot(singleton( diff --git a/driver-core/src/test/functional/com/mongodb/internal/connection/FaasEnvironmentAccessor.java b/driver-core/src/test/functional/com/mongodb/internal/connection/FaasEnvironmentAccessor.java new file mode 100644 index 00000000000..ccc71f718ba --- /dev/null +++ b/driver-core/src/test/functional/com/mongodb/internal/connection/FaasEnvironmentAccessor.java @@ -0,0 +1,31 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
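A hedged sketch, not part of the patch, that combines several of the operators exercised in the tests above (phrase, wildcard, in, equalsNull) into a single Atlas Search stage; all field names and query terms are illustrative.

import com.mongodb.client.model.Aggregates;
import org.bson.conversions.Bson;

import static com.mongodb.client.model.search.SearchOperator.compound;
import static com.mongodb.client.model.search.SearchOperator.equalsNull;
import static com.mongodb.client.model.search.SearchOperator.in;
import static com.mongodb.client.model.search.SearchOperator.phrase;
import static com.mongodb.client.model.search.SearchOperator.wildcard;
import static com.mongodb.client.model.search.SearchPath.fieldPath;
import static java.util.Arrays.asList;

final class NewSearchOperatorsSketch {
    // A $search stage matching on a phrase, a wildcard pattern, a set of values, or a null field.
    static Bson searchStage() {
        return Aggregates.search(
                compound()
                        .should(asList(
                                phrase(fieldPath("title"), "star wars"),
                                wildcard(fieldPath("title"), "episode*"),
                                in(fieldPath("genres"), "Adventure", "Sci-Fi"),
                                equalsNull(fieldPath("deletedAt"))))
                        .minimumShouldMatch(1));
    }
}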
+ */ + +package com.mongodb.internal.connection; + +import java.util.Map; + +/** + * In the same package as FaasEnvironment, to access package-private + */ +public final class FaasEnvironmentAccessor { + private FaasEnvironmentAccessor() { + } + + public static Map getFaasEnvMap() { + return FaasEnvironment.ENV_OVERRIDES_FOR_TESTING; + } +} diff --git a/driver-core/src/test/resources/auth/legacy/connection-string.json b/driver-core/src/test/resources/auth/legacy/connection-string.json index f8b0f9426c1..dfed11656d4 100644 --- a/driver-core/src/test/resources/auth/legacy/connection-string.json +++ b/driver-core/src/test/resources/auth/legacy/connection-string.json @@ -481,7 +481,7 @@ }, { "description": "should throw an exception if username is specified for test (MONGODB-OIDC)", - "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&ENVIRONMENT:test", + "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test", "valid": false, "credential": null }, @@ -631,6 +631,26 @@ "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp", "valid": false, "credential": null + }, + { + "description": "should recognise the mechanism with k8s provider (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:k8s", + "valid": true, + "credential": { + "username": null, + "password": null, + "source": "$external", + "mechanism": "MONGODB-OIDC", + "mechanism_properties": { + "ENVIRONMENT": "k8s" + } + } + }, + { + "description": "should throw an error for a username and password with k8s provider (MONGODB-OIDC)", + "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:k8s", + "valid": false, + "credential": null } ] } diff --git a/driver-core/src/test/resources/client-side-encryption-data/lookup/key-doc.json b/driver-core/src/test/resources/client-side-encryption-data/lookup/key-doc.json new file mode 100644 index 00000000000..566b56c354f --- /dev/null +++ b/driver-core/src/test/resources/client-side-encryption-data/lookup/key-doc.json @@ -0,0 +1,30 @@ +{ + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } +} diff --git a/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-csfle.json b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-csfle.json new file mode 100644 index 00000000000..29ac9ad5da4 --- /dev/null +++ b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-csfle.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-csfle2.json 
b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-csfle2.json new file mode 100644 index 00000000000..3f1c02781c5 --- /dev/null +++ b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-csfle2.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle2": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-qe.json b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-qe.json new file mode 100644 index 00000000000..9428ea1b458 --- /dev/null +++ b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-qe.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe.esc", + "ecocCollection": "enxcol_.qe.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-qe2.json b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-qe2.json new file mode 100644 index 00000000000..77d5bd37cbb --- /dev/null +++ b/driver-core/src/test/resources/client-side-encryption-data/lookup/schema-qe2.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe2.esc", + "ecocCollection": "enxcol_.qe2.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe2", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/driver-core/src/test/resources/server-discovery-and-monitoring/rs/secondary_ipv6_literal.json b/driver-core/src/test/resources/server-discovery-and-monitoring/rs/secondary_ipv6_literal.json new file mode 100644 index 00000000000..c23d8dc4c95 --- /dev/null +++ b/driver-core/src/test/resources/server-discovery-and-monitoring/rs/secondary_ipv6_literal.json @@ -0,0 +1,38 @@ +{ + "description": "Secondary with IPv6 literal", + "uri": "mongodb://[::1]/?replicaSet=rs", + "phases": [ + { + "responses": [ + [ + "[::1]:27017", + { + "ok": 1, + "helloOk": true, + "isWritablePrimary": false, + "secondary": true, + "setName": "rs", + "me": "[::1]:27017", + "hosts": [ + "[::1]:27017" + ], + "minWireVersion": 0, + "maxWireVersion": 26 + } + ] + ], + "outcome": { + "servers": { + "[::1]:27017": { + "type": "RSSecondary", + "setName": "rs" + } + }, + "topologyType": "ReplicaSetNoPrimary", + "setName": "rs", + "logicalSessionTimeoutMinutes": null, + "compatible": true + } + } + ] +} diff --git a/driver-core/src/test/resources/unified-test-format/crud/client-bulkWrite-replaceOne-sort.json b/driver-core/src/test/resources/unified-test-format/crud/client-bulkWrite-replaceOne-sort.json new file mode 100644 index 00000000000..b86bc5f9429 --- /dev/null +++ b/driver-core/src/test/resources/unified-test-format/crud/client-bulkWrite-replaceOne-sort.json @@ -0,0 +1,163 @@ +{ + "description": "client bulkWrite updateOne-sort", + "schemaVersion": "1.4", + "runOnRequirements": [ + { + "minServerVersion": "8.0", + "serverless": "forbid" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + 
"database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "_yamlAnchors": { + "namespace": "crud-tests.coll0" + }, + "tests": [ + { + "description": "client bulkWrite replaceOne with sort option", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "replaceOne": { + "namespace": "crud-tests.coll0", + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "replacement": { + "x": 1 + } + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "bulkWrite", + "databaseName": "admin", + "command": { + "bulkWrite": 1, + "ops": [ + { + "update": 0, + "filter": { + "_id": { + "$gt": 1 + } + }, + "updateMods": { + "x": 1 + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "nsInfo": [ + { + "ns": "crud-tests.coll0" + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "nErrors": 0, + "nMatched": 1, + "nModified": 1 + }, + "commandName": "bulkWrite" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 1 + } + ] + } + ] + } + ] +} diff --git a/driver-core/src/test/resources/unified-test-format/crud/client-bulkWrite-updateOne-sort.json b/driver-core/src/test/resources/unified-test-format/crud/client-bulkWrite-updateOne-sort.json new file mode 100644 index 00000000000..ef75dcb3741 --- /dev/null +++ b/driver-core/src/test/resources/unified-test-format/crud/client-bulkWrite-updateOne-sort.json @@ -0,0 +1,167 @@ +{ + "description": "client bulkWrite updateOne-sort", + "schemaVersion": "1.4", + "runOnRequirements": [ + { + "minServerVersion": "8.0", + "serverless": "forbid" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "_yamlAnchors": { + "namespace": "crud-tests.coll0" + }, + "tests": [ + { + "description": "client bulkWrite updateOne with sort option", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "updateOne": { + "namespace": "crud-tests.coll0", + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "update": { + "$inc": { + "x": 1 + } + } + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "bulkWrite", + "databaseName": "admin", + "command": { + "bulkWrite": 1, + "ops": [ + { + "update": 0, + "filter": { + "_id": { + "$gt": 1 + } + }, + "updateMods": { + 
"$inc": { + "x": 1 + } + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "nsInfo": [ + { + "ns": "crud-tests.coll0" + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "nErrors": 0, + "nMatched": 1, + "nModified": 1 + }, + "commandName": "bulkWrite" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 34 + } + ] + } + ] + } + ] +} diff --git a/driver-core/src/test/resources/unified-test-format/load-balancers/transactions.json b/driver-core/src/test/resources/unified-test-format/load-balancers/transactions.json index 0dd04ee8540..ca9c1452179 100644 --- a/driver-core/src/test/resources/unified-test-format/load-balancers/transactions.json +++ b/driver-core/src/test/resources/unified-test-format/load-balancers/transactions.json @@ -1616,6 +1616,50 @@ ] } ] + }, + { + "description": "pinned connection is released when session ended", + "operations": [ + { + "name": "startTransaction", + "object": "session0" + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "x": 1 + }, + "session": "session0" + } + }, + { + "name": "commitTransaction", + "object": "session0" + }, + { + "name": "endSession", + "object": "session0" + } + ], + "expectEvents": [ + { + "client": "client0", + "eventType": "cmap", + "events": [ + { + "connectionReadyEvent": {} + }, + { + "connectionCheckedOutEvent": {} + }, + { + "connectionCheckedInEvent": {} + } + ] + } + ] } ] } diff --git a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientDeleteOptionsTest.java b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientDeleteOptionsTest.java index e9832c24b21..fdcba01c2d3 100644 --- a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientDeleteOptionsTest.java +++ b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientDeleteOptionsTest.java @@ -16,7 +16,7 @@ package com.mongodb.client.model.bulk; -import com.mongodb.MongoBaseInterfaceAssertions; +import com.mongodb.testing.MongoBaseInterfaceAssertions; import org.junit.jupiter.api.Test; class BaseClientDeleteOptionsTest { diff --git a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpdateOptionsTest.java b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpdateOptionsTest.java index 43ba8e0967e..c9131452063 100644 --- a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpdateOptionsTest.java +++ b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpdateOptionsTest.java @@ -16,7 +16,7 @@ package com.mongodb.client.model.bulk; -import com.mongodb.MongoBaseInterfaceAssertions; +import com.mongodb.testing.MongoBaseInterfaceAssertions; import org.junit.jupiter.api.Test; class BaseClientUpdateOptionsTest { diff --git a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpsertableWriteModelOptionsTest.java b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpsertableWriteModelOptionsTest.java index 5992a508574..8fecf8d14fd 100644 --- a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpsertableWriteModelOptionsTest.java +++ b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientUpsertableWriteModelOptionsTest.java @@ -16,7 +16,7 @@ package com.mongodb.client.model.bulk; -import 
com.mongodb.MongoBaseInterfaceAssertions; +import com.mongodb.testing.MongoBaseInterfaceAssertions; import org.junit.jupiter.api.Test; final class BaseClientUpsertableWriteModelOptionsTest { diff --git a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientWriteModelOptionsTest.java b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientWriteModelOptionsTest.java index 66fec81632e..17b3803727a 100644 --- a/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientWriteModelOptionsTest.java +++ b/driver-core/src/test/unit/com/mongodb/client/model/bulk/BaseClientWriteModelOptionsTest.java @@ -16,7 +16,7 @@ package com.mongodb.client.model.bulk; -import com.mongodb.MongoBaseInterfaceAssertions; +import com.mongodb.testing.MongoBaseInterfaceAssertions; import org.junit.jupiter.api.Test; final class BaseClientWriteModelOptionsTest { diff --git a/driver-core/src/test/unit/com/mongodb/client/model/search/SearchOperatorTest.java b/driver-core/src/test/unit/com/mongodb/client/model/search/SearchOperatorTest.java index c0ea645fb73..ccf5a44cd1f 100644 --- a/driver-core/src/test/unit/com/mongodb/client/model/search/SearchOperatorTest.java +++ b/driver-core/src/test/unit/com/mongodb/client/model/search/SearchOperatorTest.java @@ -19,17 +19,26 @@ import com.mongodb.client.model.geojson.Point; import com.mongodb.client.model.geojson.Position; import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonBoolean; import org.bson.BsonDateTime; import org.bson.BsonDocument; import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonInt64; +import org.bson.BsonNull; +import org.bson.BsonObjectId; import org.bson.BsonString; import org.bson.Document; +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecRegistries; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; import org.junit.jupiter.api.Test; import java.time.Duration; import java.time.Instant; +import java.util.UUID; import static com.mongodb.client.model.search.FuzzySearchOptions.fuzzySearchOptions; import static com.mongodb.client.model.search.SearchPath.fieldPath; @@ -581,6 +590,418 @@ void near() { ); } + @Test + void in() { + ObjectId objectId = new ObjectId(); + UUID uuid = UUID.randomUUID(); + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // paths must not be empty + SearchOperator.in(null, true) + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonBoolean(true)) + ), + SearchOperator.in(fieldPath("fieldName1"), true) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonArray(asList(new BsonBoolean(true), new BsonBoolean(false)))) + ), + SearchOperator.in(fieldPath("fieldName1"), asList(true, false)) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonObjectId(objectId)) + ), + SearchOperator.in(fieldPath("fieldName1"), objectId) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonInt32(1)) + ), + SearchOperator.in(fieldPath("fieldName1"), 1) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + 
.append("value", new BsonInt64(Long.MAX_VALUE)) + ), + SearchOperator.in(fieldPath("fieldName1"), Long.MAX_VALUE) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonDouble(Double.MAX_VALUE)) + ), + SearchOperator.in(fieldPath("fieldName1"), Double.MAX_VALUE) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonDateTime(Instant.EPOCH.toEpochMilli())) + ), + SearchOperator.in(fieldPath("fieldName1"), Instant.EPOCH) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonBinary(uuid)) + ), + SearchOperator.in(fieldPath("fieldName1"), uuid) + .toBsonDocument( + Document.class, + CodecRegistries.withUuidRepresentation(Bson.DEFAULT_CODEC_REGISTRY, UuidRepresentation.STANDARD)) + ), + () -> assertEquals( + new BsonDocument("in", + new BsonDocument("path", fieldPath("fieldName1").toBsonValue()) + .append("value", new BsonString("value")) + ), + SearchOperator.in(fieldPath("fieldName1"), "value") + .toBsonDocument() + ) + ); + } + + @Test + void equals() { + ObjectId objectId = new ObjectId(); + UUID uuid = UUID.randomUUID(); + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // path must not be null + SearchOperator.equals(null, "term") + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonBoolean(true)) + ), + SearchOperator.equals( + fieldPath("fieldName"), + true) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonObjectId(objectId)) + ), + SearchOperator.equals( + fieldPath("fieldName"), + objectId) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonInt32(1)) + ), + SearchOperator.equals( + fieldPath("fieldName"), + 1) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonInt64(Long.MAX_VALUE)) + ), + SearchOperator.equals( + fieldPath("fieldName"), + Long.MAX_VALUE) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonDouble(Double.MAX_VALUE)) + ), + SearchOperator.equals( + fieldPath("fieldName"), + Double.MAX_VALUE) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonDateTime(Instant.EPOCH.toEpochMilli())) + ), + SearchOperator.equals( + fieldPath("fieldName"), + Instant.EPOCH) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonString("term")) + ), + SearchOperator.equals( + fieldPath("fieldName"), + "term") + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", new BsonBinary(uuid)) + ), + SearchOperator.equals( + fieldPath("fieldName"), + uuid) + 
.toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("equals", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("value", BsonNull.VALUE) + ), + SearchOperator.equalsNull(fieldPath("fieldName")) + .toBsonDocument() + ) + ); + } + + @Test + void moreLikeThis() { + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // likes must not be empty + SearchOperator.moreLikeThis(emptyList()) + ), + () -> assertEquals( + new BsonDocument("moreLikeThis", + new BsonDocument("like", new BsonDocument("fieldName", new BsonString("fieldValue"))) + ), + SearchOperator.moreLikeThis(new BsonDocument("fieldName", new BsonString("fieldValue"))) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("moreLikeThis", + new BsonDocument("like", new BsonDocument("fieldName", new BsonString("fieldValue")) + .append("fieldName2", new BsonString("fieldValue2"))) + ), + SearchOperator.moreLikeThis(new BsonDocument("fieldName", new BsonString("fieldValue")) + .append("fieldName2", new BsonString("fieldValue2"))) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("moreLikeThis", + new BsonDocument("like", new BsonArray(asList( + new BsonDocument("fieldName", new BsonString("fieldValue")) + .append("fieldName2", new BsonString("fieldValue2")), + new BsonDocument("fieldName3", new BsonString("fieldValue3")) + ))) + ), + SearchOperator.moreLikeThis(asList( + new BsonDocument("fieldName", new BsonString("fieldValue")) + .append("fieldName2", new BsonString("fieldValue2")), + new BsonDocument("fieldName3", new BsonString("fieldValue3")))) + .toBsonDocument() + ) + ); + } + + @Test + void wildcard() { + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // queries must not be empty + SearchOperator.wildcard(emptyList(), singleton(fieldPath("fieldName"))) + ), + () -> assertThrows(IllegalArgumentException.class, () -> + // paths must not be empty + SearchOperator.wildcard(singleton("term"), emptyList()) + ), + () -> assertEquals( + new BsonDocument("wildcard", + new BsonDocument("query", new BsonString("term")) + .append("path", fieldPath("fieldName").toBsonValue()) + ), + SearchOperator.wildcard( + fieldPath("fieldName"), "term" + ) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("wildcard", + new BsonDocument("query", new BsonArray(asList( + new BsonString("term1"), + new BsonString("term2")))) + .append("path", new BsonArray(asList( + fieldPath("fieldName").toBsonValue(), + wildcardPath("wildc*rd").toBsonValue()))) + ), + SearchOperator.wildcard( + asList( + "term1", + "term2"), + asList( + fieldPath("fieldName"), + wildcardPath("wildc*rd"))) + .toBsonDocument() + ) + ); + } + + @Test + void queryString() { + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // queries must not be empty + SearchOperator.queryString(fieldPath("fieldName"), null) + ), + () -> assertThrows(IllegalArgumentException.class, () -> + // paths must not be empty + SearchOperator.queryString(null, "term1 AND (term2 OR term3)") + ), + () -> assertEquals( + new BsonDocument("queryString", + new BsonDocument("defaultPath", fieldPath("fieldName").toBsonValue()) + .append("query", new BsonString("term1 AND (term2 OR term3)")) + ), + SearchOperator.queryString( + fieldPath("fieldName"), + "term1 AND (term2 OR term3)") + .toBsonDocument() + ) + ); + } + + @Test + void phrase() { + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // queries must not be empty + 
SearchOperator.phrase(singleton(fieldPath("fieldName")), emptyList()) + ), + () -> assertThrows(IllegalArgumentException.class, () -> + // paths must not be empty + SearchOperator.phrase(emptyList(), singleton("term")) + ), + () -> assertEquals( + new BsonDocument("phrase", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("query", new BsonString("term")) + ), + SearchOperator.phrase( + fieldPath("fieldName"), + "term") + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("phrase", + new BsonDocument("path", new BsonArray(asList( + fieldPath("fieldName").toBsonValue(), + wildcardPath("wildc*rd").toBsonValue()))) + .append("query", new BsonArray(asList( + new BsonString("term1"), + new BsonString("term2")))) + ), + SearchOperator.phrase( + asList( + fieldPath("fieldName"), + wildcardPath("wildc*rd")), + asList( + "term1", + "term2")) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("phrase", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("query", new BsonString("term")) + .append("synonyms", new BsonString("synonymMappingName")) + ), + SearchOperator.phrase( + singleton(fieldPath("fieldName")), + singleton("term")) + .synonyms("synonymMappingName") + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("phrase", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("query", new BsonString("term")) + .append("synonyms", new BsonString("synonymMappingName")) + .append("slop", new BsonInt32(5)) + ), + SearchOperator.phrase( + singleton(fieldPath("fieldName")), + singleton("term")) + .synonyms("synonymMappingName") + .slop(5) + .toBsonDocument() + ) + ); + } + + @Test + void regex() { + assertAll( + () -> assertThrows(IllegalArgumentException.class, () -> + // queries must not be empty + SearchOperator.regex(singleton(fieldPath("fieldName")), emptyList()) + ), + () -> assertThrows(IllegalArgumentException.class, () -> + // paths must not be empty + SearchOperator.regex(emptyList(), singleton("term")) + ), + () -> assertEquals( + new BsonDocument("regex", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("query", new BsonString("term")) + ), + SearchOperator.regex( + fieldPath("fieldName"), + "term") + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("regex", + new BsonDocument("path", fieldPath("fieldName").toBsonValue()) + .append("query", new BsonString("term")) + ), + SearchOperator.regex( + singleton(fieldPath("fieldName")), + singleton("term")) + .toBsonDocument() + ), + () -> assertEquals( + new BsonDocument("regex", + new BsonDocument("path", new BsonArray(asList( + fieldPath("fieldName").toBsonValue(), + wildcardPath("wildc*rd").toBsonValue()))) + .append("query", new BsonArray(asList( + new BsonString("term1"), + new BsonString("term2")))) + ), + SearchOperator.regex( + asList( + fieldPath("fieldName"), + wildcardPath("wildc*rd")), + asList( + "term1", + "term2")) + .toBsonDocument() + ) + ); + } + private static SearchOperator docExamplePredefined() { return SearchOperator.exists( fieldPath("fieldName")); diff --git a/driver-core/src/test/unit/com/mongodb/internal/async/SameThreadAsyncFunctionsTest.java b/driver-core/src/test/unit/com/mongodb/internal/async/SameThreadAsyncFunctionsTest.java index 04b9290af55..7dd4c0f37ac 100644 --- a/driver-core/src/test/unit/com/mongodb/internal/async/SameThreadAsyncFunctionsTest.java +++ b/driver-core/src/test/unit/com/mongodb/internal/async/SameThreadAsyncFunctionsTest.java @@ -16,7 +16,7 @@ 
package com.mongodb.internal.async; -import org.jetbrains.annotations.NotNull; +import com.mongodb.lang.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; @@ -62,7 +62,7 @@ void testInvalid() { private static class SameThreadExecutorService extends AbstractExecutorService { @Override - public void execute(@NotNull final Runnable command) { + public void execute(@NonNull final Runnable command) { command.run(); } @@ -70,7 +70,7 @@ public void execute(@NotNull final Runnable command) { public void shutdown() { } - @NotNull + @NonNull @Override public List shutdownNow() { return Collections.emptyList(); @@ -87,7 +87,7 @@ public boolean isTerminated() { } @Override - public boolean awaitTermination(final long timeout, @NotNull final TimeUnit unit) { + public boolean awaitTermination(final long timeout, @NonNull final TimeUnit unit) { return true; } } diff --git a/driver-core/src/test/unit/com/mongodb/internal/connection/SrvPollingProseTests.java b/driver-core/src/test/unit/com/mongodb/internal/connection/SrvPollingProseTests.java index a6605725cf8..a0f08a82360 100644 --- a/driver-core/src/test/unit/com/mongodb/internal/connection/SrvPollingProseTests.java +++ b/driver-core/src/test/unit/com/mongodb/internal/connection/SrvPollingProseTests.java @@ -160,9 +160,10 @@ public void shouldUseAllRecordsWhenSrvMaxHostsIsGreaterThanOrEqualToNumSrvRecord public void shouldUseSrvMaxHostsWhenSrvMaxHostsIsLessThanNumSrvRecords() { int srvMaxHosts = 2; List updatedHosts = asList(firstHost, thirdHost, fourthHost); - initCluster(updatedHosts, srvMaxHosts); + assertEquals(srvMaxHosts, clusterHostsSet().size()); + assertTrue(updatedHosts.contains(firstHost)); assertTrue(updatedHosts.containsAll(clusterHostsSet())); } diff --git a/driver-core/src/test/unit/com/mongodb/testing/MongoAssertions.java b/driver-core/src/test/unit/com/mongodb/testing/MongoAssertions.java new file mode 100644 index 00000000000..8f1bbf8df67 --- /dev/null +++ b/driver-core/src/test/unit/com/mongodb/testing/MongoAssertions.java @@ -0,0 +1,43 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
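For context on the SRV polling prose test touched above, a hedged sketch of how srvMaxHosts is normally configured; the SRV host name is a placeholder. With the DnsMultiServerCluster change earlier in this patch, re-polling keeps hosts that are still present in the SRV records and only tops up the shortfall from newly shuffled records.

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;

final class SrvMaxHostsSketch {
    static MongoClientSettings settings() {
        // With srvMaxHosts=2, at most two of the resolved SRV hosts are used at a time.
        return MongoClientSettings.builder()
                .applyConnectionString(new ConnectionString(
                        "mongodb+srv://cluster0.example.mongodb.net/?srvMaxHosts=2"))
                .build();
    }
}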
+ */ + +package com.mongodb.testing; + +import org.junit.jupiter.api.function.Executable; +import org.opentest4j.AssertionFailedError; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +public final class MongoAssertions { + + private MongoAssertions() { + //NOP + } + + public static void assertCause( + final Class expectedCause, final String expectedMessageFragment, final Executable e) { + Throwable cause = assertThrows(Throwable.class, e); + while (cause.getCause() != null) { + cause = cause.getCause(); + } + if (!cause.getMessage().contains(expectedMessageFragment)) { + throw new AssertionFailedError("Unexpected message: " + cause.getMessage(), cause); + } + if (!expectedCause.isInstance(cause)) { + throw new AssertionFailedError("Unexpected cause: " + cause.getClass(), assertThrows(Throwable.class, e)); + } + } +} diff --git a/driver-core/src/test/unit/com/mongodb/MongoBaseInterfaceAssertions.java b/driver-core/src/test/unit/com/mongodb/testing/MongoBaseInterfaceAssertions.java similarity index 98% rename from driver-core/src/test/unit/com/mongodb/MongoBaseInterfaceAssertions.java rename to driver-core/src/test/unit/com/mongodb/testing/MongoBaseInterfaceAssertions.java index 93f784b0506..0c0fe913123 100644 --- a/driver-core/src/test/unit/com/mongodb/MongoBaseInterfaceAssertions.java +++ b/driver-core/src/test/unit/com/mongodb/testing/MongoBaseInterfaceAssertions.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.mongodb; +package com.mongodb.testing; import org.reflections.Reflections; diff --git a/driver-kotlin-coroutine/src/integration/kotlin/com/mongodb/kotlin/client/coroutine/UnifiedCrudTest.kt b/driver-kotlin-coroutine/src/integration/kotlin/com/mongodb/kotlin/client/coroutine/UnifiedCrudTest.kt index 036ec5afcc4..5091058573e 100644 --- a/driver-kotlin-coroutine/src/integration/kotlin/com/mongodb/kotlin/client/coroutine/UnifiedCrudTest.kt +++ b/driver-kotlin-coroutine/src/integration/kotlin/com/mongodb/kotlin/client/coroutine/UnifiedCrudTest.kt @@ -24,7 +24,7 @@ internal class UnifiedCrudTest() : UnifiedTest() { @JvmStatic @Throws(URISyntaxException::class, IOException::class) fun data(): Collection? 
{ - return getTestData("unified-test-format/crud") + return getTestData("unified-test-format/crud", true) } } } diff --git a/driver-kotlin-extensions/build.gradle.kts b/driver-kotlin-extensions/build.gradle.kts index 25b437e0fad..76f36ca33b2 100644 --- a/driver-kotlin-extensions/build.gradle.kts +++ b/driver-kotlin-extensions/build.gradle.kts @@ -165,4 +165,6 @@ tasks.javadocJar.configure { // =========================== tasks.sourcesJar { from(project.sourceSets.main.map { it.kotlin }) } -afterEvaluate { tasks.jar { manifest { attributes["Automatic-Module-Name"] = "org.mongodb.driver.kotlin.core" } } } +afterEvaluate { + tasks.jar { manifest { attributes["Automatic-Module-Name"] = "org.mongodb.driver.kotlin.extensions" } } +} diff --git a/driver-kotlin-extensions/src/main/kotlin/com/mongodb/kotlin/client/model/Properties.kt b/driver-kotlin-extensions/src/main/kotlin/com/mongodb/kotlin/client/model/Properties.kt index b35b44d4d93..fc8a4e94e87 100644 --- a/driver-kotlin-extensions/src/main/kotlin/com/mongodb/kotlin/client/model/Properties.kt +++ b/driver-kotlin-extensions/src/main/kotlin/com/mongodb/kotlin/client/model/Properties.kt @@ -67,7 +67,7 @@ public operator fun KProperty1?>.div( * - BsonProperty annotation * - Property name */ -internal fun KProperty.path(): String { +public fun KProperty.path(): String { return if (this is KPropertyPath<*, T>) { this.name } else { diff --git a/driver-kotlin-sync/src/integration/kotlin/com/mongodb/kotlin/client/UnifiedCrudTest.kt b/driver-kotlin-sync/src/integration/kotlin/com/mongodb/kotlin/client/UnifiedCrudTest.kt index eb06f5c1875..f030cb54645 100644 --- a/driver-kotlin-sync/src/integration/kotlin/com/mongodb/kotlin/client/UnifiedCrudTest.kt +++ b/driver-kotlin-sync/src/integration/kotlin/com/mongodb/kotlin/client/UnifiedCrudTest.kt @@ -24,7 +24,7 @@ internal class UnifiedCrudTest() : UnifiedTest() { @JvmStatic @Throws(URISyntaxException::class, IOException::class) fun data(): Collection? 
{ - return getTestData("unified-test-format/crud") + return getTestData("unified-test-format/crud", false) } } } diff --git a/driver-lambda/template.yaml b/driver-lambda/template.yaml index 7a53bb20272..9441f804f20 100644 --- a/driver-lambda/template.yaml +++ b/driver-lambda/template.yaml @@ -37,26 +37,6 @@ Resources: Properties: Path: /mongodb Method: get - ApplicationResourceGroup: - Type: AWS::ResourceGroups::Group - Properties: - Name: - Fn::Join: - - '' - - - ApplicationInsights-SAM- - - Ref: AWS::StackName - ResourceQuery: - Type: CLOUDFORMATION_STACK_1_0 - ApplicationInsightsMonitoring: - Type: AWS::ApplicationInsights::Application - Properties: - ResourceGroupName: - Fn::Join: - - '' - - - ApplicationInsights-SAM- - - Ref: AWS::StackName - AutoConfigurationEnabled: true - DependsOn: ApplicationResourceGroup Outputs: LambdaTestApi: Description: API Gateway endpoint URL for Prod stage for Lambda Test function diff --git a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/CollectionInfoRetriever.java b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/CollectionInfoRetriever.java index 08df35c00f0..786055b1886 100644 --- a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/CollectionInfoRetriever.java +++ b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/CollectionInfoRetriever.java @@ -20,7 +20,7 @@ import com.mongodb.lang.Nullable; import com.mongodb.reactivestreams.client.MongoClient; import org.bson.BsonDocument; -import reactor.core.publisher.Mono; +import reactor.core.publisher.Flux; import static com.mongodb.assertions.Assertions.notNull; import static com.mongodb.reactivestreams.client.internal.TimeoutHelper.databaseWithTimeoutDeferred; @@ -35,8 +35,8 @@ class CollectionInfoRetriever { this.client = notNull("client", client); } - public Mono filter(final String databaseName, final BsonDocument filter, @Nullable final Timeout operationTimeout) { + public Flux filter(final String databaseName, final BsonDocument filter, @Nullable final Timeout operationTimeout) { return databaseWithTimeoutDeferred(client.getDatabase(databaseName), TIMEOUT_ERROR_MESSAGE, operationTimeout) - .flatMap(database -> Mono.from(database.listCollections(BsonDocument.class).filter(filter).first())); + .flatMapMany(database -> Flux.from(database.listCollections(BsonDocument.class).filter(filter))); } } diff --git a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/Crypt.java b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/Crypt.java index 17d82e32c49..61ccaa320fe 100644 --- a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/Crypt.java +++ b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/Crypt.java @@ -304,10 +304,8 @@ private void collInfo(final MongoCryptContext cryptContext, } else { collectionInfoRetriever.filter(databaseName, cryptContext.getMongoOperation(), operationTimeout) .contextWrite(sink.contextView()) - .doOnSuccess(result -> { - if (result != null) { - cryptContext.addMongoOperationResult(result); - } + .doOnNext(result -> cryptContext.addMongoOperationResult(result)) + .doOnComplete(() -> { cryptContext.completeMongoOperation(); executeStateMachineWithSink(cryptContext, databaseName, sink, operationTimeout); }) diff --git 
a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/KeyManagementService.java b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/KeyManagementService.java index 019445e6cde..b82dd590618 100644 --- a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/KeyManagementService.java +++ b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/crypt/KeyManagementService.java @@ -39,7 +39,7 @@ import com.mongodb.lang.Nullable; import org.bson.ByteBuf; import org.bson.ByteBufNIO; -import org.jetbrains.annotations.NotNull; +import com.mongodb.lang.NonNull; import reactor.core.publisher.Mono; import reactor.core.publisher.MonoSink; @@ -179,7 +179,7 @@ private OperationContext createOperationContext(@Nullable final Timeout operatio return OperationContext.simpleOperationContext(new TimeoutContext(timeoutSettings)); } - @NotNull + @NonNull private static TimeoutSettings createTimeoutSettings(final SocketSettings socketSettings, @Nullable final Long ms) { return new TimeoutSettings( diff --git a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/gridfs/GridFSBucketImpl.java b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/gridfs/GridFSBucketImpl.java index 1e81db2045e..948c666489c 100644 --- a/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/gridfs/GridFSBucketImpl.java +++ b/driver-reactive-streams/src/main/com/mongodb/reactivestreams/client/internal/gridfs/GridFSBucketImpl.java @@ -208,7 +208,7 @@ public GridFSUploadPublisher uploadFromPublisher(final ClientSession clien final Publisher source, final GridFSUploadOptions options) { return createGridFSUploadPublisher(chunkSizeBytes, filesCollection, chunksCollection, - notNull("clientSession", clientSession), new BsonObjectId(), filename, options, source); + notNull("clientSession", clientSession), id, filename, options, source); } @Override diff --git a/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/ClientSideEncryption25LookupProseTests.java b/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/ClientSideEncryption25LookupProseTests.java new file mode 100644 index 00000000000..6cbd9b60e0d --- /dev/null +++ b/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/ClientSideEncryption25LookupProseTests.java @@ -0,0 +1,39 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
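A hedged sketch, not part of the patch, showing the overload affected by the GridFSBucketImpl fix above: with the fix, the clientSession variant stores the file under the caller-supplied id instead of a freshly generated ObjectId. The filename is illustrative.

import com.mongodb.reactivestreams.client.ClientSession;
import com.mongodb.reactivestreams.client.gridfs.GridFSBucket;
import com.mongodb.reactivestreams.client.gridfs.GridFSUploadPublisher;
import org.bson.BsonObjectId;
import org.bson.BsonValue;
import org.bson.types.ObjectId;
import org.reactivestreams.Publisher;

import java.nio.ByteBuffer;

final class GridFsCustomIdUploadSketch {
    static GridFSUploadPublisher<Void> upload(GridFSBucket bucket, ClientSession session,
                                              Publisher<ByteBuffer> source) {
        BsonValue id = new BsonObjectId(new ObjectId());
        // After the fix, the files document in the session overload is stored under this id.
        return bucket.uploadFromPublisher(session, id, "report.bin", source);
    }
}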
+ */ + +package com.mongodb.reactivestreams.client; + +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.MongoClient; +import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.reactivestreams.client.syncadapter.SyncClientEncryption; +import com.mongodb.reactivestreams.client.syncadapter.SyncMongoClient; +import com.mongodb.reactivestreams.client.vault.ClientEncryptions; + +public class ClientSideEncryption25LookupProseTests extends com.mongodb.client.ClientSideEncryption25LookupProseTests { + + @Override + protected MongoClient createMongoClient(final MongoClientSettings settings) { + return new SyncMongoClient(MongoClients.create(settings)); + } + + @Override + protected ClientEncryption createClientEncryption(final ClientEncryptionSettings settings) { + return new SyncClientEncryption(ClientEncryptions.create(settings)); + } + +} diff --git a/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/ClientSideOperationTimeoutTest.java b/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/ClientSideOperationTimeoutTest.java index 168ff4b8f81..a1063f05362 100644 --- a/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/ClientSideOperationTimeoutTest.java +++ b/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/ClientSideOperationTimeoutTest.java @@ -99,18 +99,25 @@ The Reactive Streams specification prevents us from allowing a subsequent next c @MethodSource("data") @Override public void shouldPassAllOutcomes( + final String testName, @Nullable final String fileDescription, @Nullable final String testDescription, @Nullable final String directoryName, + final int attemptNumber, + final int totalAttempts, final String schemaVersion, @Nullable final BsonArray runOnRequirements, final BsonArray entitiesArray, final BsonArray initialData, final BsonDocument definition) { try { - super.shouldPassAllOutcomes(fileDescription, + super.shouldPassAllOutcomes( + testName, + fileDescription, testDescription, directoryName, + attemptNumber, + totalAttempts, schemaVersion, runOnRequirements, entitiesArray, diff --git a/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/UnifiedReactiveStreamsTest.java b/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/UnifiedReactiveStreamsTest.java index 62c1315e240..28c8a27f8fa 100644 --- a/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/UnifiedReactiveStreamsTest.java +++ b/driver-reactive-streams/src/test/functional/com/mongodb/reactivestreams/client/unified/UnifiedReactiveStreamsTest.java @@ -24,6 +24,7 @@ import com.mongodb.client.unified.UnifiedTest; import com.mongodb.client.unified.UnifiedTestModifications; import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.lang.NonNull; import com.mongodb.reactivestreams.client.MongoClients; import com.mongodb.reactivestreams.client.gridfs.GridFSBuckets; import com.mongodb.reactivestreams.client.internal.vault.ClientEncryptionImpl; @@ -31,6 +32,11 @@ import com.mongodb.reactivestreams.client.syncadapter.SyncGridFSBucket; import com.mongodb.reactivestreams.client.syncadapter.SyncMongoClient; import com.mongodb.reactivestreams.client.syncadapter.SyncMongoDatabase; +import org.junit.jupiter.params.provider.Arguments; + +import java.io.IOException; +import java.net.URISyntaxException; +import 
java.util.Collection; import static com.mongodb.client.unified.UnifiedTestModifications.Modifier; import static com.mongodb.client.unified.UnifiedTestModifications.TestDef; @@ -94,4 +100,9 @@ protected void postCleanUp(final TestDef testDef) { disableSleep(); } } + + @NonNull + protected static Collection getTestData(final String directory) throws URISyntaxException, IOException { + return getTestData(directory, true); + } } diff --git a/driver-scala/src/integration/scala/org/mongodb/scala/syncadapter/SyncMongoCluster.scala b/driver-scala/src/integration/scala/org/mongodb/scala/syncadapter/SyncMongoCluster.scala index 972831f197f..439188e3792 100644 --- a/driver-scala/src/integration/scala/org/mongodb/scala/syncadapter/SyncMongoCluster.scala +++ b/driver-scala/src/integration/scala/org/mongodb/scala/syncadapter/SyncMongoCluster.scala @@ -1,6 +1,5 @@ package org.mongodb.scala.syncadapter -import com.mongodb.assertions.Assertions import com.mongodb.client.model.bulk.{ ClientBulkWriteOptions, ClientBulkWriteResult, ClientNamespacedWriteModel } import com.mongodb.{ ClientSessionOptions, ReadConcern, ReadPreference, WriteConcern } import com.mongodb.client.{ ClientSession, MongoCluster => JMongoCluster, MongoDatabase => JMongoDatabase } @@ -129,33 +128,21 @@ class SyncMongoCluster(wrapped: MongoCluster) extends JMongoCluster { override def bulkWrite( models: util.List[_ <: ClientNamespacedWriteModel] - ): ClientBulkWriteResult = { - org.junit.Assume.assumeTrue("TODO-JAVA-5531 implement", java.lang.Boolean.parseBoolean(toString)) - throw Assertions.fail("TODO-JAVA-5531 implement") - } + ): ClientBulkWriteResult = wrapped.bulkWrite(models.asScala.toList).toFuture().get() override def bulkWrite( models: util.List[_ <: ClientNamespacedWriteModel], options: ClientBulkWriteOptions - ): ClientBulkWriteResult = { - org.junit.Assume.assumeTrue("TODO-JAVA-5531 implement", java.lang.Boolean.parseBoolean(toString)) - throw Assertions.fail("TODO-JAVA-5531 implement") - } + ): ClientBulkWriteResult = wrapped.bulkWrite(models.asScala.toList, options).toFuture().get() override def bulkWrite( clientSession: ClientSession, models: util.List[_ <: ClientNamespacedWriteModel] - ): ClientBulkWriteResult = { - org.junit.Assume.assumeTrue("TODO-JAVA-5531 implement", java.lang.Boolean.parseBoolean(toString)) - throw Assertions.fail("TODO-JAVA-5531 implement") - } + ): ClientBulkWriteResult = wrapped.bulkWrite(unwrap(clientSession), models.asScala.toList).toFuture().get() override def bulkWrite( clientSession: ClientSession, models: util.List[_ <: ClientNamespacedWriteModel], options: ClientBulkWriteOptions - ): ClientBulkWriteResult = { - org.junit.Assume.assumeTrue("TODO-JAVA-5531 implement", java.lang.Boolean.parseBoolean(toString)) - throw Assertions.fail("TODO-JAVA-5531 implement") - } + ): ClientBulkWriteResult = wrapped.bulkWrite(unwrap(clientSession), models.asScala.toList, options).toFuture().get() } diff --git a/driver-scala/src/main/scala/org/mongodb/scala/MongoCluster.scala b/driver-scala/src/main/scala/org/mongodb/scala/MongoCluster.scala index a7352d5ac41..bd0422761b5 100644 --- a/driver-scala/src/main/scala/org/mongodb/scala/MongoCluster.scala +++ b/driver-scala/src/main/scala/org/mongodb/scala/MongoCluster.scala @@ -22,6 +22,7 @@ import com.mongodb.reactivestreams.client.{ MongoCluster => JMongoCluster } import org.bson.codecs.configuration.CodecRegistry import org.mongodb.scala.bson.DefaultHelper.DefaultsTo import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.model.bulk.{ 
ClientBulkWriteOptions, ClientBulkWriteResult, ClientNamespacedWriteModel } import scala.collection.JavaConverters._ import scala.concurrent.duration.{ Duration, MILLISECONDS } @@ -290,4 +291,124 @@ class MongoCluster(private val wrapped: JMongoCluster) { )(implicit e: C DefaultsTo Document, ct: ClassTag[C]): ChangeStreamObservable[C] = ChangeStreamObservable(wrapped.watch(clientSession, pipeline.asJava, ct)) + /** + * Executes a client-level bulk write operation. + * This method is functionally equivalent to `bulkWrite(List, ClientBulkWriteOptions)` + * with the [[org.mongodb.scala.model.bulk.ClientBulkWriteOptions.clientBulkWriteOptions default options]]. + * + * This operation supports retryable writes. + * Depending on the number of `models`, encoded size of `models`, and the size limits in effect, + * executing this operation may require multiple `bulkWrite` commands. + * The eligibility for retries is determined per each `bulkWrite` command: + * [[org.mongodb.scala.model.bulk.ClientNamespacedUpdateManyModel]], [[org.mongodb.scala.model.bulk.ClientNamespacedDeleteManyModel]] in a command render it non-retryable. + * + * This operation is not supported by MongoDB Atlas Serverless instances. + * + * [[https://www.mongodb.com/docs/manual/reference/command/bulkWrite/ bulkWrite]] + * @param models The [[org.mongodb.scala.model.bulk.ClientNamespacedWriteModel]] individual write operations. + * @return The [[SingleObservable]] signalling at most one element [[org.mongodb.scala.model.bulk.ClientBulkWriteResult]] if the operation is successful, + * or the following errors: + * - [[ClientBulkWriteException]]: If and only if the operation is unsuccessful or partially unsuccessful, + * and there is at least one of the following pieces of information to report: + * [[ClientBulkWriteException ClientBulkWriteException#getWriteConcernErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getWriteErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getPartialResult]]. + * - [[MongoException]]: Only if the operation is unsuccessful. + * @since 5.4 + * @note Requires MongoDB 8.0 or greater. + */ + def bulkWrite(models: List[_ <: ClientNamespacedWriteModel]): SingleObservable[ClientBulkWriteResult] = + wrapped.bulkWrite(models.asJava) + + /** + * Executes a client-level bulk write operation. + * + * This operation supports retryable writes. + * Depending on the number of `models`, encoded size of `models`, and the size limits in effect, + * executing this operation may require multiple `bulkWrite` commands. + * The eligibility for retries is determined per each `bulkWrite` command: + * [[org.mongodb.scala.model.bulk.ClientNamespacedUpdateManyModel]], [[org.mongodb.scala.model.bulk.ClientNamespacedDeleteManyModel]] in a command render it non-retryable. + * + * This operation is not supported by MongoDB Atlas Serverless instances. + * + * [[https://www.mongodb.com/docs/manual/reference/command/bulkWrite/ bulkWrite]] + * @param models The [[org.mongodb.scala.model.bulk.ClientNamespacedWriteModel]] individual write operations. + * @param options The options. 
+ * @return The [[SingleObservable]] signalling at most one element [[org.mongodb.scala.model.bulk.ClientBulkWriteResult]] if the operation is successful, + * or the following errors: + * - [[ClientBulkWriteException]]: If and only if the operation is unsuccessful or partially unsuccessful, + * and there is at least one of the following pieces of information to report: + * [[ClientBulkWriteException ClientBulkWriteException#getWriteConcernErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getWriteErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getPartialResult]]. + * - [[MongoException]]: Only if the operation is unsuccessful. + * @since 5.4 + * @note Requires MongoDB 8.0 or greater. + */ + def bulkWrite( + models: List[_ <: ClientNamespacedWriteModel], + options: ClientBulkWriteOptions + ): SingleObservable[ClientBulkWriteResult] = wrapped.bulkWrite(models.asJava, options) + + /** + * Executes a client-level bulk write operation. + * This method is functionally equivalent to `bulkWrite(ClientSession, List, ClientBulkWriteOptions)` + * with the [[org.mongodb.scala.model.bulk.ClientBulkWriteOptions.clientBulkWriteOptions default options]]. + * + * This operation supports retryable writes. + * Depending on the number of `models`, encoded size of `models`, and the size limits in effect, + * executing this operation may require multiple `bulkWrite` commands. + * The eligibility for retries is determined per each `bulkWrite` command: + * [[org.mongodb.scala.model.bulk.ClientNamespacedUpdateManyModel]], [[org.mongodb.scala.model.bulk.ClientNamespacedDeleteManyModel]] in a command render it non-retryable. + * + * This operation is not supported by MongoDB Atlas Serverless instances. + * + * [[https://www.mongodb.com/docs/manual/reference/command/bulkWrite/ bulkWrite]] + * @param clientSession [[ClientSession client session]] with which to associate this operation. + * @param models The [[org.mongodb.scala.model.bulk.ClientNamespacedWriteModel]] individual write operations. + * @return The [[SingleObservable]] signalling at most one element [[org.mongodb.scala.model.bulk.ClientBulkWriteResult]] if the operation is successful, + * or the following errors: + * - [[ClientBulkWriteException]]: If and only if the operation is unsuccessful or partially unsuccessful, + * and there is at least one of the following pieces of information to report: + * [[ClientBulkWriteException ClientBulkWriteException#getWriteConcernErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getWriteErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getPartialResult]]. + * - [[MongoException]]: Only if the operation is unsuccessful. + * @since 5.4 + * @note Requires MongoDB 8.0 or greater. + */ + def bulkWrite( + clientSession: ClientSession, + models: List[_ <: ClientNamespacedWriteModel] + ): SingleObservable[ClientBulkWriteResult] = wrapped.bulkWrite(clientSession, models.asJava) + + /** + * Executes a client-level bulk write operation. + * + * This operation supports retryable writes. + * Depending on the number of `models`, encoded size of `models`, and the size limits in effect, + * executing this operation may require multiple `bulkWrite` commands. + * The eligibility for retries is determined per each `bulkWrite` command: + * [[org.mongodb.scala.model.bulk.ClientNamespacedUpdateManyModel]], [[org.mongodb.scala.model.bulk.ClientNamespacedDeleteManyModel]] in a command render it non-retryable. 
+ * + * [[https://www.mongodb.com/docs/manual/reference/command/bulkWrite/ bulkWrite]] + * @param clientSession The [[ClientSession client session]] with which to associate this operation. + * @param models The [[org.mongodb.scala.model.bulk.ClientNamespacedWriteModel]] individual write operations. + * @param options The options. + * @return The [[SingleObservable]] signalling at most one element [[org.mongodb.scala.model.bulk.ClientBulkWriteResult]] if the operation is successful, + * or the following errors: + * - [[ClientBulkWriteException]]: If and only if the operation is unsuccessful or partially unsuccessful, + * and there is at least one of the following pieces of information to report: + * [[ClientBulkWriteException ClientBulkWriteException#getWriteConcernErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getWriteErrors]], + * [[ClientBulkWriteException ClientBulkWriteException#getPartialResult]]. + * - [[MongoException]]: Only if the operation is unsuccessful. + * @since 5.4 + * @note Requires MongoDB 8.0 or greater. + */ + def bulkWrite( + clientSession: ClientSession, + models: List[_ <: ClientNamespacedWriteModel], + options: ClientBulkWriteOptions + ): SingleObservable[ClientBulkWriteResult] = wrapped.bulkWrite(clientSession, models.asJava, options) } diff --git a/driver-scala/src/main/scala/org/mongodb/scala/model/bulk/package.scala b/driver-scala/src/main/scala/org/mongodb/scala/model/bulk/package.scala new file mode 100644 index 00000000000..44dcd7e3c84 --- /dev/null +++ b/driver-scala/src/main/scala/org/mongodb/scala/model/bulk/package.scala @@ -0,0 +1,235 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.mongodb.scala.model + +import org.mongodb.scala.MongoNamespace +import org.mongodb.scala.bson.conversions.Bson + +import scala.collection.JavaConverters._ + +/** + * Models, options, results for the client-level bulk write operation. + * + * @since 5.4 + */ +package object bulk { + + /** + * A model for inserting a document. + */ + type ClientNamespacedInsertOneModel = com.mongodb.client.model.bulk.ClientNamespacedInsertOneModel + + /** + * A model for updating at most one document matching a filter. + */ + type ClientNamespacedUpdateOneModel = com.mongodb.client.model.bulk.ClientNamespacedUpdateOneModel + + /** + * A model for updating all documents matching a filter. + */ + type ClientNamespacedUpdateManyModel = com.mongodb.client.model.bulk.ClientNamespacedUpdateManyModel + + /** + * A model for replacing at most one document matching a filter. + */ + type ClientNamespacedReplaceOneModel = com.mongodb.client.model.bulk.ClientNamespacedReplaceOneModel + + /** + * A model for deleting at most one document matching a filter. + */ + type ClientNamespacedDeleteOneModel = com.mongodb.client.model.bulk.ClientNamespacedDeleteOneModel + + /** + * A model for deleting all documents matching a filter. 
+ */ + type ClientNamespacedDeleteManyModel = com.mongodb.client.model.bulk.ClientNamespacedDeleteManyModel + + /** + * A combination of an individual write operation and a [[MongoNamespace]] + * the operation is targeted at. + */ + type ClientNamespacedWriteModel = com.mongodb.client.model.bulk.ClientNamespacedWriteModel + + object ClientNamespacedWriteModel { + + def insertOne[TDocument](namespace: MongoNamespace, document: TDocument): ClientNamespacedInsertOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.insertOne(namespace, document) + + def updateOne(namespace: MongoNamespace, filter: Bson, update: Bson): ClientNamespacedUpdateOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateOne(namespace, filter, update) + + def updateOne( + namespace: MongoNamespace, + filter: Bson, + update: Bson, + options: ClientUpdateOneOptions + ): ClientNamespacedUpdateOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateOne(namespace, filter, update, options) + + def updateOne( + namespace: MongoNamespace, + filter: Bson, + updatePipeline: Iterable[_ <: Bson] + ): ClientNamespacedUpdateOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateOne(namespace, filter, updatePipeline.asJava) + + def updateOne( + namespace: MongoNamespace, + filter: Bson, + updatePipeline: Iterable[_ <: Bson], + options: ClientUpdateOneOptions + ): ClientNamespacedUpdateOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateOne( + namespace, + filter, + updatePipeline.asJava, + options + ) + + def updateMany(namespace: MongoNamespace, filter: Bson, update: Bson): ClientNamespacedUpdateManyModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateMany(namespace, filter, update) + + def updateMany( + namespace: MongoNamespace, + filter: Bson, + update: Bson, + options: ClientUpdateManyOptions + ): ClientNamespacedUpdateManyModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateMany(namespace, filter, update, options) + + def updateMany( + namespace: MongoNamespace, + filter: Bson, + updatePipeline: Iterable[_ <: Bson] + ): ClientNamespacedUpdateManyModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateMany(namespace, filter, updatePipeline.asJava) + + def updateMany( + namespace: MongoNamespace, + filter: Bson, + updatePipeline: Iterable[_ <: Bson], + options: ClientUpdateManyOptions + ): ClientNamespacedUpdateManyModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.updateMany( + namespace, + filter, + updatePipeline.asJava, + options + ) + + def replaceOne[TDocument]( + namespace: MongoNamespace, + filter: Bson, + replacement: TDocument + ): ClientNamespacedReplaceOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.replaceOne(namespace, filter, replacement) + + def replaceOne[TDocument]( + namespace: MongoNamespace, + filter: Bson, + replacement: TDocument, + options: ClientReplaceOneOptions + ): ClientNamespacedReplaceOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.replaceOne(namespace, filter, replacement, options) + + def deleteOne(namespace: MongoNamespace, filter: Bson): ClientNamespacedDeleteOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.deleteOne(namespace, filter) + + def deleteOne( + namespace: MongoNamespace, + filter: Bson, + options: ClientDeleteOneOptions + ): ClientNamespacedDeleteOneModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.deleteOne(namespace, filter, 
options) + + def deleteMany(namespace: MongoNamespace, filter: Bson): ClientNamespacedDeleteManyModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.deleteMany(namespace, filter) + + def deleteMany( + namespace: MongoNamespace, + filter: Bson, + options: ClientDeleteManyOptions + ): ClientNamespacedDeleteManyModel = + com.mongodb.client.model.bulk.ClientNamespacedWriteModel.deleteMany(namespace, filter, options) + } + + /** + * The options to apply when executing a client-level bulk write operation. + */ + type ClientBulkWriteOptions = com.mongodb.client.model.bulk.ClientBulkWriteOptions + + object ClientBulkWriteOptions { + def clientBulkWriteOptions(): ClientBulkWriteOptions = + com.mongodb.client.model.bulk.ClientBulkWriteOptions.clientBulkWriteOptions() + } + + /** + * The options to apply when updating a document. + */ + type ClientUpdateOneOptions = com.mongodb.client.model.bulk.ClientUpdateOneOptions + + object ClientUpdateOneOptions { + def clientUpdateOneOptions(): ClientUpdateOneOptions = + com.mongodb.client.model.bulk.ClientUpdateOneOptions.clientUpdateOneOptions() + } + + /** + * The options to apply when updating documents. + */ + type ClientUpdateManyOptions = com.mongodb.client.model.bulk.ClientUpdateManyOptions + + object ClientUpdateManyOptions { + def clientUpdateManyOptions(): ClientUpdateManyOptions = + com.mongodb.client.model.bulk.ClientUpdateManyOptions.clientUpdateManyOptions() + } + + /** + * The options to apply when replacing a document. + */ + type ClientReplaceOneOptions = com.mongodb.client.model.bulk.ClientReplaceOneOptions + + object ClientReplaceOneOptions { + def clientReplaceOneOptions(): ClientReplaceOneOptions = + com.mongodb.client.model.bulk.ClientReplaceOneOptions.clientReplaceOneOptions() + } + + /** + * The options to apply when deleting a document. + */ + type ClientDeleteOneOptions = com.mongodb.client.model.bulk.ClientDeleteOneOptions + + object ClientDeleteOneOptions { + def clientDeleteOneOptions(): ClientDeleteOneOptions = + com.mongodb.client.model.bulk.ClientDeleteOneOptions.clientDeleteOneOptions() + } + + /** + * The options to apply when deleting documents. + */ + type ClientDeleteManyOptions = com.mongodb.client.model.bulk.ClientDeleteManyOptions + + object ClientDeleteManyOptions { + def clientDeleteManyOptions(): ClientDeleteManyOptions = + com.mongodb.client.model.bulk.ClientDeleteManyOptions.clientDeleteManyOptions() + } + + /** + * The result of a successful or partially successful client-level bulk write operation. 
+ * + */ + type ClientBulkWriteResult = com.mongodb.client.model.bulk.ClientBulkWriteResult +} diff --git a/driver-scala/src/main/scala/org/mongodb/scala/model/search/SearchOperator.scala b/driver-scala/src/main/scala/org/mongodb/scala/model/search/SearchOperator.scala index 90f27092ebc..1fa47a54e1b 100644 --- a/driver-scala/src/main/scala/org/mongodb/scala/model/search/SearchOperator.scala +++ b/driver-scala/src/main/scala/org/mongodb/scala/model/search/SearchOperator.scala @@ -17,10 +17,17 @@ package org.mongodb.scala.model.search import com.mongodb.annotations.{ Beta, Reason } import com.mongodb.client.model.search.{ SearchOperator => JSearchOperator } + +import java.util.UUID + +import org.mongodb.scala.bson.BsonDocument import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.geojson.Point +import org.bson.types.ObjectId; + import java.time.{ Duration, Instant } + import collection.JavaConverters._ /** @@ -228,6 +235,266 @@ object SearchOperator { def near(origin: Point, pivot: Number, paths: Iterable[_ <: FieldSearchPath]): GeoNearSearchOperator = JSearchOperator.near(origin, pivot, paths.asJava) + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The boolean value to search for. + * @param values More fields to be searched. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in(path: FieldSearchPath, value: Boolean, values: Boolean*): InSearchOperator = + JSearchOperator.in(path, value, values: _*) + + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The objectId value to search for. + * @param values More fields to be searched. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in(path: FieldSearchPath, value: ObjectId, values: ObjectId*): InSearchOperator = + JSearchOperator.in(path, value, values: _*) + + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The number value to search for. + * @param values More fields to be searched. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in(path: FieldSearchPath, value: Number, values: Number*): InSearchOperator = + JSearchOperator.in(path, value, values: _*) + + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The instant date value to search for. + * @param values More fields to be searched. + * @return The requested `SearchOperator`. 
+ * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in(path: FieldSearchPath, value: Instant, values: Instant*): InSearchOperator = + JSearchOperator.in(path, value, values: _*) + + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The uuid value to search for. + * @param values More fields to be searched. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in(path: FieldSearchPath, value: UUID, values: UUID*): InSearchOperator = + JSearchOperator.in(path, value, values: _*) + + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param value The string value to search for. + * @param values More fields to be searched. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in(path: FieldSearchPath, value: String, values: String*): InSearchOperator = + JSearchOperator.in(path, value, values: _*) + + /** + * Returns a `SearchOperator` that searches for documents where the value + * or array of values at a given path contains any of the specified values + * + * @param path The indexed field to be searched. + * @param values The non-empty values to search for. Value can be either a single value or an array of values of only one of the supported BSON types and can't be a mix of different types. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/in/ in operator]] + */ + def in[T](path: FieldSearchPath, values: Iterable[_ <: T]): InSearchOperator = + JSearchOperator.in(path, values.asJava) + + /** + * Returns a `SearchOperator` that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The boolean value to query for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]] + */ + def equals(path: FieldSearchPath, value: Boolean): EqualsSearchOperator = + JSearchOperator.equals(path, value) + + /** + * Returns a `SearchOperator` that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The object id value to query for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]] + */ + def equals(path: FieldSearchPath, value: ObjectId): EqualsSearchOperator = + JSearchOperator.equals(path, value) + + /** + * Returns a `SearchOperator` that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. + * @param value The number value to query for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]] + */ + def equals(path: FieldSearchPath, value: Number): EqualsSearchOperator = + JSearchOperator.equals(path, value) + + /** + * Returns a `SearchOperator` that searches for documents where a field matches the specified value. + * + * @param path The indexed field to be searched. 
+   * @param value The instant date value to query for.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]]
+   */
+  def equals(path: FieldSearchPath, value: Instant): EqualsSearchOperator =
+    JSearchOperator.equals(path, value)
+
+  /**
+   * Returns a `SearchOperator` that searches for documents where a field matches the specified value.
+   *
+   * @param path The indexed field to be searched.
+   * @param value The string value to query for.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]]
+   */
+  def equals(path: FieldSearchPath, value: String): EqualsSearchOperator =
+    JSearchOperator.equals(path, value)
+
+  /**
+   * Returns a `SearchOperator` that searches for documents where a field matches the specified value.
+   *
+   * @param path The indexed field to be searched.
+   * @param value The uuid value to query for.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]]
+   */
+  def equals(path: FieldSearchPath, value: UUID): EqualsSearchOperator =
+    JSearchOperator.equals(path, value)
+
+  /**
+   * Returns a `SearchOperator` that searches for documents where a field matches null.
+   *
+   * @param path The indexed field to be searched.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/equals/ equals operator]]
+   */
+  def equalsNull(path: FieldSearchPath): EqualsSearchOperator =
+    JSearchOperator.equalsNull(path)
+
+  /**
+   * Returns a `SearchOperator` that returns documents similar to the input document.
+   *
+   * @param like The BSON document that is used to extract representative terms to query for.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/morelikethis/ moreLikeThis operator]]
+   */
+  def moreLikeThis(like: BsonDocument): MoreLikeThisSearchOperator = JSearchOperator.moreLikeThis(like)
+
+  /**
+   * Returns a `SearchOperator` that returns documents similar to the input documents.
+   *
+   * @param likes The BSON documents that are used to extract representative terms to query for.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/morelikethis/ moreLikeThis operator]]
+   */
+  def moreLikeThis(likes: Iterable[BsonDocument]): MoreLikeThisSearchOperator =
+    JSearchOperator.moreLikeThis(likes.asJava)
+
+  /**
+   * Returns a `SearchOperator` that enables queries which use special characters in the search string that can match any character.
+   *
+   * @param query The string to search for.
+   * @param path The indexed field to be searched.
+   * @return The requested `SearchOperator`.
+   * @see [[https://www.mongodb.com/docs/atlas/atlas-search/wildcard/ wildcard operator]]
+   */
+  def wildcard(query: String, path: SearchPath): WildcardSearchOperator = JSearchOperator.wildcard(path, query)
+
+  /**
+   * Returns a `SearchOperator` that enables queries which use special characters in the search string that can match any character.
+   *
+   * @param queries The non-empty strings to search for.
+   * @param paths The non-empty indexed fields to be searched.
+   * @return The requested `SearchOperator`.
+ * @see [[https://www.mongodb.com/docs/atlas/atlas-search/wildcard/ wildcard operator]] + */ + def wildcard(queries: Iterable[String], paths: Iterable[_ <: SearchPath]): WildcardSearchOperator = + JSearchOperator.wildcard(queries.asJava, paths.asJava) + + /** + * Returns a `SearchOperator` that supports querying a combination of indexed fields and values. + * + * @param defaultPath The field to be searched by default. + * @param query One or more indexed fields and values to search. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/queryString/ queryString operator]] + */ + def queryString(defaultPath: FieldSearchPath, query: String): QueryStringSearchOperator = + JSearchOperator.queryString(defaultPath, query) + + /** + * Returns a `SearchOperator` that performs a search for documents containing an ordered sequence of terms. + * + * @param path The field to be searched. + * @param query The string to search for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/phrase/ phrase operator]] + */ + def phrase(path: SearchPath, query: String): PhraseSearchOperator = JSearchOperator.phrase(path, query) + + /** + * Returns a `SearchOperator` that performs a search for documents containing an ordered sequence of terms. + * + * @param paths The non-empty fields to be searched. + * @param queries The non-empty strings to search for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/phrase/ phrase operator]] + */ + def phrase(paths: Iterable[_ <: SearchPath], queries: Iterable[String]): PhraseSearchOperator = + JSearchOperator.phrase(paths.asJava, queries.asJava) + + /** + * Returns a `SearchOperator` that performs a search using a regular expression. + * + * @param path The field to be searched. + * @param query The string to search for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/regex/ regex operator]] + */ + def regex(path: SearchPath, query: String): RegexSearchOperator = JSearchOperator.regex(path, query) + + /** + * Returns a `SearchOperator` that performs a search using a regular expression. + * + * @param paths The non-empty fields to be searched. + * @param queries The non-empty strings to search for. + * @return The requested `SearchOperator`. + * @see [[https://www.mongodb.com/docs/atlas/atlas-search/regex/ regex operator]] + */ + def regex(paths: Iterable[_ <: SearchPath], queries: Iterable[String]): RegexSearchOperator = + JSearchOperator.regex(paths.asJava, queries.asJava) + /** * Creates a `SearchOperator` from a `Bson` in situations when there is no builder method that better satisfies your needs. * This method cannot be used to validate the syntax. 
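Reviewer note, not part of the patch: a minimal sketch of how the `SearchOperator` builders added above (`in`, `equals`, `phrase`, and friends) might be used from the Scala API. The connection string, database/collection names, field names, and the presence of an Atlas Search index are assumptions made for illustration; `Aggregates.search` and `SearchPath.fieldPath` are the driver's existing builders.

import org.mongodb.scala.MongoClient
import org.mongodb.scala.model.Aggregates
import org.mongodb.scala.model.search.{ SearchOperator, SearchPath }

import scala.concurrent.Await
import scala.concurrent.duration._

object SearchOperatorSketch extends App {
  // Placeholder URI: point this at an Atlas cluster that has a search index on the collection.
  val client = MongoClient("mongodb+srv://<user>:<password>@<cluster-uri>")
  val movies = client.getDatabase("sample_mflix").getCollection("movies")

  // Operators introduced in this patch; field names are hypothetical.
  val byGenre = SearchOperator.in(SearchPath.fieldPath("genres"), "Comedy", "Drama")
  val byTitle = SearchOperator.phrase(SearchPath.fieldPath("title"), "space opera")
  val isAdult = SearchOperator.equals(SearchPath.fieldPath("adult"), value = false)

  // A $search stage takes a single operator; several operators such as byGenre and isAdult
  // could be combined via SearchOperator.compound() before being passed to Aggregates.search.
  val results = Await.result(
    movies.aggregate(Seq(Aggregates.search(byTitle), Aggregates.limit(5))).toFuture(),
    30.seconds)
  results.foreach(doc => println(doc.toJson()))

  client.close()
}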
diff --git a/driver-scala/src/main/scala/org/mongodb/scala/model/search/package.scala b/driver-scala/src/main/scala/org/mongodb/scala/model/search/package.scala index 557060324cd..771e800801d 100644 --- a/driver-scala/src/main/scala/org/mongodb/scala/model/search/package.scala +++ b/driver-scala/src/main/scala/org/mongodb/scala/model/search/package.scala @@ -119,6 +119,14 @@ package object search { @Beta(Array(Reason.CLIENT)) type TextSearchOperator = com.mongodb.client.model.search.TextSearchOperator + /** + * @see `SearchOperator.phrase(String, SearchPath)` + * @see `SearchOperator.phrase(Iterable, Iterable)` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type PhraseSearchOperator = com.mongodb.client.model.search.PhraseSearchOperator + /** * @see `SearchOperator.autocomplete(String, FieldSearchPath)` * @see `SearchOperator.autocomplete(Iterable, FieldSearchPath)` @@ -127,6 +135,14 @@ package object search { @Beta(Array(Reason.CLIENT)) type AutocompleteSearchOperator = com.mongodb.client.model.search.AutocompleteSearchOperator + /** + * @see `SearchOperator.regex(String, SearchPath)` + * @see `SearchOperator.regex(Iterable, Iterable)` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type RegexSearchOperator = com.mongodb.client.model.search.RegexSearchOperator + /** * A base for a [[NumberRangeSearchOperatorBase]] which allows creating instances of this operator. * This interface is a technicality and does not represent a meaningful element of the full-text search query syntax. @@ -182,6 +198,42 @@ package object search { @Beta(Array(Reason.CLIENT)) type GeoNearSearchOperator = com.mongodb.client.model.search.GeoNearSearchOperator + /** + * @see `SearchOperator.in` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type InSearchOperator = com.mongodb.client.model.search.InSearchOperator + + /** + * @see `SearchOperator.equals` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type EqualsSearchOperator = com.mongodb.client.model.search.EqualsSearchOperator + + /** + * @see `SearchOperator.moreLikeThis` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type MoreLikeThisSearchOperator = com.mongodb.client.model.search.MoreLikeThisSearchOperator + + /** + * @see `SearchOperator.wildcard(String, SearchPath)` + * @see `SearchOperator.wildcard(Iterable, Iterable)` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type WildcardSearchOperator = com.mongodb.client.model.search.WildcardSearchOperator + + /** + * @see `SearchOperator.queryString` + */ + @Sealed + @Beta(Array(Reason.CLIENT)) + type QueryStringSearchOperator = com.mongodb.client.model.search.QueryStringSearchOperator + /** * Fuzzy search options that may be used with some [[SearchOperator]]s. * diff --git a/driver-scala/src/main/scala/org/mongodb/scala/package.scala b/driver-scala/src/main/scala/org/mongodb/scala/package.scala index 7da5578ff96..9a4cba0e35c 100644 --- a/driver-scala/src/main/scala/org/mongodb/scala/package.scala +++ b/driver-scala/src/main/scala/org/mongodb/scala/package.scala @@ -234,6 +234,13 @@ package object scala extends ClientSessionImplicits with ObservableImplicits wit */ type MongoBulkWriteException = com.mongodb.MongoBulkWriteException + /** + * The result of an unsuccessful or partially unsuccessful client-level bulk write operation. + * + * @since 5.4 + */ + type ClientBulkWriteException = com.mongodb.ClientBulkWriteException + /** * An exception indicating that a failure occurred when running a `\$changeStream`. 
* @since 2.2 diff --git a/driver-scala/src/test/scala/org/mongodb/scala/ApiAliasAndCompanionSpec.scala b/driver-scala/src/test/scala/org/mongodb/scala/ApiAliasAndCompanionSpec.scala index 5b8e46c598d..2e21d30526a 100644 --- a/driver-scala/src/test/scala/org/mongodb/scala/ApiAliasAndCompanionSpec.scala +++ b/driver-scala/src/test/scala/org/mongodb/scala/ApiAliasAndCompanionSpec.scala @@ -153,9 +153,7 @@ class ApiAliasAndCompanionSpec extends BaseSpec { .asScala .map(_.getSimpleName) .toSet + - "MongoException" - "MongoGridFSException" - "MongoConfigurationException" - "MongoWriteConcernWithResponseException" - - // TODO-JAVA-5531 remove the `"ClientBulkWriteException"` exclusion - "ClientBulkWriteException" + "MongoException" - "MongoGridFSException" - "MongoConfigurationException" - "MongoWriteConcernWithResponseException" val objects = new Reflections( new ConfigurationBuilder() diff --git a/driver-scala/src/test/scala/org/mongodb/scala/MongoClientSpec.scala b/driver-scala/src/test/scala/org/mongodb/scala/MongoClientSpec.scala index 4e0189bfd5e..a888e33ae7f 100644 --- a/driver-scala/src/test/scala/org/mongodb/scala/MongoClientSpec.scala +++ b/driver-scala/src/test/scala/org/mongodb/scala/MongoClientSpec.scala @@ -19,6 +19,7 @@ package org.mongodb.scala import com.mongodb.reactivestreams.client.{ MongoClient => JMongoClient } import org.bson.BsonDocument import org.mockito.Mockito.verify +import org.mongodb.scala.model.bulk.{ ClientBulkWriteOptions, ClientBulkWriteResult, ClientNamespacedWriteModel } import org.scalatestplus.mockito.MockitoSugar import scala.collection.JavaConverters._ @@ -28,6 +29,7 @@ class MongoClientSpec extends BaseSpec with MockitoSugar { val wrapped = mock[JMongoClient] val clientSession = mock[ClientSession] val mongoClient = new MongoClient(wrapped) + val namespace = new MongoNamespace("db.coll") "MongoClient" should "have the same methods as the wrapped MongoClient" in { val wrapped = classOf[JMongoClient].getMethods.map(_.getName).toSet -- Seq("getSettings") @@ -35,12 +37,7 @@ class MongoClientSpec extends BaseSpec with MockitoSugar { wrapped.foreach((name: String) => { val cleanedName = name.stripPrefix("get") - - if (!cleanedName.contains("bulkWrite")) { - // TODO-JAVA-5531 remove this whole `if` block - assert(local.contains(name) | local.contains(cleanedName.head.toLower + cleanedName.tail), s"Missing: $name") - } - // TODO-JAVA-5531 uncomment: assert(local.contains(name) | local.contains(cleanedName.head.toLower + cleanedName.tail), s"Missing: $name") + assert(local.contains(name) | local.contains(cleanedName.head.toLower + cleanedName.tail), s"Missing: $name") }) } @@ -104,6 +101,37 @@ class MongoClientSpec extends BaseSpec with MockitoSugar { verify(wrapped).watch(clientSession, pipeline.asJava, classOf[BsonDocument]) } + it should "call the underlying bulkWrite with models only" in { + val models = List(ClientNamespacedWriteModel.insertOne(namespace, Document("key" -> "value"))) + mongoClient.bulkWrite(models) shouldBe a[SingleObservable[_]] + verify(wrapped).bulkWrite(models.asJava) + } + + it should "call the underlying bulkWrite with models and options" in { + val models = List(ClientNamespacedWriteModel.insertOne(namespace, Document("key" -> "value"))) + val options = ClientBulkWriteOptions.clientBulkWriteOptions() + + mongoClient.bulkWrite(models, options) + + verify(wrapped).bulkWrite(models.asJava, options) + } + + it should "call the underlying bulkWrite with clientSession and models" in { + val models = 
List(ClientNamespacedWriteModel.insertOne(namespace, Document("key" -> "value"))) + + mongoClient.bulkWrite(clientSession, models) + + verify(wrapped).bulkWrite(clientSession, models.asJava) + } + + it should "call the underlying bulkWrite with clientSession, models, and options" in { + val models = List(ClientNamespacedWriteModel.insertOne(namespace, Document("key" -> "value"))) + val options = ClientBulkWriteOptions.clientBulkWriteOptions() + + mongoClient.bulkWrite(clientSession, models, options) + verify(wrapped).bulkWrite(clientSession, models.asJava, options) + } + it should "call the underlying getClusterDescription" in { mongoClient.getClusterDescription verify(wrapped).getClusterDescription diff --git a/driver-scala/src/test/scala/org/mongodb/scala/ScalaPackageSpec.scala b/driver-scala/src/test/scala/org/mongodb/scala/ScalaPackageSpec.scala index 3a91b8c3034..19b1140e8f7 100644 --- a/driver-scala/src/test/scala/org/mongodb/scala/ScalaPackageSpec.scala +++ b/driver-scala/src/test/scala/org/mongodb/scala/ScalaPackageSpec.scala @@ -20,7 +20,6 @@ import java.util.concurrent.TimeUnit import _root_.scala.concurrent.duration.Duration import com.mongodb.{ MongoCredential => JMongoCredential } import org.bson.BsonDocumentWrapper -import org.bson.codecs.DocumentCodec import org.mongodb.scala import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY import org.mongodb.scala.bson._ diff --git a/driver-scala/src/test/scala/org/mongodb/scala/model/bulk/BulkModelSpec.scala b/driver-scala/src/test/scala/org/mongodb/scala/model/bulk/BulkModelSpec.scala new file mode 100644 index 00000000000..f96ca2d96ee --- /dev/null +++ b/driver-scala/src/test/scala/org/mongodb/scala/model/bulk/BulkModelSpec.scala @@ -0,0 +1,111 @@ +package org.mongodb.scala.model.bulk + +import org.mongodb.scala.bson.Document +import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.{ BaseSpec, MongoNamespace } + +class BulkModelSpec extends BaseSpec { + + val namespace = new MongoNamespace("db.coll") + val filter: Bson = Document("a" -> 1) + val update: Bson = Document("$set" -> Document("b" -> 2)) + val replacement = Document("b" -> 2) + val document = Document("a" -> 1) + val updatePipeline: Seq[Document] = Seq(Document("$set" -> Document("b" -> 2))) + + it should "be able to create ClientNamespacedInsertOneModel" in { + val insertOneModel = ClientNamespacedWriteModel.insertOne(namespace, document) + insertOneModel shouldBe a[ClientNamespacedInsertOneModel] + insertOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedInsertOneModel] + } + + it should "be able to create ClientNamespacedUpdateOneModel with filter and update" in { + val updateOneModel = ClientNamespacedWriteModel.updateOne(namespace, filter, update) + updateOneModel shouldBe a[ClientNamespacedUpdateOneModel] + updateOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateOneModel] + } + + it should "be able to create ClientNamespacedUpdateOneModel with filter, update, and options" in { + val options = ClientUpdateOneOptions.clientUpdateOneOptions() + val updateOneModel = ClientNamespacedWriteModel.updateOne(namespace, filter, update, options) + updateOneModel shouldBe a[ClientNamespacedUpdateOneModel] + updateOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateOneModel] + } + + it should "be able to create ClientNamespacedUpdateOneModel with update pipeline" in { + val updateOneModel = ClientNamespacedWriteModel.updateOne(namespace, filter, updatePipeline) + updateOneModel shouldBe 
a[ClientNamespacedUpdateOneModel] + updateOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateOneModel] + } + + it should "be able to create ClientNamespacedUpdateOneModel with update pipeline and options" in { + val options = ClientUpdateOneOptions.clientUpdateOneOptions() + val updateOneModel = ClientNamespacedWriteModel.updateOne(namespace, filter, updatePipeline, options) + updateOneModel shouldBe a[ClientNamespacedUpdateOneModel] + updateOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateOneModel] + } + + it should "be able to create ClientNamespacedUpdateManyModel with filter and update" in { + val updateManyModel = ClientNamespacedWriteModel.updateMany(namespace, filter, update) + updateManyModel shouldBe a[ClientNamespacedUpdateManyModel] + updateManyModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateManyModel] + } + it should "be able to create ClientNamespacedUpdateManyModel with filter, update and options" in { + val options = ClientUpdateManyOptions.clientUpdateManyOptions() + val updateManyModel = ClientNamespacedWriteModel.updateMany(namespace, filter, update, options) + updateManyModel shouldBe a[ClientNamespacedUpdateManyModel] + updateManyModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateManyModel] + } + + it should "be able to create ClientNamespacedUpdateManyModel with filter, updatePipeline" in { + val updateManyModel = ClientNamespacedWriteModel.updateMany(namespace, filter, updatePipeline) + updateManyModel shouldBe a[ClientNamespacedUpdateManyModel] + updateManyModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateManyModel] + } + + it should "be able to create ClientNamespacedUpdateManyModel with filter, updatePipeline and options" in { + val options = ClientUpdateManyOptions.clientUpdateManyOptions() + val updateManyModel = ClientNamespacedWriteModel.updateMany(namespace, filter, updatePipeline, options) + updateManyModel shouldBe a[ClientNamespacedUpdateManyModel] + updateManyModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedUpdateManyModel] + } + + it should "be able to create ClientNamespacedReplaceOneModel" in { + val replaceOneModel = ClientNamespacedWriteModel.replaceOne(namespace, filter, replacement) + replaceOneModel shouldBe a[ClientNamespacedReplaceOneModel] + replaceOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedReplaceOneModel] + } + + it should "be able to create ClientNamespacedReplaceOneModel with options" in { + val options = ClientReplaceOneOptions.clientReplaceOneOptions() + val replaceOneModel = ClientNamespacedWriteModel.replaceOne(namespace, filter, replacement, options) + replaceOneModel shouldBe a[ClientNamespacedReplaceOneModel] + replaceOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedReplaceOneModel] + } + + it should "be able to create ClientNamespacedDeleteOneModel" in { + val deleteOneModel = ClientNamespacedWriteModel.deleteOne(namespace, filter) + deleteOneModel shouldBe a[ClientNamespacedDeleteOneModel] + deleteOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedDeleteOneModel] + } + + it should "be able to create ClientNamespacedDeleteOneModel with options" in { + val options = ClientDeleteOneOptions.clientDeleteOneOptions() + val deleteOneModel = ClientNamespacedWriteModel.deleteOne(namespace, filter, options) + deleteOneModel shouldBe a[ClientNamespacedDeleteOneModel] + deleteOneModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedDeleteOneModel] + } + + it should 
"be able to create ClientNamespacedDeleteManyModel" in { + val deleteManyModel = ClientNamespacedWriteModel.deleteMany(namespace, filter) + deleteManyModel shouldBe a[ClientNamespacedDeleteManyModel] + deleteManyModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedDeleteManyModel] + } + + it should "be able to create ClientNamespacedDeleteManyModel with options" in { + val options = ClientDeleteManyOptions.clientDeleteManyOptions() + val deleteManyModel = ClientNamespacedWriteModel.deleteMany(namespace, filter, options) + deleteManyModel shouldBe a[ClientNamespacedDeleteManyModel] + deleteManyModel shouldBe a[com.mongodb.client.model.bulk.ClientNamespacedDeleteManyModel] + } +} diff --git a/driver-sync/src/main/com/mongodb/client/internal/CollectionInfoRetriever.java b/driver-sync/src/main/com/mongodb/client/internal/CollectionInfoRetriever.java index 934a3dce486..9d02a1e8756 100644 --- a/driver-sync/src/main/com/mongodb/client/internal/CollectionInfoRetriever.java +++ b/driver-sync/src/main/com/mongodb/client/internal/CollectionInfoRetriever.java @@ -21,6 +21,9 @@ import com.mongodb.lang.Nullable; import org.bson.BsonDocument; +import java.util.ArrayList; +import java.util.List; + import static com.mongodb.assertions.Assertions.notNull; import static com.mongodb.client.internal.TimeoutHelper.databaseWithTimeout; @@ -33,9 +36,10 @@ class CollectionInfoRetriever { this.client = notNull("client", client); } - @Nullable - public BsonDocument filter(final String databaseName, final BsonDocument filter, @Nullable final Timeout operationTimeout) { - return databaseWithTimeout(client.getDatabase(databaseName), TIMEOUT_ERROR_MESSAGE, - operationTimeout).listCollections(BsonDocument.class).filter(filter).first(); + public List filter(final String databaseName, final BsonDocument filter, @Nullable final Timeout operationTimeout) { + return databaseWithTimeout(client.getDatabase(databaseName), TIMEOUT_ERROR_MESSAGE, operationTimeout) + .listCollections(BsonDocument.class) + .filter(filter) + .into(new ArrayList<>()); } } diff --git a/driver-sync/src/main/com/mongodb/client/internal/Crypt.java b/driver-sync/src/main/com/mongodb/client/internal/Crypt.java index b910f0ab01c..15ba16e66da 100644 --- a/driver-sync/src/main/com/mongodb/client/internal/Crypt.java +++ b/driver-sync/src/main/com/mongodb/client/internal/Crypt.java @@ -41,6 +41,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; +import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -308,9 +309,10 @@ private void fetchCredentials(final MongoCryptContext cryptContext) { private void collInfo(final MongoCryptContext cryptContext, final String databaseName, @Nullable final Timeout operationTimeout) { try { - BsonDocument collectionInfo = assertNotNull(collectionInfoRetriever).filter(databaseName, cryptContext.getMongoOperation(), operationTimeout); - if (collectionInfo != null) { - cryptContext.addMongoOperationResult(collectionInfo); + List results = assertNotNull(collectionInfoRetriever) + .filter(databaseName, cryptContext.getMongoOperation(), operationTimeout); + for (BsonDocument result : results) { + cryptContext.addMongoOperationResult(result); } cryptContext.completeMongoOperation(); } catch (Throwable t) { diff --git a/driver-sync/src/main/com/mongodb/client/internal/KeyManagementService.java b/driver-sync/src/main/com/mongodb/client/internal/KeyManagementService.java index fee5ddac729..806f768a923 100644 --- 
a/driver-sync/src/main/com/mongodb/client/internal/KeyManagementService.java +++ b/driver-sync/src/main/com/mongodb/client/internal/KeyManagementService.java @@ -23,7 +23,7 @@ import com.mongodb.internal.diagnostics.logging.Loggers; import com.mongodb.internal.time.Timeout; import com.mongodb.lang.Nullable; -import org.jetbrains.annotations.NotNull; +import com.mongodb.lang.NonNull; import javax.net.SocketFactory; import javax.net.ssl.SSLContext; @@ -149,13 +149,13 @@ public int read() throws IOException { } @Override - public int read(@NotNull final byte[] b) throws IOException { + public int read(@NonNull final byte[] b) throws IOException { setSocketSoTimeoutToOperationTimeout(); return wrapped.read(b); } @Override - public int read(@NotNull final byte[] b, final int off, final int len) throws IOException { + public int read(@NonNull final byte[] b, final int off, final int len) throws IOException { setSocketSoTimeoutToOperationTimeout(); return wrapped.read(b, off, len); } diff --git a/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryption25LookupProseTests.java b/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryption25LookupProseTests.java new file mode 100644 index 00000000000..f7c672b289b --- /dev/null +++ b/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryption25LookupProseTests.java @@ -0,0 +1,255 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.mongodb.client; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoNamespace; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.ValidationOptions; +import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.client.vault.ClientEncryptions; +import com.mongodb.crypt.capi.MongoCryptException; +import com.mongodb.fixture.EncryptionFixture; +import org.bson.BsonArray; +import org.bson.BsonDocument; +import org.bson.Document; +import org.bson.types.Binary; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import java.io.File; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static com.mongodb.ClusterFixture.isStandalone; +import static com.mongodb.ClusterFixture.serverVersionAtLeast; +import static com.mongodb.ClusterFixture.serverVersionLessThan; +import static com.mongodb.client.Fixture.getMongoClientSettingsBuilder; +import static com.mongodb.fixture.EncryptionFixture.getKmsProviders; +import static com.mongodb.testing.MongoAssertions.assertCause; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeFalse; +import static org.junit.jupiter.api.Assumptions.assumeTrue; +import static util.JsonPoweredTestHelper.getTestDocument; + +/** + * + * 25. Test $lookup + */ +public class ClientSideEncryption25LookupProseTests { + private MongoClient client; + + protected MongoClient createMongoClient(final MongoClientSettings settings) { + return MongoClients.create(settings); + } + + protected ClientEncryption createClientEncryption(final ClientEncryptionSettings settings) { + return ClientEncryptions.create(settings); + } + + @BeforeEach + public void setUp() { + assumeFalse(isStandalone()); + assumeTrue(serverVersionAtLeast(7, 0)); + + // Create an encrypted MongoClient named `encryptedClient` configured with: + MongoNamespace dataKeysNamespace = new MongoNamespace("db.keyvault"); + Map> kmsProviders = getKmsProviders(EncryptionFixture.KmsProviderType.LOCAL); + MongoClient encryptedClient = createMongoClient(getMongoClientSettingsBuilder() + .autoEncryptionSettings( + AutoEncryptionSettings.builder() + .keyVaultNamespace(dataKeysNamespace.getFullName()) + .kmsProviders(kmsProviders) + .build()) + .build()); + // Use `encryptedClient` to drop `db.keyvault`. + MongoDatabase encryptedDb = encryptedClient.getDatabase("db"); + MongoCollection encryptedCollection = encryptedDb + .getCollection(dataKeysNamespace.getCollectionName(), BsonDocument.class) + .withWriteConcern(WriteConcern.MAJORITY); + encryptedCollection.drop(); + // Insert `` into `db.keyvault` with majority write concern. 
+ encryptedCollection.insertOne(bsonDocumentFromPath("key-doc.json")); + + // Use `encryptedClient` to drop and create the following collections: + Arrays.asList("csfle", "csfle2", "qe", "qe2", "no_schema", "no_schema2").forEach(c -> { + encryptedDb.getCollection(c).drop(); + }); + // create + encryptedDb.createCollection("csfle", new CreateCollectionOptions() + .validationOptions(new ValidationOptions() + .validator(new BsonDocument("$jsonSchema", bsonDocumentFromPath("schema-csfle.json"))))); + encryptedDb.createCollection("csfle2", new CreateCollectionOptions() + .validationOptions(new ValidationOptions() + .validator(new BsonDocument("$jsonSchema", bsonDocumentFromPath("schema-csfle2.json"))))); + + encryptedDb.createCollection("qe", + new CreateCollectionOptions().encryptedFields(bsonDocumentFromPath("schema-qe.json"))); + encryptedDb.createCollection("qe2", + new CreateCollectionOptions().encryptedFields(bsonDocumentFromPath("schema-qe2.json"))); + + encryptedDb.createCollection("no_schema"); + encryptedDb.createCollection("no_schema2"); + + // Insert documents with `encryptedClient`: + Consumer insert = (name) -> { + encryptedDb.getCollection(name).insertOne(new Document(name, name)); + }; + insert.accept("csfle"); + insert.accept("csfle2"); + insert.accept("qe"); + insert.accept("qe2"); + insert.accept("no_schema"); + insert.accept("no_schema2"); + + // Create an unencrypted MongoClient named `unencryptedClient`. + MongoClient unencryptedClient = createMongoClient(getMongoClientSettingsBuilder().build()); + MongoDatabase unencryptedDb = unencryptedClient.getDatabase("db"); + + Consumer assertDocument = (name) -> { + List pipeline = Arrays.asList( + BsonDocument.parse("{\"$project\" : {\"_id\" : 0, \"__safeContent__\" : 0}}") + ); + Document decryptedDoc = encryptedDb.getCollection(name) + .aggregate(pipeline).first(); + assertEquals(decryptedDoc, new Document(name, name)); + Document encryptedDoc = unencryptedDb.getCollection(name) + .aggregate(pipeline).first(); + assertNotNull(encryptedDoc); + assertEquals(Binary.class, encryptedDoc.get(name).getClass()); + }; + + assertDocument.accept("csfle"); + assertDocument.accept("csfle2"); + assertDocument.accept("qe"); + assertDocument.accept("qe2"); + + unencryptedClient.close(); + encryptedClient.close(); + + client = createMongoClient(getMongoClientSettingsBuilder() + .autoEncryptionSettings( + AutoEncryptionSettings.builder() + .keyVaultNamespace(dataKeysNamespace.getFullName()) + .kmsProviders(kmsProviders) + .build()) + .build()); + } + + @AfterEach + @SuppressWarnings("try") + public void cleanUp() { + //noinspection EmptyTryBlock + try (MongoClient ignored = this.client) { + // just using try-with-resources to ensure they all get closed, even in the case of exceptions + } + } + + @ParameterizedTest + @CsvSource({ + "csfle, no_schema", + "qe, no_schema", + "no_schema, csfle", + "no_schema, qe", + "csfle, csfle2", + "qe, qe2", + "no_schema, no_schema2"}) + void testCase1Through7(final String from, final String to) { + assumeTrue(serverVersionAtLeast(8, 1)); + String mql = ("[\n" + + " {\"$match\" : {\"\" : \"\"}},\n" + + " {\n" + + " \"$lookup\" : {\n" + + " \"from\" : \"\",\n" + + " \"as\" : \"matched\",\n" + + " \"pipeline\" : [ {\"$match\" : {\"\" : \"\"}}, {\"$project\" : {\"_id\" : 0, \"__safeContent__\" : 0}} ]\n" + + " }\n" + + " },\n" + + " {\"$project\" : {\"_id\" : 0, \"__safeContent__\" : 0}}\n" + + "]").replace("", from).replace("", to); + + List pipeline = BsonArray.parse(mql).stream() + .map(stage -> 
stage.asDocument()) + .collect(Collectors.toList()); + assertEquals( + Document.parse("{\"\" : \"\", \"matched\" : [ {\"\" : \"\"} ]}" + .replace("", from).replace("", to)), + client.getDatabase("db").getCollection(from).aggregate(pipeline).first()); + } + + @Test + void testCase8() { + assumeTrue(serverVersionAtLeast(8, 1)); + List pipeline = BsonArray.parse("[\n" + + " {\"$match\" : {\"csfle\" : \"qe\"}},\n" + + " {\n" + + " \"$lookup\" : {\n" + + " \"from\" : \"qe\",\n" + + " \"as\" : \"matched\",\n" + + " \"pipeline\" : [ {\"$match\" : {\"qe\" : \"qe\"}}, {\"$project\" : {\"_id\" : 0}} ]\n" + + " }\n" + + " },\n" + + " {\"$project\" : {\"_id\" : 0}}\n" + + "]").stream().map(stage -> stage.asDocument()).collect(Collectors.toList()); + + assertCause( + MongoCryptException.class, + "not supported", + () -> client.getDatabase("db").getCollection("csfle").aggregate(pipeline).first()); + } + + @Test + void testCase9() { + assumeTrue(serverVersionLessThan(8, 1)); + List pipeline = BsonArray.parse("[\n" + + " {\"$match\" : {\"csfle\" : \"csfle\"}},\n" + + " {\n" + + " \"$lookup\" : {\n" + + " \"from\" : \"no_schema\",\n" + + " \"as\" : \"matched\",\n" + + " \"pipeline\" : [ {\"$match\" : {\"no_schema\" : \"no_schema\"}}, {\"$project\" : {\"_id\" : 0}} ]\n" + + " }\n" + + " },\n" + + " {\"$project\" : {\"_id\" : 0}}\n" + + "]").stream().map(stage -> stage.asDocument()).collect(Collectors.toList()); + assertCause( + RuntimeException.class, + "Upgrade", + () -> client.getDatabase("db").getCollection("csfle").aggregate(pipeline).first()); + } + + public static BsonDocument bsonDocumentFromPath(final String path) { + try { + return getTestDocument(new File(ClientSideEncryption25LookupProseTests.class + .getResource("/client-side-encryption-data/lookup/" + path).toURI())); + } catch (Exception e) { + fail("Unable to load resource", e); + return null; + } + } +} diff --git a/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryptionExternalKeyVaultTest.java b/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryptionExternalKeyVaultTest.java index f4b6b336e96..da513bb2a9e 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryptionExternalKeyVaultTest.java +++ b/driver-sync/src/test/functional/com/mongodb/client/ClientSideEncryptionExternalKeyVaultTest.java @@ -20,9 +20,11 @@ import com.mongodb.ClientEncryptionSettings; import com.mongodb.MongoClientSettings; import com.mongodb.MongoCredential; +import com.mongodb.MongoNamespace; import com.mongodb.MongoSecurityException; import com.mongodb.WriteConcern; import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.test.CollectionHelper; import com.mongodb.client.vault.ClientEncryption; import com.mongodb.client.vault.ClientEncryptions; import org.bson.BsonBinary; @@ -57,6 +59,7 @@ public class ClientSideEncryptionExternalKeyVaultTest { private MongoClient client, clientEncrypted; private ClientEncryption clientEncryption; private final boolean withExternalKeyVault; + private static final MongoNamespace NAMESPACE = new MongoNamespace("db", ClientSideEncryptionExternalKeyVaultTest.class.getName()); public ClientSideEncryptionExternalKeyVaultTest(final boolean withExternalKeyVault) { this.withExternalKeyVault = withExternalKeyVault; @@ -84,7 +87,7 @@ public void setUp() throws IOException, URISyntaxException { + "UN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk"); localMasterkey.put("key", localMasterkeyBytes); kmsProviders.put("local", localMasterkey); - 
schemaMap.put("db.coll", bsonDocumentFromPath("external-schema.json")); + schemaMap.put(NAMESPACE.getFullName(), bsonDocumentFromPath("external-schema.json")); AutoEncryptionSettings.Builder autoEncryptionSettingsBuilder = AutoEncryptionSettings.builder() .keyVaultNamespace("keyvault.datakeys") @@ -123,8 +126,8 @@ public void setUp() throws IOException, URISyntaxException { public void testExternal() { boolean authExceptionThrown = false; MongoCollection coll = clientEncrypted - .getDatabase("db") - .getCollection("coll", BsonDocument.class); + .getDatabase(NAMESPACE.getDatabaseName()) + .getCollection(NAMESPACE.getCollectionName(), BsonDocument.class); try { coll.insertOne(new BsonDocument().append("encrypted", new BsonString("test"))); } catch (MongoSecurityException mse) { @@ -169,5 +172,7 @@ public void after() { // ignore } } + + CollectionHelper.drop(NAMESPACE); } } diff --git a/driver-sync/src/test/functional/com/mongodb/client/CrudProseTest.java b/driver-sync/src/test/functional/com/mongodb/client/CrudProseTest.java index 7138cdfe67e..016887c33b7 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/CrudProseTest.java +++ b/driver-sync/src/test/functional/com/mongodb/client/CrudProseTest.java @@ -34,6 +34,7 @@ import com.mongodb.client.model.bulk.ClientBulkWriteOptions; import com.mongodb.client.model.bulk.ClientBulkWriteResult; import com.mongodb.client.model.bulk.ClientNamespacedWriteModel; +import com.mongodb.client.test.CollectionHelper; import com.mongodb.event.CommandStartedEvent; import com.mongodb.internal.connection.TestCommandListener; import org.bson.BsonArray; @@ -47,6 +48,7 @@ import org.bson.RawBsonDocument; import org.bson.codecs.configuration.CodecRegistry; import org.bson.codecs.pojo.PojoCodecProvider; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -96,7 +98,7 @@ * CRUD Prose Tests. */ public class CrudProseTest { - private static final MongoNamespace NAMESPACE = new MongoNamespace("db", "coll"); + private static final MongoNamespace NAMESPACE = new MongoNamespace("db", CrudProseTest.class.getName()); @DisplayName("1. WriteConcernError.details exposes writeConcernError.errInfo") @Test @@ -367,7 +369,8 @@ private void testBulkWriteSplitsWhenExceedingMaxMessageSizeBytesDueToNsInfo( Document helloResponse = droppedDatabase(client).runCommand(new Document("hello", 1)); int maxBsonObjectSize = helloResponse.getInteger("maxBsonObjectSize"); int maxMessageSizeBytes = helloResponse.getInteger("maxMessageSizeBytes"); - int opsBytes = maxMessageSizeBytes - 1122; + // By the spec test, we have to subtract only 1122, however, we have different collection name. 
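// Editor's sketch (assumption: the spec constant 1122 is 1118 bytes of fixed overhead plus the
// 4-character default collection name "coll"); under that reading, the generalized formula that
// follows reproduces the spec value whenever the default namespace is used:
//   int specOpsBytes = maxMessageSizeBytes - (1118 + "coll".length()); // == maxMessageSizeBytes - 1122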
+ int opsBytes = maxMessageSizeBytes - 1118 - NAMESPACE.getCollectionName().length(); int numModels = opsBytes / maxBsonObjectSize; int remainderBytes = opsBytes % maxBsonObjectSize; List models = new ArrayList<>(nCopies( @@ -613,4 +616,9 @@ private static ClientBulkWriteResult runInTransaction(final ClientSession sessio throw throwable; } } + + @AfterAll + public static void cleanUp() { + CollectionHelper.drop(NAMESPACE); + } } diff --git a/driver-sync/src/test/functional/com/mongodb/client/DatabaseTestCase.java b/driver-sync/src/test/functional/com/mongodb/client/DatabaseTestCase.java index f2f1c5382cd..70479c4670b 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/DatabaseTestCase.java +++ b/driver-sync/src/test/functional/com/mongodb/client/DatabaseTestCase.java @@ -16,11 +16,8 @@ package com.mongodb.client; -import com.mongodb.MongoNamespace; import com.mongodb.client.test.CollectionHelper; import com.mongodb.internal.connection.ServerHelper; -import org.bson.BsonDocument; -import org.bson.BsonDocumentWrapper; import org.bson.Document; import org.bson.codecs.DocumentCodec; import org.junit.jupiter.api.AfterEach; @@ -40,7 +37,7 @@ public class DatabaseTestCase { @BeforeEach public void setUp() { - client = getMongoClient(); + client = getMongoClient(); database = client.getDatabase(getDefaultDatabaseName()); collection = database.getCollection(getClass().getName()); collection.drop(); @@ -58,23 +55,7 @@ public void tearDown() { } } - protected String getDatabaseName() { - return database.getName(); - } - - protected String getCollectionName() { - return collection.getNamespace().getCollectionName(); - } - - protected MongoNamespace getNamespace() { - return collection.getNamespace(); - } - protected CollectionHelper getCollectionHelper() { - return new CollectionHelper<>(new DocumentCodec(), getNamespace()); - } - - protected BsonDocument wrap(final Document document) { - return new BsonDocumentWrapper<>(document, new DocumentCodec()); + return new CollectionHelper<>(new DocumentCodec(), collection.getNamespace()); } } diff --git a/driver-sync/src/test/functional/com/mongodb/client/Fixture.java b/driver-sync/src/test/functional/com/mongodb/client/Fixture.java index 3b0d45dca88..8114d62e41a 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/Fixture.java +++ b/driver-sync/src/test/functional/com/mongodb/client/Fixture.java @@ -16,6 +16,7 @@ package com.mongodb.client; +import com.mongodb.ClusterFixture; import com.mongodb.ConnectionString; import com.mongodb.MongoClientSettings; import com.mongodb.ServerAddress; @@ -24,7 +25,6 @@ import java.util.List; import java.util.concurrent.TimeUnit; -import static com.mongodb.ClusterFixture.getConnectionString; import static com.mongodb.ClusterFixture.getMultiMongosConnectionString; import static com.mongodb.ClusterFixture.getServerApi; import static com.mongodb.internal.connection.ClusterDescriptionHelper.getPrimaries; @@ -34,7 +34,6 @@ * Helper class for the acceptance tests. 
*/ public final class Fixture { - private static final String DEFAULT_DATABASE_NAME = "JavaDriverTest"; private static final long MIN_HEARTBEAT_FREQUENCY_MS = 50L; private static MongoClient mongoClient; @@ -44,10 +43,23 @@ private Fixture() { } public static synchronized MongoClient getMongoClient() { - if (mongoClient == null) { - mongoClient = MongoClients.create(getMongoClientSettings()); - Runtime.getRuntime().addShutdownHook(new ShutdownHook()); + if (mongoClient != null) { + return mongoClient; } + MongoClientSettings mongoClientSettings = getMongoClientSettings(); + mongoClient = MongoClients.create(mongoClientSettings); + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + synchronized (Fixture.class) { + if (mongoClient == null) { + return; + } + if (defaultDatabase != null) { + defaultDatabase.drop(); + } + mongoClient.close(); + mongoClient = null; + } + })); return mongoClient; } @@ -59,34 +71,15 @@ public static synchronized MongoDatabase getDefaultDatabase() { } public static String getDefaultDatabaseName() { - return DEFAULT_DATABASE_NAME; - } - - static class ShutdownHook extends Thread { - @Override - public void run() { - synchronized (Fixture.class) { - if (mongoClient != null) { - if (defaultDatabase != null) { - defaultDatabase.drop(); - } - mongoClient.close(); - mongoClient = null; - } - } - } + return ClusterFixture.getDefaultDatabaseName(); } public static MongoClientSettings getMongoClientSettings() { return getMongoClientSettingsBuilder().build(); } - public static MongoClientSettings getMultiMongosMongoClientSettings() { - return getMultiMongosMongoClientSettingsBuilder().build(); - } - public static MongoClientSettings.Builder getMongoClientSettingsBuilder() { - return getMongoClientSettings(getConnectionString()); + return getMongoClientSettings(ClusterFixture.getConnectionString()); } public static MongoClientSettings.Builder getMultiMongosMongoClientSettingsBuilder() { diff --git a/driver-sync/src/test/functional/com/mongodb/client/model/search/AggregatesSearchFunctionalTest.java b/driver-sync/src/test/functional/com/mongodb/client/model/search/AggregatesSearchFunctionalTest.java new file mode 100644 index 00000000000..1513d5495bc --- /dev/null +++ b/driver-sync/src/test/functional/com/mongodb/client/model/search/AggregatesSearchFunctionalTest.java @@ -0,0 +1,262 @@ +/* + * Copyright 2008-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.mongodb.client.model.search; + +import com.mongodb.client.AggregateIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.SearchIndexModel; +import com.mongodb.internal.connection.ServerHelper; +import org.bson.BsonArray; +import org.bson.BsonDocument; +import org.bson.Document; +import org.bson.codecs.DecoderContext; +import org.bson.conversions.Bson; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +import static com.mongodb.ClusterFixture.isAtlasSearchTest; +import static com.mongodb.ClusterFixture.serverVersionAtLeast; +import static com.mongodb.MongoClientSettings.getDefaultCodecRegistry; +import static com.mongodb.client.Fixture.getMongoClient; +import static com.mongodb.client.Fixture.getPrimary; +import static com.mongodb.client.model.Aggregates.search; +import static com.mongodb.client.model.Aggregates.sort; +import static com.mongodb.client.model.Sorts.ascending; +import static com.mongodb.client.model.search.SearchOptions.searchOptions; +import static com.mongodb.client.model.search.SearchPath.fieldPath; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +public class AggregatesSearchFunctionalTest { + public static final String ATLAS_SEARCH_DATABASE = "javaVectorSearchTest"; + private static MongoClient client; + private static MongoDatabase database; + private static MongoCollection collection; + private static String searchIndexName; + + @BeforeAll + public static void beforeAll() { + assumeTrue(isAtlasSearchTest()); + assumeTrue(serverVersionAtLeast(8, 0)); + + client = getMongoClient(); + database = client.getDatabase(ATLAS_SEARCH_DATABASE); + String collectionName = AggregatesSearchFunctionalTest.class.getName(); + collection = database.getCollection(collectionName); + collection.drop(); + + // We insert documents first. The ensuing indexing guarantees that all + // data present at the time indexing commences will be indexed before + // the index enters the READY state. + insertDocuments("[\n" + + " { _id: 1 },\n" + + " { _id: 2, title: null },\n" + + " { _id: 3, title: 'test' },\n" + + " { _id: 4, title: ['test', 'xyz'] },\n" + + " { _id: 5, title: 'not test' },\n" + + " { _id: 6, description: 'desc 1' },\n" + + " { _id: 7, description: 'desc 8' },\n" + + " { _id: 8, summary: 'summary 1 one five' },\n" + + " { _id: 9, summary: 'summary 2 one two three four five' },\n" + + "]"); + + searchIndexName = "not_default"; + // Index creation can take disproportionately long, so we create it once + // for all tests. + // We set dynamic to true to index unspecified fields. Different kinds + // of fields are needed for different tests. 
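// Editor's note (illustrative): createSearchIndexes only registers the index definition; the index is
// not immediately queryable, which is why the waitForIndex helper defined later in this class polls
// listSearchIndexes() until the index reports queryable=true before any test runs. A hypothetical
// guard built on its boolean result could look like:
//   boolean ready = waitForIndex(collection, searchIndexName);
//   assumeTrue(ready, "search index never became queryable"); // the test below calls waitForIndex directly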
+ collection.createSearchIndexes(Arrays.asList(new SearchIndexModel(searchIndexName, Document.parse( + "{\n" + + " \"mappings\": {\n" + + " \"dynamic\": true,\n" + + " \"fields\": {\n" + + " \"title\": {\n" + + " \"type\": \"token\"\n" + + " },\n" + + " \"description\": {\n" + + " \"analyzer\": \"lucene.keyword\"," + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}")))); + waitForIndex(collection, searchIndexName); + } + + @AfterAll + public static void afterAll() { + if (collection != null) { + collection.drop(); + } + try { + ServerHelper.checkPool(getPrimary()); + } catch (InterruptedException e) { + // ignore + } + } + + @Test + public void testExists() { + List pipeline = Arrays.asList( + search(SearchOperator.exists(fieldPath("title")), + searchOptions().index(searchIndexName))); + assertResults(pipeline, "[\n" + + " { _id: 2, title: null },\n" + + " { _id: 3, title: 'test' },\n" + + " { _id: 4, title: ['test', 'xyz'] },\n" + + " { _id: 5, title: 'not test' },\n" + + "]"); + } + + @Test + public void testEquals() { + List pipeline1 = Arrays.asList( + search(SearchOperator.equals(fieldPath("title"), "test"), + searchOptions().index(searchIndexName))); + assertResults(pipeline1, "[\n" + + " { _id: 3, title: 'test' }\n" + + " { _id: 4, title: ['test', 'xyz'] }\n" + + "]"); + + // equals null does not match non-existent fields + List pipeline2 = Arrays.asList( + search(SearchOperator.equalsNull(fieldPath("title")), + searchOptions().index(searchIndexName))); + assertResults(pipeline2, "[\n" + + " { _id: 2, title: null }\n" + + "]"); + } + + @Test + public void testMoreLikeThis() { + List pipeline = Arrays.asList( + search(SearchOperator.moreLikeThis(Document.parse("{ summary: 'summary' }").toBsonDocument()), + searchOptions().index(searchIndexName))); + assertResults(pipeline, "[\n" + + " { _id: 8, summary: 'summary 1 one five' },\n" + + " { _id: 9, summary: 'summary 2 one two three four five' },\n" + + "]"); + } + + @Test + public void testRegex() { + List pipeline = Arrays.asList( + search(SearchOperator.regex(fieldPath("description"), "des[c]+ <1-4>"), + searchOptions().index(searchIndexName))); + assertResults(pipeline, "[\n" + + " { _id: 6, description: 'desc 1' },\n" + + "]"); + } + + @Test + public void testWildcard() { + List pipeline = Arrays.asList( + search(SearchOperator.wildcard(fieldPath("description"), "desc*"), + searchOptions().index(searchIndexName))); + assertResults(pipeline, "[\n" + + " { _id: 6, description: 'desc 1' },\n" + + " { _id: 7, description: 'desc 8' },\n" + + "]"); + } + + @Test + public void testPhrase() { + List pipeline = Arrays.asList( + search(SearchOperator.phrase(fieldPath("summary"), "one five").slop(2), + searchOptions().index(searchIndexName))); + assertResults(pipeline, "[\n" + + " { _id: 8, summary: 'summary 1 one five' },\n" + + "]"); + } + + @Test + public void testQueryString() { + List pipeline = Arrays.asList( + search(SearchOperator.queryString(fieldPath("summary"), "summary: one AND summary: three"), + searchOptions().index(searchIndexName))); + assertResults(pipeline, "[\n" + + " { _id: 9, summary: 'summary 2 one two three four five' },\n" + + "]"); + } + + private static void insertDocuments(final String s) { + List documents = BsonArray.parse(s).stream() + .map(v -> new Document(v.asDocument())) + .collect(Collectors.toList()); + collection.insertMany(documents); + } + + private static void assertResults(final List pipeline, final String expectedResultsAsString) { + ArrayList pipeline2 = new ArrayList<>(pipeline); 
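// Editor's note (illustrative): $search returns documents in relevance order, so a deterministic sort
// on _id is appended before comparison; this lets the expected JSON arrays in the tests above be
// written in a fixed order. The comparison that follows is equivalent to:
//   pipeline2.add(sort(ascending("_id")));
//   assertEquals(parseToList(expectedResultsAsString), aggregate(pipeline2));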
+ pipeline2.add(sort(ascending("_id"))); + + List expectedResults = parseToList(expectedResultsAsString); + List actualResults = aggregate(pipeline2); + assertEquals(expectedResults, actualResults); + } + + private static List aggregate(final List stages) { + AggregateIterable result = collection.aggregate(stages); + List results = new ArrayList<>(); + result.forEach(r -> results.add(r.toBsonDocument())); + return results; + } + + public static List parseToList(final String s) { + return BsonArray.parse(s).stream().map(v -> toBsonDocument(v.asDocument())).collect(Collectors.toList()); + } + + public static BsonDocument toBsonDocument(final BsonDocument bsonDocument) { + return getDefaultCodecRegistry().get(BsonDocument.class).decode(bsonDocument.asBsonReader(), DecoderContext.builder().build()); + } + + public static boolean waitForIndex(final MongoCollection collection, final String indexName) { + long startTime = System.nanoTime(); + long timeoutNanos = TimeUnit.SECONDS.toNanos(60); + while (System.nanoTime() - startTime < timeoutNanos) { + Document indexRecord = StreamSupport.stream(collection.listSearchIndexes().spliterator(), false) + .filter(index -> indexName.equals(index.getString("name"))) + .findAny().orElse(null); + if (indexRecord != null) { + if ("FAILED".equals(indexRecord.getString("status"))) { + throw new RuntimeException("Search index has failed status."); + } + if (indexRecord.getBoolean("queryable")) { + return true; + } + } + try { + Thread.sleep(100); // busy-wait, avoid in production + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException(e); + } + } + return false; + } + +} diff --git a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedCrudHelper.java b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedCrudHelper.java index 5c925d97272..735c35dc9ed 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedCrudHelper.java +++ b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedCrudHelper.java @@ -134,6 +134,7 @@ import static java.util.Arrays.asList; import static java.util.Collections.singleton; import static java.util.Objects.requireNonNull; +import static java.util.Optional.ofNullable; import static java.util.stream.Collectors.toList; @SuppressWarnings("deprecation") @@ -1607,7 +1608,7 @@ private static SearchIndexModel toIndexSearchModel(final BsonValue bsonValue) { BsonDocument model = bsonValue.asDocument(); BsonDocument definition = model.getDocument("definition"); SearchIndexType type = model.containsKey("type") ? getSearchIndexType(model.getString("type")) : null; - String name = Optional.ofNullable(model.getString("name", null)) + String name = ofNullable(model.getString("name", null)) .map(BsonString::getValue). 
orElse(null); return new SearchIndexModel(name, definition, type); @@ -1616,7 +1617,7 @@ private static SearchIndexModel toIndexSearchModel(final BsonValue bsonValue) { OperationResult executeListSearchIndexes(final BsonDocument operation) { MongoCollection collection = getMongoCollection(operation); - Optional arguments = Optional.ofNullable(operation.getOrDefault("arguments", null)).map(BsonValue::asDocument); + Optional arguments = ofNullable(operation.getOrDefault("arguments", null)).map(BsonValue::asDocument); if (arguments.isPresent()) { ListSearchIndexesIterable iterable = createListSearchIndexesIterable(collection, arguments.get()); @@ -1634,7 +1635,7 @@ OperationResult executeListSearchIndexes(final BsonDocument operation) { private ListSearchIndexesIterable createListSearchIndexesIterable(final MongoCollection collection, final BsonDocument arguments) { - Optional name = Optional.ofNullable(arguments.getOrDefault("name", null)) + Optional name = ofNullable(arguments.getOrDefault("name", null)) .map(BsonValue::asString).map(BsonString::getValue); ListSearchIndexesIterable iterable = collection.listSearchIndexes(BsonDocument.class); @@ -1930,6 +1931,9 @@ private static ClientReplaceOneOptions getClientReplaceOneOptions(final BsonDocu case "upsert": options.upsert(argument.asBoolean().getValue()); break; + case "sort": + options.sort(argument.asDocument()); + break; default: throw new UnsupportedOperationException(format("Unsupported argument: key=%s, argument=%s", key, argument)); } @@ -1938,7 +1942,16 @@ private static ClientReplaceOneOptions getClientReplaceOneOptions(final BsonDocu } private static ClientUpdateOneOptions getClientUpdateOneOptions(final BsonDocument arguments) { - return fillAbstractClientUpdateOptions(new ConcreteClientUpdateOneOptions(), arguments); + ConcreteClientUpdateOneOptions options = new ConcreteClientUpdateOneOptions(); + + if (arguments.containsKey("sort")) { + BsonDocument sort = arguments + .remove("sort") + .asDocument(); + options.sort(sort); + } + + return fillAbstractClientUpdateOptions(options, arguments); } private static ClientUpdateManyOptions getClientUpdateManyOptions(final BsonDocument arguments) { diff --git a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedSyncTest.java b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedSyncTest.java index 37db7cfe907..afcc8e4f1a3 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedSyncTest.java +++ b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedSyncTest.java @@ -25,6 +25,12 @@ import com.mongodb.client.gridfs.GridFSBuckets; import com.mongodb.client.internal.ClientEncryptionImpl; import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.lang.NonNull; +import org.junit.jupiter.params.provider.Arguments; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.Collection; public abstract class UnifiedSyncTest extends UnifiedTest { protected UnifiedSyncTest() { @@ -44,4 +50,9 @@ protected GridFSBucket createGridFSBucket(final MongoDatabase database) { protected ClientEncryption createClientEncryption(final MongoClient keyVaultClient, final ClientEncryptionSettings clientEncryptionSettings) { return new ClientEncryptionImpl(keyVaultClient, clientEncryptionSettings); } + + @NonNull + protected static Collection getTestData(final String directory) throws URISyntaxException, IOException { + return getTestData(directory, false); + } } diff --git 
a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTest.java b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTest.java index 7ee16484df1..a437084ac1d 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTest.java +++ b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTest.java @@ -62,9 +62,11 @@ import java.io.File; import java.io.IOException; import java.net.URISyntaxException; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -81,6 +83,8 @@ import static com.mongodb.client.test.CollectionHelper.getCurrentClusterTime; import static com.mongodb.client.test.CollectionHelper.killAllSessions; import static com.mongodb.client.unified.RunOnRequirementsMatcher.runOnRequirementsMet; +import static com.mongodb.client.unified.UnifiedTestModifications.Modifier; +import static com.mongodb.client.unified.UnifiedTestModifications.applyCustomizations; import static com.mongodb.client.unified.UnifiedTestModifications.testDef; import static java.util.Collections.singletonList; import static java.util.stream.Collectors.toList; @@ -91,6 +95,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.abort; import static org.junit.jupiter.api.Assumptions.assumeFalse; import static org.junit.jupiter.api.Assumptions.assumeTrue; import static util.JsonPoweredTestHelper.getTestDocument; @@ -101,6 +106,10 @@ public abstract class UnifiedTest { private static final Set PRESTART_POOL_ASYNC_WORK_MANAGER_FILE_DESCRIPTIONS = Collections.singleton( "wait queue timeout errors include details about checked out connections"); + public static final int RETRY_ATTEMPTS = 3; + public static final int FORCE_FLAKY_ATTEMPTS = 10; + private static final Set ATTEMPTED_TESTS_TO_HENCEFORTH_IGNORE = new HashSet<>(); + @Nullable private String fileDescription; private String schemaVersion; @@ -156,32 +165,49 @@ public Entities getEntities() { } @NonNull - protected static Collection getTestData(final String directory) throws URISyntaxException, IOException { + protected static Collection getTestData(final String directory, final boolean isReactive) + throws URISyntaxException, IOException { List data = new ArrayList<>(); for (File file : getTestFiles("/" + directory + "/")) { BsonDocument fileDocument = getTestDocument(file); - for (BsonValue cur : fileDocument.getArray("tests")) { - data.add(UnifiedTest.createTestData(directory, fileDocument, cur.asDocument())); + + final BsonDocument testDocument = cur.asDocument(); + String testDescription = testDocument.getString("description").getValue(); + String fileDescription = fileDocument.getString("description").getValue(); + TestDef testDef = testDef(directory, fileDescription, testDescription, isReactive); + applyCustomizations(testDef); + + boolean forceFlaky = testDef.wasAssignedModifier(Modifier.FORCE_FLAKY); + boolean retry = forceFlaky || testDef.wasAssignedModifier(Modifier.RETRY); + + int attempts; + if (retry) { + attempts = forceFlaky ? 
FORCE_FLAKY_ATTEMPTS : RETRY_ATTEMPTS; + } else { + attempts = 1; + } + + for (int attempt = 1; attempt <= attempts; attempt++) { + String testName = MessageFormat.format("{0}: {1}", fileDescription, testDescription); + data.add(Arguments.of( + testName, + fileDescription, + testDescription, + directory, + attempt, + attempts, + fileDocument.getString("schemaVersion").getValue(), + fileDocument.getArray("runOnRequirements", null), + fileDocument.getArray("createEntities", new BsonArray()), + fileDocument.getArray("initialData", new BsonArray()), + testDocument.clone())); + } } } return data; } - @NonNull - private static Arguments createTestData( - final String directory, final BsonDocument fileDocument, final BsonDocument testDocument) { - return Arguments.of( - fileDocument.getString("description").getValue(), - testDocument.getString("description").getValue(), - directory, - fileDocument.getString("schemaVersion").getValue(), - fileDocument.getArray("runOnRequirements", null), - fileDocument.getArray("createEntities", new BsonArray()), - fileDocument.getArray("initialData", new BsonArray()), - testDocument); - } - protected BsonDocument getDefinition() { return definition; } @@ -194,9 +220,12 @@ protected BsonDocument getDefinition() { @BeforeEach public void setUp( + final String testName, @Nullable final String fileDescription, @Nullable final String testDescription, @Nullable final String directoryName, + final int attemptNumber, + final int totalAttempts, final String schemaVersion, @Nullable final BsonArray runOnRequirements, final BsonArray entitiesArray, @@ -218,9 +247,9 @@ public void setUp( ignoreExtraEvents = false; if (directoryName != null && fileDescription != null && testDescription != null) { testDef = testDef(directoryName, fileDescription, testDescription, isReactive()); - UnifiedTestModifications.doSkips(testDef); + applyCustomizations(testDef); - boolean skip = testDef.wasAssignedModifier(UnifiedTestModifications.Modifier.SKIP); + boolean skip = testDef.wasAssignedModifier(Modifier.SKIP); assumeFalse(skip, "Skipping test"); } skips(fileDescription, testDescription); @@ -295,8 +324,9 @@ protected void postCleanUp(final TestDef testDef) { } /** - * This method is called once per {@link #setUp(String, String, String, String, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonDocument)}, - * unless {@link #setUp(String, String, String, String, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonDocument)} fails unexpectedly. + * This method is called once per + * {@link #setUp(String, String, String, String, int, int, String, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonDocument)}, unless + * {@link #setUp(String, String, String, String, int, int, String, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonArray, org.bson.BsonDocument)} fails unexpectedly. 
*/ protected void skips(final String fileDescription, final String testDescription) { } @@ -305,40 +335,72 @@ protected boolean isReactive() { return false; } - @ParameterizedTest(name = "{0}: {1}") + @ParameterizedTest(name = "{0}") @MethodSource("data") public void shouldPassAllOutcomes( + final String testName, @Nullable final String fileDescription, @Nullable final String testDescription, @Nullable final String directoryName, + final int attemptNumber, + final int totalAttempts, final String schemaVersion, @Nullable final BsonArray runOnRequirements, final BsonArray entitiesArray, final BsonArray initialData, final BsonDocument definition) { - BsonArray operations = definition.getArray("operations"); - for (int i = 0; i < operations.size(); i++) { - BsonValue cur = operations.get(i); - assertOperation(rootContext, cur.asDocument(), i); + boolean forceFlaky = testDef.wasAssignedModifier(Modifier.FORCE_FLAKY); + if (!forceFlaky) { + boolean ignoreThisTest = ATTEMPTED_TESTS_TO_HENCEFORTH_IGNORE.contains(testName); + assumeFalse(ignoreThisTest, "Skipping a retryable test that already succeeded"); + // The attempt is what counts, since a test may fail with + // something like "ignored", and would not be retried. + // Only failures should trigger another attempt. + ATTEMPTED_TESTS_TO_HENCEFORTH_IGNORE.add(testName); } + try { + BsonArray operations = definition.getArray("operations"); + for (int i = 0; i < operations.size(); i++) { + BsonValue cur = operations.get(i); + assertOperation(rootContext, cur.asDocument(), i); + } - if (definition.containsKey("outcome")) { - assertOutcome(rootContext); - } + if (definition.containsKey("outcome")) { + assertOutcome(rootContext); + } - if (definition.containsKey("expectEvents")) { - compareEvents(rootContext, definition); - } + if (definition.containsKey("expectEvents")) { + compareEvents(rootContext, definition); + } - if (definition.containsKey("expectLogMessages")) { - ArrayList tweaks = new ArrayList<>(singletonList( - // `LogMessage.Entry.Name.OPERATION` is not supported, therefore we skip matching its value - LogMatcher.Tweak.skip(LogMessage.Entry.Name.OPERATION))); - if (getMongoClientSettings().getClusterSettings() - .getHosts().stream().anyMatch(serverAddress -> serverAddress instanceof UnixServerAddress)) { - tweaks.add(LogMatcher.Tweak.skip(LogMessage.Entry.Name.SERVER_PORT)); + if (definition.containsKey("expectLogMessages")) { + ArrayList tweaks = new ArrayList<>(singletonList( + // `LogMessage.Entry.Name.OPERATION` is not supported, therefore we skip matching its value + LogMatcher.Tweak.skip(LogMessage.Entry.Name.OPERATION))); + if (getMongoClientSettings().getClusterSettings() + .getHosts().stream().anyMatch(serverAddress -> serverAddress instanceof UnixServerAddress)) { + tweaks.add(LogMatcher.Tweak.skip(LogMessage.Entry.Name.SERVER_PORT)); + } + compareLogMessages(rootContext, definition, tweaks); + } + } catch (TestAbortedException e) { + // if a test is ignored, we do not retry + throw e; + } catch (Throwable e) { + if (forceFlaky) { + throw e; + } + if (testDef != null && !testDef.matchesThrowable(e)) { + // if the throwable is not matched, test definitions were not intended to apply; rethrow it + throw e; } - compareLogMessages(rootContext, definition, tweaks); + boolean isLastAttempt = attemptNumber == totalAttempts; + if (isLastAttempt) { + throw e; + } + + ATTEMPTED_TESTS_TO_HENCEFORTH_IGNORE.remove(testName); + abort("Ignoring failure and retrying attempt " + attemptNumber); } } diff --git 
a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestFailureValidator.java b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestFailureValidator.java index 88458f8af8e..0472ef8e6ce 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestFailureValidator.java +++ b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestFailureValidator.java @@ -36,9 +36,12 @@ final class UnifiedTestFailureValidator extends UnifiedSyncTest { @Override @BeforeEach public void setUp( + final String testName, @Nullable final String fileDescription, @Nullable final String testDescription, final String directoryName, + final int attemptNumber, + final int totalAttempts, final String schemaVersion, @Nullable final BsonArray runOnRequirements, final BsonArray entitiesArray, @@ -46,9 +49,12 @@ public void setUp( final BsonDocument definition) { try { super.setUp( + testName, fileDescription, testDescription, directoryName, + attemptNumber, + totalAttempts, schemaVersion, runOnRequirements, entitiesArray, @@ -63,9 +69,12 @@ public void setUp( @ParameterizedTest @MethodSource("data") public void shouldPassAllOutcomes( + final String testName, @Nullable final String fileDescription, @Nullable final String testDescription, @Nullable final String directoryName, + final int attemptNumber, + final int totalAttempts, final String schemaVersion, @Nullable final BsonArray runOnRequirements, final BsonArray entitiesArray, @@ -74,9 +83,12 @@ public void shouldPassAllOutcomes( if (exception == null) { try { super.shouldPassAllOutcomes( + testName, fileDescription, testDescription, directoryName, + attemptNumber, + totalAttempts, schemaVersion, runOnRequirements, entitiesArray, diff --git a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestModifications.java b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestModifications.java index 5184fd699be..df9706b8dd7 100644 --- a/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestModifications.java +++ b/driver-sync/src/test/functional/com/mongodb/client/unified/UnifiedTestModifications.java @@ -16,11 +16,12 @@ package com.mongodb.client.unified; -import com.mongodb.assertions.Assertions; +import org.opentest4j.AssertionFailedError; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.function.Function; import java.util.function.Supplier; import static com.mongodb.ClusterFixture.isDataLakeTest; @@ -29,14 +30,17 @@ import static com.mongodb.ClusterFixture.isSharded; import static com.mongodb.ClusterFixture.serverVersionLessThan; import static com.mongodb.assertions.Assertions.assertNotNull; +import static com.mongodb.assertions.Assertions.assertTrue; import static com.mongodb.client.unified.UnifiedTestModifications.Modifier.IGNORE_EXTRA_EVENTS; +import static com.mongodb.client.unified.UnifiedTestModifications.Modifier.RETRY; import static com.mongodb.client.unified.UnifiedTestModifications.Modifier.SKIP; import static com.mongodb.client.unified.UnifiedTestModifications.Modifier.SLEEP_AFTER_CURSOR_CLOSE; import static com.mongodb.client.unified.UnifiedTestModifications.Modifier.SLEEP_AFTER_CURSOR_OPEN; import static com.mongodb.client.unified.UnifiedTestModifications.Modifier.WAIT_FOR_BATCH_CURSOR_CREATION; +import static java.lang.String.format; public final class UnifiedTestModifications { - public static void doSkips(final TestDef def) { + public static void applyCustomizations(final TestDef def) { // 
atlas-data-lake @@ -45,7 +49,7 @@ public static void doSkips(final TestDef def) { .directory("atlas-data-lake-testing"); // change-streams - def.skipNoncompliantReactive("error required from change stream initialization") // TODO reason? + def.skipNoncompliantReactive("error required from change stream initialization") // TODO-JAVA-5711 reason? .test("change-streams", "change-streams", "Test with document comment - pre 4.4"); def.skipNoncompliantReactive("event sensitive tests. We can't guarantee the amount of GetMore commands sent in the reactive driver") .test("change-streams", "change-streams", "Test that comment is set on getMore") @@ -61,17 +65,17 @@ public static void doSkips(final TestDef def) { // client-side-operation-timeout (CSOT) - // TODO + // TODO-JAVA-5712 // collection-management - def.skipNoncompliant("") // TODO reason? + def.skipNoncompliant("") // TODO-JAVA-5711 reason? .test("collection-management", "modifyCollection-pre_and_post_images", "modifyCollection to changeStreamPreAndPostImages enabled"); // command-logging-and-monitoring - def.skipNoncompliant("TODO") - .when(() -> !def.isReactive() && isServerlessTest()) // TODO why reactive check? + def.skipNoncompliant("") // TODO-JAVA-5711 + .when(() -> !def.isReactive() && isServerlessTest()) // TODO-JAVA-5711 why reactive check? .directory("command-logging") .directory("command-monitoring"); @@ -84,7 +88,7 @@ public static void doSkips(final TestDef def) { // connection-monitoring-and-pooling - // TODO reason, jira + // TODO-JAVA-5711 reason, jira // added as part of https://jira.mongodb.org/browse/JAVA-4976 , but unknown Jira to complete // The implementation of the functionality related to clearing the connection pool before closing the connection // will be carried out once the specification is finalized and ready. @@ -259,6 +263,7 @@ public static final class TestDef { private final boolean reactive; private final List modifiers = new ArrayList<>(); + private Function matchesThrowable; private TestDef(final String dir, final String file, final String test, final boolean reactive) { this.dir = assertNotNull(dir); @@ -274,7 +279,7 @@ private TestDef(final String dir, final String file, final String test, final bo * @param ticket reason for skipping the test; must start with a Jira URL */ public TestApplicator skipJira(final String ticket) { - Assertions.assertTrue(ticket.startsWith("https://jira.mongodb.org/browse/JAVA-")); + assertTrue(ticket.startsWith("https://jira.mongodb.org/browse/JAVA-")); return new TestApplicator(this, ticket, SKIP); } @@ -322,6 +327,21 @@ public TestApplicator skipUnknownReason(final String reason) { return new TestApplicator(this, reason, SKIP); } + /** + * The test will be retried, for the reason provided + */ + public TestApplicator retry(final String reason) { + return new TestApplicator(this, reason, RETRY); + } + + /** + * The reactive test will be retried, for the reason provided + */ + public TestApplicator retryReactive(final String reason) { + return new TestApplicator(this, reason, RETRY) + .when(this::isReactive); + } + public TestApplicator modify(final Modifier... 
modifiers) { return new TestApplicator(this, null, modifiers); } @@ -333,6 +353,13 @@ public boolean isReactive() { public boolean wasAssignedModifier(final Modifier modifier) { return this.modifiers.contains(modifier); } + + public boolean matchesThrowable(final Throwable e) { + if (matchesThrowable != null) { + return matchesThrowable.apply(e); + } + return false; + } } /** @@ -340,17 +367,19 @@ public boolean wasAssignedModifier(final Modifier modifier) { */ public static final class TestApplicator { private final TestDef testDef; - private final List modifiersToApply; private Supplier precondition; private boolean matchWasPerformed = false; + private final List modifiersToApply; + private Function matchesThrowable; + private TestApplicator( final TestDef testDef, final String reason, final Modifier... modifiersToApply) { this.testDef = testDef; this.modifiersToApply = Arrays.asList(modifiersToApply); - if (this.modifiersToApply.contains(SKIP)) { + if (this.modifiersToApply.contains(SKIP) || this.modifiersToApply.contains(RETRY)) { assertNotNull(reason); } } @@ -362,6 +391,7 @@ private TestApplicator onMatch(final boolean match) { } if (match) { this.testDef.modifiers.addAll(this.modifiersToApply); + this.testDef.matchesThrowable = this.matchesThrowable; } return this; } @@ -453,6 +483,26 @@ public TestApplicator when(final Supplier precondition) { this.precondition = precondition; return this; } + + /** + * The modification, if it is a RETRY, will only be applied when the + * failure message contains the provided message fragment. If an + * {@code AssertionFailedError} occurs, and has a cause, the cause's + * message will be checked. Otherwise, the throwable will be checked. + */ + public TestApplicator whenFailureContains(final String messageFragment) { + assertTrue(this.modifiersToApply.contains(RETRY), + format("Modifier %s was not specified before calling whenFailureContains", RETRY)); + this.matchesThrowable = (final Throwable e) -> { + // inspect the cause for failed assertions with a cause + if (e instanceof AssertionFailedError && e.getCause() != null) { + return e.getCause().getMessage().contains(messageFragment); + } else { + return e.getMessage().contains(messageFragment); + } + }; + return this; + } } public enum Modifier { @@ -478,5 +528,17 @@ public enum Modifier { * Skip the test. */ SKIP, + /** + * Ignore results and retry the test on failure. Will not repeat the + * test if the test succeeds. Multiple copies of the test are used to + * facilitate retries. + */ + RETRY, + /** + * The test will be retried multiple times, without the results being + * ignored. This is a helper that can be used, in patches, to check + * if certain tests are (still) flaky. 
+ */ + FORCE_FLAKY, } } diff --git a/driver-sync/src/test/functional/com/mongodb/internal/connection/OidcAuthenticationProseTests.java b/driver-sync/src/test/functional/com/mongodb/internal/connection/OidcAuthenticationProseTests.java index 2d82ecf3d92..b6a23a576ce 100644 --- a/driver-sync/src/test/functional/com/mongodb/internal/connection/OidcAuthenticationProseTests.java +++ b/driver-sync/src/test/functional/com/mongodb/internal/connection/OidcAuthenticationProseTests.java @@ -39,8 +39,6 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.function.Executable; -import org.opentest4j.AssertionFailedError; import java.io.IOException; import java.lang.reflect.Field; @@ -70,6 +68,7 @@ import static com.mongodb.MongoCredential.OidcCallbackResult; import static com.mongodb.MongoCredential.TOKEN_RESOURCE_KEY; import static com.mongodb.assertions.Assertions.assertNotNull; +import static com.mongodb.testing.MongoAssertions.assertCause; import static java.lang.System.getenv; import static java.util.Arrays.asList; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -826,6 +825,7 @@ private MongoClientSettings createSettings( String cleanedConnectionString = callback == null ? connectionString : connectionString .replace("ENVIRONMENT:azure,", "") .replace("ENVIRONMENT:gcp,", "") + .replace("&authMechanismProperties=ENVIRONMENT:k8s", "") .replace("ENVIRONMENT:test,", ""); return createSettings(cleanedConnectionString, callback, commandListener, OIDC_CALLBACK_KEY); } @@ -922,20 +922,6 @@ private void performFind(final MongoClient mongoClient) { .first(); } - private static void assertCause( - final Class expectedCause, final String expectedMessageFragment, final Executable e) { - Throwable cause = assertThrows(Throwable.class, e); - while (cause.getCause() != null) { - cause = cause.getCause(); - } - if (!cause.getMessage().contains(expectedMessageFragment)) { - throw new AssertionFailedError("Unexpected message: " + cause.getMessage(), cause); - } - if (!expectedCause.isInstance(cause)) { - throw new AssertionFailedError("Unexpected cause: " + cause.getClass(), assertThrows(Throwable.class, e)); - } - } - protected void delayNextFind() { try (MongoClient client = createMongoClient(Fixture.getMongoClientSettings())) { @@ -1042,6 +1028,8 @@ private OidcCallbackResult callback(final OidcCallbackContext context) { c = OidcAuthenticator.getAzureCallback(credential); } else if (oidcEnv.contains("gcp")) { c = OidcAuthenticator.getGcpCallback(credential); + } else if (oidcEnv.contains("k8s")) { + c = OidcAuthenticator.getK8sCallback(); } else { c = getProseTestCallback(); } diff --git a/driver-workload-executor/src/main/com/mongodb/workload/WorkloadExecutor.java b/driver-workload-executor/src/main/com/mongodb/workload/WorkloadExecutor.java index 0e995cb34fd..7aba736aebc 100644 --- a/driver-workload-executor/src/main/com/mongodb/workload/WorkloadExecutor.java +++ b/driver-workload-executor/src/main/com/mongodb/workload/WorkloadExecutor.java @@ -98,18 +98,24 @@ protected boolean terminateLoop() { BsonArray createEntities = fileDocument.getArray("createEntities", new BsonArray()); BsonArray initialData = fileDocument.getArray("initialData", new BsonArray()); unifiedTest.setUp( + "", null, null, null, + 1, + 1, schemaVersion, runOnRequirements, createEntities, initialData, testDocument); unifiedTest.shouldPassAllOutcomes( + "", null, null, null, + 1, + 1, schemaVersion, runOnRequirements, createEntities, 
diff --git a/gradle.properties b/gradle.properties index 12f1750c442..d3514e32f68 100644 --- a/gradle.properties +++ b/gradle.properties @@ -16,6 +16,7 @@ org.gradle.daemon=true org.gradle.jvmargs=-Duser.country=US -Duser.language=en +## NOTE: This property is also used to generate scala compile versions in BOM. scalaVersions=2.11.12,2.12.20,2.13.15 defaultScalaVersions=2.13.15 runOnceTasks=clean,release diff --git a/gradle/publish.gradle b/gradle/publish.gradle index 9add25f9261..fa56f09f138 100644 --- a/gradle/publish.gradle +++ b/gradle/publish.gradle @@ -76,9 +76,10 @@ ext { def projectNamesNotToBePublished = ["driver-benchmarks", "driver-lambda", "driver-workload-executor", "graalvm-native-image-app", "util", "spock", "taglets"] def publishedProjects = subprojects.findAll { !projectNamesNotToBePublished.contains(it.name) } -def scalaProjects = publishedProjects.findAll { it.name.contains('scala') } -def javaProjects = publishedProjects - scalaProjects -def projectsWithManifest = publishedProjects.findAll {it.name != 'driver-legacy' } +def bomProjects = project(":bom") +def scalaProjects = publishedProjects.findAll { it.name.contains('scala') } - bomProjects +def javaProjects = publishedProjects - scalaProjects - bomProjects +def projectsWithManifest = publishedProjects.findAll {it.name != 'driver-legacy' } - bomProjects configure(javaProjects) { project -> apply plugin: 'maven-publish' @@ -169,3 +170,98 @@ configure(projectsWithManifest) { project -> jar configureJarManifestAttributes(project) } } + +configure(bomProjects) { project -> + apply plugin: 'maven-publish' + apply plugin: 'signing' + apply plugin: 'java-platform' + + // Get the Scala versions from the project property. Only major.minor versions. + def scalaVersions = project.findProperty("scalaVersions")?.split(",") + ?.collect { it.split("\\.")[0] + "." + it.split("\\.")[1] } + + assert scalaVersions != null && !scalaVersions.isEmpty() : "Scala versions must be provided as a comma-separated list" + + " in the 'scalaVersions' project property" + + publishing { + publications { + mavenJava(MavenPublication) { + artifactId = "bom".equals(project.archivesBaseName) ? "mongodb-driver-bom" : project.archivesBaseName + from components.javaPlatform + + // Modify the generated POM to add multiple compile versions of driver-scala or bson-scala. + // Scala multi-version support generates only one for BOM. + pom.withXml { + def pomXml = asNode() + + def dependencyManagementNode = pomXml.get("dependencyManagement")?.getAt(0) + assert dependencyManagementNode : " node not found in the generated BOM POM" + + def dependenciesNode = dependencyManagementNode.get("dependencies")?.getAt(0) + assert dependenciesNode : " node not found inside " + + // Check if scala dependencies are present in the BOM. + def existingScalaDeps = dependenciesNode.children().findAll { + it.artifactId.text().contains("scala") + } + + existingScalaDeps.each { existingDep -> + String groupId = existingDep.groupId.text() + String originalArtifactId = existingDep.artifactId.text() + String artifactVersion = existingDep.version.text() + + // Add multiple versions with Scala suffixes for each Scala-related dependency. + scalaVersions.each { scalaVersion -> + // Remove existing Scala version suffix (_2.12, _2.13, etc.) + String baseArtifactId = originalArtifactId.replaceAll("_\\d+\\.\\d+(\\.\\d+)?\$", "") + String newArtifactId = "${baseArtifactId}_${scalaVersion}" + + // Skip if Scala dependency with this scalaVersion already exists in BOM. 
+ if(newArtifactId != originalArtifactId) { + def dependencyNode = dependenciesNode.appendNode("dependency") + dependencyNode.appendNode("groupId", groupId) + dependencyNode.appendNode("artifactId", newArtifactId) + dependencyNode.appendNode("version", artifactVersion) + } + } + } + } + } + } + + repositories configureMavenRepositories(project) + } + + afterEvaluate { + publishing.publications.mavenJava.pom configurePom(project) + signing { + useInMemoryPgpKeys(findProperty("signingKey"), findProperty("signingPassword")) + sign publishing.publications.mavenJava + } + } + + tasks.withType(GenerateModuleMetadata) { + enabled = false + } + + tasks.withType(GenerateMavenPom).configureEach { + doLast { + def xml = file(destination).text + def root = new groovy.xml.XmlSlurper().parseText(xml) + + def dependencies = root.dependencyManagement.dependencies.children() + assert dependencies.children().size() > 1 : "BOM must contain more then one element:\n$destination" + + dependencies.each { dependency -> + def groupId = dependency.groupId.text() + assert groupId.startsWith('org.mongodb') : "BOM must contain only 'org.mongodb' dependencies, but found '$groupId':\n$destination" + /* The and tags should be omitted in BOM dependencies. + This ensures that consuming projects have the flexibility to decide whether a + dependency is optional in their context. The BOM's role is to provide version information, + not to dictate inclusion or exclusion of dependencies. */ + assert dependency.scope.size() == 0 : "BOM must not contain elements in dependency:\n$destination" + assert dependency.optional.size() == 0 : "BOM must not contain elements in dependency:\n$destination" + } + } + } +} diff --git a/mongodb-crypt/build.gradle.kts b/mongodb-crypt/build.gradle.kts index 6c07a315185..72d7fd47292 100644 --- a/mongodb-crypt/build.gradle.kts +++ b/mongodb-crypt/build.gradle.kts @@ -60,7 +60,7 @@ val jnaLibsPath: String = System.getProperty("jnaLibsPath", "${jnaResourcesDir}$ val jnaResources: String = System.getProperty("jna.library.path", jnaLibsPath) // Download jnaLibs that match the git tag or revision to jnaResourcesBuildDir -val downloadRevision = "9a88ac5698e8e3ffcd6580b98c247f0126f26c40" // r1.11.0 +val downloadRevision = "1.13.0" val binariesArchiveName = "libmongocrypt-java.tar.gz" /** diff --git a/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/CAPI.java b/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/CAPI.java index b8e2cacc677..075bbe15c9c 100644 --- a/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/CAPI.java +++ b/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/CAPI.java @@ -486,6 +486,14 @@ public interface mongocrypt_random_fn extends Callback { public static native void mongocrypt_setopt_bypass_query_analysis (mongocrypt_t crypt); + /** + * Opt-into enabling sending multiple collection info documents. + * + * @param crypt The @ref mongocrypt_t object to update + */ + public static native void + mongocrypt_setopt_enable_multiple_collinfo (mongocrypt_t crypt); + /** * Set the contention factor used for explicit encryption. * The contention factor is only used for indexed Queryable Encryption. 
diff --git a/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/MongoCryptImpl.java b/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/MongoCryptImpl.java index 37f2263da69..d365f7f7671 100644 --- a/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/MongoCryptImpl.java +++ b/mongodb-crypt/src/main/com/mongodb/internal/crypt/capi/MongoCryptImpl.java @@ -65,6 +65,7 @@ import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_bypass_query_analysis; import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_crypto_hook_sign_rsaes_pkcs1_v1_5; import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_crypto_hooks; +import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_enable_multiple_collinfo; import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_encrypted_field_config_map; import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_kms_provider_aws; import static com.mongodb.internal.crypt.capi.CAPI.mongocrypt_setopt_kms_provider_local; @@ -120,6 +121,8 @@ class MongoCryptImpl implements MongoCrypt { logCallback = new LogCallback(); + mongocrypt_setopt_enable_multiple_collinfo(wrapped); + configure(() -> mongocrypt_setopt_log_handler(wrapped, logCallback, null)); if (mongocrypt_is_crypto_available()) { diff --git a/settings.gradle b/settings.gradle index c8a32bd7df5..e390791d5d5 100644 --- a/settings.gradle +++ b/settings.gradle @@ -33,6 +33,7 @@ include ':driver-scala' include ':mongodb-crypt' include 'util:spock' include 'util:taglets' +include ':bom' if(hasProperty("includeGraalvm")) { include ':graalvm-native-image-app'