diff --git a/.evergreen/config.yml b/.evergreen/config.yml index f8b34384fe..12cce5bf77 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -58,14 +58,12 @@ functions: export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" - export UPLOAD_BUCKET="${project}" cat < expansion.yml CURRENT_VERSION: "$CURRENT_VERSION" DRIVERS_TOOLS: "$DRIVERS_TOOLS" MONGO_ORCHESTRATION_HOME: "$MONGO_ORCHESTRATION_HOME" MONGODB_BINARIES: "$MONGODB_BINARIES" - UPLOAD_BUCKET: "$UPLOAD_BUCKET" PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" PREPARE_SHELL: | set -o errexit @@ -73,7 +71,6 @@ functions: export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" - export UPLOAD_BUCKET="$UPLOAD_BUCKET" export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" @@ -103,30 +100,35 @@ functions: echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config "upload coverage" : + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: src/.coverage optional: true # Upload the coverage report for all tasks in a single build to the same directory. 
- remote_file: ${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name} - bucket: mciuploads + remote_file: coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name} + bucket: ${bucket_name} permissions: public-read content_type: text/html display_name: "Raw Coverage Report" "download and merge coverage" : + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: shell.exec params: - silent: true working_dir: "src" + silent: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] script: | - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - # Download all the task coverage files. - aws s3 cp --recursive s3://mciuploads/${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/coverage/ coverage/ + aws s3 cp --recursive s3://${bucket_name}/coverage/${revision}/${version_id}/coverage/ coverage/ - command: shell.exec params: working_dir: "src" @@ -136,20 +138,20 @@ functions: # Upload the resulting html coverage report. - command: shell.exec params: - silent: true working_dir: "src" + silent: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] script: | - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp htmlcov/ s3://mciuploads/${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/htmlcov/ --recursive --acl public-read --region us-east-1 + aws s3 cp htmlcov/ s3://${bucket_name}/coverage/${revision}/${version_id}/htmlcov/ --recursive --acl public-read --region us-east-1 # Attach the index.html with s3.put so it shows up in the Evergreen UI. 
- command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: src/htmlcov/index.html - remote_file: ${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/htmlcov/index.html - bucket: mciuploads + remote_file: coverage/${revision}/${version_id}/htmlcov/index.html + bucket: ${bucket_name} permissions: public-read content_type: text/html display_name: "Coverage Report HTML" @@ -172,34 +174,40 @@ functions: include: - "./**.core" - "./**.mdmp" # Windows: minidumps + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: mongo-coredumps.tgz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/gzip} display_name: Core Dumps - Execution optional: true - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: mongodb-logs.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-mongodb-logs.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-mongodb-logs.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/x-gzip} display_name: "mongodb-logs.tar.gz" - command: s3.put params: - 
aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: drivers-tools/.evergreen/orchestration/server.log - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-orchestration.log - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-orchestration.log + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|text/plain} display_name: "orchestration.log" @@ -211,13 +219,17 @@ functions: source_dir: ${PROJECT_DIRECTORY}/ include: - "./**" + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: working-dir.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-working-dir.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-working-dir.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/x-gzip} display_name: "working-dir.tar.gz" @@ -232,11 +244,12 @@ functions: - "*.lock" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: drivers-dir.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-drivers-dir.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-drivers-dir.tar.gz + bucket: ${bucket_name} permissions: 
public-read content_type: ${content_type|application/x-gzip} display_name: "drivers-dir.tar.gz" @@ -791,27 +804,32 @@ functions: source_dir: "src/dist" include: - "*" + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: release-files.tgz - remote_file: ${UPLOAD_BUCKET}/release/${revision}/${task_id}-${execution}-release-files.tar.gz - bucket: mciuploads + remote_file: release/${revision}/${task_id}-${execution}-release-files.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/gzip} display_name: Release files "download and merge releases": + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: shell.exec params: silent: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] script: | - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - # Download all the task coverage files. 
- aws s3 cp --recursive s3://mciuploads/${UPLOAD_BUCKET}/release/${revision}/ release/ + aws s3 cp --recursive s3://${bucket_name}/release/${revision}/ release/ - command: shell.exec params: shell: "bash" @@ -845,11 +863,12 @@ functions: - "*" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: release-files-all.tgz - remote_file: ${UPLOAD_BUCKET}/release-all/${revision}/${task_id}-${execution}-release-files-all.tar.gz - bucket: mciuploads + remote_file: release-all/${revision}/${task_id}-${execution}-release-files-all.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/gzip} display_name: Release files all @@ -962,7 +981,7 @@ task_groups: - ${DRIVERS_TOOLS}/.evergreen/csfle/azurekms/delete-vm.sh - func: "upload test results" setup_group_can_fail_task: true - teardown_group_can_fail_task: true + teardown_task_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - testazurekms-task @@ -2220,9 +2239,9 @@ axes: display_name: "RHEL 8.x" run_on: rhel87-small batchtime: 10080 # 7 days - - id: rhel80-fips - display_name: "RHEL 8.0 FIPS" - run_on: rhel80-fips + - id: rhel92-fips + display_name: "RHEL 9.2 FIPS" + run_on: rhel92-fips batchtime: 10080 # 7 days - id: ubuntu-22.04 display_name: "Ubuntu 22.04" @@ -2596,7 +2615,7 @@ buildvariants: - matrix_name: "tests-fips" matrix_spec: platform: - - rhel80-fips + - rhel92-fips auth: "auth" ssl: "ssl" display_name: "${platform} ${auth} ${ssl}" diff --git a/.evergreen/run-import-time-test.sh b/.evergreen/run-import-time-test.sh index f013eb115c..2b17f5ffeb 100755 --- a/.evergreen/run-import-time-test.sh +++ b/.evergreen/run-import-time-test.sh @@ -25,7 +25,9 @@ function get_import_time() { } get_import_time $HEAD_SHA +git stash git checkout $BASE_SHA get_import_time $BASE_SHA git checkout $HEAD_SHA +git stash apply python 
tools/compare_import_time.py $HEAD_SHA $BASE_SHA diff --git a/.evergreen/run-mod-wsgi-tests.sh b/.evergreen/run-mod-wsgi-tests.sh index afb3f271ae..e1f5238110 100644 --- a/.evergreen/run-mod-wsgi-tests.sh +++ b/.evergreen/run-mod-wsgi-tests.sh @@ -19,7 +19,10 @@ fi PYTHON_VERSION=$(${PYTHON_BINARY} -c "import sys; sys.stdout.write('.'.join(str(val) for val in sys.version_info[:2]))") # Ensure the C extensions are installed. -${PYTHON_BINARY} setup.py build_ext -i +${PYTHON_BINARY} -m venv --system-site-packages .venv +source .venv/bin/activate +pip install -U pip +python -m pip install -e . export MOD_WSGI_SO=/opt/python/mod_wsgi/python_version/$PYTHON_VERSION/mod_wsgi_version/$MOD_WSGI_VERSION/mod_wsgi.so export PYTHONHOME=/opt/python/$PYTHON_VERSION @@ -38,10 +41,12 @@ trap '$APACHE -k stop -f ${PROJECT_DIRECTORY}/test/mod_wsgi_test/${APACHE_CONFIG wget -t 1 -T 10 -O - "http://localhost:8080/interpreter1$%7BPROJECT_DIRECTORY%7D" || (cat error_log && exit 1) wget -t 1 -T 10 -O - "http://localhost:8080/interpreter2$%7BPROJECT_DIRECTORY%7D" || (cat error_log && exit 1) -${PYTHON_BINARY} ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 -t 100 parallel \ +python ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 -t 100 parallel \ http://localhost:8080/interpreter1${PROJECT_DIRECTORY} http://localhost:8080/interpreter2${PROJECT_DIRECTORY} || \ (tail -n 100 error_log && exit 1) -${PYTHON_BINARY} ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 serial \ +python ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 serial \ http://localhost:8080/interpreter1${PROJECT_DIRECTORY} http://localhost:8080/interpreter2${PROJECT_DIRECTORY} || \ (tail -n 100 error_log && exit 1) + +rm -rf .venv diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 3cad42e4dc..d47e3a9505 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -31,9 +31,6 @@ set -o xtrace AUTH=${AUTH:-noauth} 
SSL=${SSL:-nossl} TEST_ARGS="${*:1}" -PYTHON=$(which python) -# TODO: Remove when we drop PyPy 3.8 support. -OLD_PYPY=$(python -c "import sys; print(sys.implementation.name.lower() == 'pypy' and sys.implementation.version < (7, 3, 12))") export PIP_QUIET=1 # Quiet by default export PIP_PREFER_BINARY=1 # Prefer binary dists by default @@ -113,10 +110,6 @@ fi if [ "$COMPRESSORS" = "snappy" ]; then python -m pip install '.[snappy]' - if [ "$OLD_PYPY" == "True" ]; then - pip install "python-snappy<0.7.0" - fi - PYTHON=python elif [ "$COMPRESSORS" = "zstd" ]; then python -m pip install zstandard fi @@ -158,6 +151,7 @@ if [ -n "$TEST_ENCRYPTION" ] || [ -n "$TEST_FLE_AZURE_AUTO" ] || [ -n "$TEST_FLE if [ ! -d "libmongocrypt_git" ]; then git clone https://github.com/mongodb/libmongocrypt.git libmongocrypt_git fi + python -m pip install -U setuptools python -m pip install ./libmongocrypt_git/bindings/python python -c "import pymongocrypt; print('pymongocrypt version: '+pymongocrypt.__version__)" python -c "import pymongocrypt; print('libmongocrypt version: '+pymongocrypt.libmongocrypt_version())" @@ -236,7 +230,7 @@ if [ -n "$PERF_TEST" ]; then TEST_ARGS="test/performance/perf_test.py" fi -echo "Running $AUTH tests over $SSL with python $PYTHON" +echo "Running $AUTH tests over $SSL with python $(which python)" python -c 'import sys; print(sys.version)' @@ -245,7 +239,7 @@ python -c 'import sys; print(sys.version)' # Run the tests with coverage if requested and coverage is installed. # Only cover CPython. PyPy reports suspiciously low coverage. -PYTHON_IMPL=$($PYTHON -c "import platform; print(platform.python_implementation())") +PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") if [ -n "$COVERAGE" ] && [ "$PYTHON_IMPL" = "CPython" ]; then # Keep in sync with combine-coverage.sh. # coverage >=5 is needed for relative_files=true. 
diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh index 7238feb3c8..f0a5851d91 100755 --- a/.evergreen/utils.sh +++ b/.evergreen/utils.sh @@ -66,7 +66,7 @@ createvirtualenv () { export PIP_QUIET=1 python -m pip install --upgrade pip - python -m pip install --upgrade setuptools tox + python -m pip install --upgrade tox } # Usage: diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 0d2551d76b..370b8759e6 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -5,6 +5,11 @@ on: branches: [ "master", "v*"] tags: ['*'] pull_request: + workflow_call: + inputs: + ref: + required: true + type: string schedule: - cron: '17 10 * * 2' @@ -21,9 +26,6 @@ jobs: # required for all workflows security-events: write - # required to fetch internal or private CodeQL packs - packages: read - strategy: fail-fast: false matrix: @@ -35,6 +37,8 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref }} - uses: actions/setup-python@v3 # Initializes the CodeQL tools for scanning. 
diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml new file mode 100644 index 0000000000..7ec55dd3b3 --- /dev/null +++ b/.github/workflows/dist.yml @@ -0,0 +1,146 @@ +name: Python Dist + +on: + push: + tags: + - "[0-9]+.[0-9]+.[0-9]+" + - "[0-9]+.[0-9]+.[0-9]+.post[0-9]+" + - "[0-9]+.[0-9]+.[0-9]+[a-b][0-9]+" + - "[0-9]+.[0-9]+.[0-9]+rc[0-9]+" + workflow_dispatch: + pull_request: + workflow_call: + inputs: + ref: + required: true + type: string + +concurrency: + group: dist-${{ github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash -eux {0} + +jobs: + build_wheels: + name: Build wheels for ${{ matrix.buildplat[1] }} + runs-on: ${{ matrix.buildplat[0] }} + strategy: + # Ensure that a wheel builder finishes even if another fails + fail-fast: false + matrix: + # Github Actions doesn't support pairing matrix values together, let's improvise + # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 + buildplat: + - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"] + - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"] + - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] + - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"] + - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"] + - [windows-2019, "win_amd64", "cp3*-win_amd64"] + - [windows-2019, "win32", "cp3*-win32"] + - [macos-14, "macos", "cp*-macosx_*"] + + steps: + - name: Checkout pymongo + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ inputs.ref }} + + - uses: actions/setup-python@v5 + with: + cache: 'pip' + python-version: 3.8 + cache-dependency-path: 'pyproject.toml' + allow-prereleases: true + + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v3 + with: + platforms: all + + - name: Install cibuildwheel + # Note: the default manylinux is manylinux2014 + run: | + python -m pip install -U pip + python -m pip install "cibuildwheel>=2.17,<3" + + - name: Build
wheels + env: + CIBW_BUILD: ${{ matrix.buildplat[2] }} + run: python -m cibuildwheel --output-dir wheelhouse + + - name: Build manylinux1 wheels + if: ${{ matrix.buildplat[1] == 'manylinux_x86_64' || matrix.buildplat[1] == 'manylinux_i686' }} + env: + CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 + CIBW_MANYLINUX_I686_IMAGE: manylinux1 + CIBW_BUILD: "cp38-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" + run: python -m cibuildwheel --output-dir wheelhouse + + - name: Assert all versions in wheelhouse + if: ${{ ! startsWith(matrix.buildplat[1], 'macos') }} + run: | + ls wheelhouse/*cp38*.whl + ls wheelhouse/*cp39*.whl + ls wheelhouse/*cp310*.whl + ls wheelhouse/*cp311*.whl + ls wheelhouse/*cp312*.whl + + - uses: actions/upload-artifact@v4 + with: + name: wheel-${{ matrix.buildplat[1] }} + path: ./wheelhouse/*.whl + if-no-files-found: error + + make_sdist: + name: Make SDist + runs-on: macos-13 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ inputs.ref }} + + - uses: actions/setup-python@v5 + with: + # Build sdist on lowest supported Python + python-version: '3.8' + + - name: Build SDist + run: | + set -ex + python -m pip install -U pip build + python -m build --sdist . + + - name: Test SDist + run: | + python -m pip install dist/*.gz + cd .. + python -c "from pymongo import has_c; assert has_c()" + + - uses: actions/upload-artifact@v4 + with: + name: "sdist" + path: ./dist/*.tar.gz + + collect_dist: + runs-on: ubuntu-latest + needs: [build_wheels, make_sdist] + name: Download Wheels + steps: + - name: Download all workflow run artifacts + uses: actions/download-artifact@v4 + - name: Flatten directory + working-directory: . + run: | + find . -mindepth 2 -type f -exec mv {} . \; + find . 
-type d -empty -delete + - uses: actions/upload-artifact@v4 + with: + name: all-dist-${{ github.run_id }} + path: "./*" diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index c3ee0d4eb1..a729ab327b 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -1,156 +1,96 @@ -name: Python Wheels +name: Release on: - push: - tags: - - "[0-9]+.[0-9]+.[0-9]+" - - "[0-9]+.[0-9]+.[0-9]+.post[0-9]+" - - "[0-9]+.[0-9]+.[0-9]+[a-b][0-9]+" - - "[0-9]+.[0-9]+.[0-9]+rc[0-9]+" workflow_dispatch: - pull_request: - -concurrency: - group: wheels-${{ github.ref }} - cancel-in-progress: true + inputs: + version: + description: "The new version to set" + required: true + following_version: + description: "The post (dev) version to set" + required: true + dry_run: + description: "Dry Run?" + default: false + type: boolean + +env: + # Changes per repo + PRODUCT_NAME: PyMongo + # Changes per branch + SILK_ASSET_GROUP: mongodb-python-driver + EVERGREEN_PROJECT: mongo-python-driver-v4.8 defaults: run: shell: bash -eux {0} jobs: - build_wheels: - name: Build wheels for ${{ matrix.buildplat[1] }} - runs-on: ${{ matrix.buildplat[0] }} - strategy: - # Ensure that a wheel builder finishes even if another fails - fail-fast: false - matrix: - # Github Actions doesn't support pairing matrix values together, let's improvise - # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 - buildplat: - - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"] - - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"] - - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] - - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"] - - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"] - - [windows-2019, "win_amd6", "cp3*-win_amd64"] - - [windows-2019, "win32", "cp3*-win32"] - - [macos-14, "macos", "cp*-macosx_*"] - + pre-publish: + environment: release + runs-on: ubuntu-latest + 
permissions: + id-token: write + contents: write + outputs: + version: ${{ steps.pre-publish.outputs.version }} steps: - - name: Checkout pymongo - uses: actions/checkout@v4 + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 with: - fetch-depth: 0 - - - uses: actions/setup-python@v5 + app_id: ${{ vars.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + - uses: mongodb-labs/drivers-github-tools/setup@v2 with: - cache: 'pip' - python-version: 3.8 - cache-dependency-path: 'pyproject.toml' - allow-prereleases: true - - - name: Set up QEMU - if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v3 + aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} + aws_region_name: ${{ vars.AWS_REGION_NAME }} + aws_secret_id: ${{ secrets.AWS_SECRET_ID }} + artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} + - uses: mongodb-labs/drivers-github-tools/python/pre-publish@v2 + id: pre-publish with: - platforms: all + version: ${{ inputs.version }} + dry_run: ${{ inputs.dry_run }} - - name: Install cibuildwheel - # Note: the default manylinux is manylinux2014 - run: | - python -m pip install -U pip - python -m pip install "cibuildwheel>=2.17,<3" + build-dist: + needs: [pre-publish] + uses: ./.github/workflows/dist.yml + with: + ref: ${{ needs.pre-publish.outputs.version }} - - name: Build wheels - env: - CIBW_BUILD: ${{ matrix.buildplat[2] }} - run: python -m cibuildwheel --output-dir wheelhouse - - - name: Build manylinux1 wheels - if: ${{ matrix.buildplat[1] == 'manylinux_x86_64' || matrix.buildplat[1] == 'manylinux_i686' }} - env: - CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 - CIBW_MANYLINUX_I686_IMAGE: manylinux1 - CIBW_BUILD: "cp38-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" - run: python -m cibuildwheel --output-dir wheelhouse - - - name: Assert all versions in wheelhouse - if: ${{ ! 
startsWith(matrix.buildplat[1], 'macos') }} - run: | - ls wheelhouse/*cp38*.whl - ls wheelhouse/*cp39*.whl - ls wheelhouse/*cp310*.whl - ls wheelhouse/*cp311*.whl - ls wheelhouse/*cp312*.whl - - - uses: actions/upload-artifact@v4 - with: - name: wheel-${{ matrix.buildplat[1] }} - path: ./wheelhouse/*.whl - if-no-files-found: error - - make_sdist: - name: Make SDist - runs-on: macos-13 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - uses: actions/setup-python@v5 - with: - # Build sdist on lowest supported Python - python-version: '3.8' - - - name: Build SDist - run: | - set -ex - python -m pip install -U pip build - python -m build --sdist . - - - name: Test SDist - run: | - python -m pip install dist/*.gz - cd .. - python -c "from pymongo import has_c; assert has_c()" - - - uses: actions/upload-artifact@v4 - with: - name: "sdist" - path: ./dist/*.tar.gz - - collect_dist: - runs-on: ubuntu-latest - needs: [build_wheels, make_sdist] - name: Download Wheels - steps: - - name: Download all workflow run artifacts - uses: actions/download-artifact@v4 - - name: Flatten directory - working-directory: . - run: | - find . -mindepth 2 -type f -exec mv {} . \; - find . 
-type d -empty -delete - - uses: actions/upload-artifact@v4 - with: - name: all-dist-${{ github.run_id }} - path: "./*" + static-scan: + needs: [pre-publish] + permissions: + security-events: write + uses: ./.github/workflows/codeql.yml + with: + ref: ${{ needs.pre-publish.outputs.version }} publish: - # https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/#publishing-the-distribution-to-pypi - needs: [collect_dist] - if: startsWith(github.ref, 'refs/tags/') + needs: [build-dist, static-scan] runs-on: ubuntu-latest environment: release permissions: id-token: write + contents: write + attestations: write + security-events: write steps: - - name: Download all the dists - uses: actions/download-artifact@v4 - with: - name: all-dist-${{ github.run_id }} - path: dist/ - - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 + with: + app_id: ${{ vars.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + - uses: mongodb-labs/drivers-github-tools/setup@v2 + with: + aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} + aws_region_name: ${{ vars.AWS_REGION_NAME }} + aws_secret_id: ${{ secrets.AWS_SECRET_ID }} + artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} + - uses: mongodb-labs/drivers-github-tools/python/publish@v2 + with: + version: ${{ inputs.version }} + following_version: ${{ inputs.following_version }} + product_name: ${{ env.PRODUCT_NAME }} + silk_asset_group: ${{ env.SILK_ASSET_GROUP }} + evergreen_project: ${{ env.EVERGREEN_PROJECT }} + token: ${{ github.token }} + dry_run: ${{ inputs.dry_run }} diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 530a2386f2..b93c93c022 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -31,12 +31,10 @@ jobs: - name: Run linters run: | tox -m lint-manual - - name: Check Manifest - run: | - tox 
-m manifest - name: Run compilation run: | - pip install -e . + export PYMONGO_C_EXT_MUST_BUILD=1 + pip install -v -e . python tools/fail_if_no_c.py - name: Run typecheck run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1a567b73f0..e467ac9f51 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -64,14 +64,11 @@ repos: stages: [manual] - repo: https://github.com/sirosen/check-jsonschema - rev: 0.27.0 + rev: 0.29.0 hooks: - - id: check-jsonschema - name: "Check GitHub Workflows" - files: ^\.github/workflows/ - types: [yaml] - args: ["--schemafile", "https://json.schemastore.org/github-workflow"] - stages: [manual] + - id: check-github-workflows + - id: check-github-actions + - id: check-dependabot - repo: https://github.com/ariebovenberg/slotscheck rev: v0.17.0 diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 889367ce3f..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,33 +0,0 @@ -include README.md -include LICENSE -include THIRD-PARTY-NOTICES -include *.ini -include sbom.json -include requirements.txt -exclude .coveragerc -exclude .git-blame-ignore-revs -exclude .pre-commit-config.yaml -exclude .readthedocs.yaml -exclude CONTRIBUTING.md -exclude RELEASE.md -recursive-include doc *.rst -recursive-include doc *.py -recursive-include doc *.conf -recursive-include doc *.css -recursive-include doc *.js -recursive-include doc *.png -include doc/Makefile -include doc/_templates/layout.html -include doc/make.bat -include doc/static/periodic-executor-refs.dot -recursive-include requirements *.txt -recursive-include tools *.py -include tools/README.rst -include green_framework_test.py -recursive-include test *.pem -recursive-include test *.py -recursive-include test *.json -recursive-include bson *.h -prune test/mod_wsgi_test -prune test/lambda -prune .evergreen diff --git a/README.md b/README.md index f3fb3d8f1b..3d13f1aa9a 100644 --- a/README.md +++ b/README.md @@ -78,12 +78,6 @@ PyMongo can be 
installed with [pip](http://pypi.python.org/pypi/pip): python -m pip install pymongo ``` -Or `easy_install` from [setuptools](http://pypi.python.org/pypi/setuptools): - -```bash -python -m easy_install pymongo -``` - You can also download the project source and do: ```bash diff --git a/_setup.py b/_setup.py new file mode 100644 index 0000000000..65ae1908fe --- /dev/null +++ b/_setup.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import os +import sys +import warnings + +# Hack to silence atexit traceback in some Python versions +try: + import multiprocessing # noqa: F401 +except ImportError: + pass + +from setuptools import setup +from setuptools.command.build_ext import build_ext +from setuptools.extension import Extension + + +class custom_build_ext(build_ext): + """Allow C extension building to fail. + + The C extension speeds up BSON encoding, but is not essential. + """ + + warning_message = """ +******************************************************************** +WARNING: %s could not +be compiled. No C extensions are essential for PyMongo to run, +although they do result in significant speed improvements. +%s + +Please see the installation docs for solutions to build issues: + +https://pymongo.readthedocs.io/en/stable/installation.html + +Here are some hints for popular operating systems: + +If you are seeing this message on Linux you probably need to +install GCC and/or the Python development package for your +version of Python. + +Debian and Ubuntu users should issue the following command: + + $ sudo apt-get install build-essential python-dev + +Users of Red Hat based distributions (RHEL, CentOS, Amazon Linux, +Oracle Linux, Fedora, etc.) should issue the following command: + + $ sudo yum install gcc python-devel + +If you are seeing this message on Microsoft Windows please install +PyMongo using pip. Modern versions of pip will install PyMongo +from binary wheels available on pypi. 
If you must install from +source read the documentation here: + +https://pymongo.readthedocs.io/en/stable/installation.html#installing-from-source-on-windows + +If you are seeing this message on macOS / OSX please install PyMongo +using pip. Modern versions of pip will install PyMongo from binary +wheels available on pypi. If wheels are not available for your version +of macOS / OSX, or you must install from source read the documentation +here: + +https://pymongo.readthedocs.io/en/stable/installation.html#osx +******************************************************************** +""" + + def run(self): + try: + build_ext.run(self) + except Exception: + if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): + raise + e = sys.exc_info()[1] + sys.stdout.write("%s\n" % str(e)) + warnings.warn( + self.warning_message + % ( + "Extension modules", + "There was an issue with your platform configuration - see above.", + ), + stacklevel=2, + ) + + def build_extension(self, ext): + name = ext.name + try: + build_ext.build_extension(self, ext) + except Exception: + if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): + raise + e = sys.exc_info()[1] + sys.stdout.write("%s\n" % str(e)) + warnings.warn( + self.warning_message + % ( + "The %s extension module" % (name,), # noqa: UP031 + "The output above this warning shows how the compilation failed.", + ), + stacklevel=2, + ) + + +ext_modules = [ + Extension( + "bson._cbson", + include_dirs=["bson"], + sources=["bson/_cbsonmodule.c", "bson/time64.c", "bson/buffer.c"], + ), + Extension( + "pymongo._cmessage", + include_dirs=["bson"], + sources=[ + "pymongo/_cmessagemodule.c", + "bson/_cbsonmodule.c", + "bson/time64.c", + "bson/buffer.c", + ], + ), +] + + +if "--no_ext" in sys.argv or os.environ.get("NO_EXT"): + try: + sys.argv.remove("--no_ext") + except ValueError: + pass + ext_modules = [] +elif sys.platform.startswith("java") or sys.platform == "cli" or "PyPy" in sys.version: + sys.stdout.write( + """ 
+*****************************************************\n +The optional C extensions are currently not supported\n +by this python implementation.\n +*****************************************************\n +""" + ) + ext_modules = [] + +setup( + cmdclass={"build_ext": custom_build_ext}, + ext_modules=ext_modules, + packages=["bson", "pymongo", "gridfs"], +) # type:ignore diff --git a/doc/changelog.rst b/doc/changelog.rst index 76dc24b6dd..1935fda233 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -4,11 +4,34 @@ Changelog Changes in Version 4.8.0 ------------------------- -The handshake metadata for "os.name" on Windows has been simplified to "Windows" to improve import time. +.. warning:: PyMongo 4.8 drops support for Python 3.7 and PyPy 3.8: Python 3.8+ or PyPy 3.9+ is now required. -The repr of ``bson.binary.Binary`` is now redacted when the subtype is SENSITIVE_SUBTYPE(8). +PyMongo 4.8 brings a number of improvements including: -.. warning:: PyMongo 4.8 drops support for Python 3.7 and PyPy 3.8: Python 3.8+ or PyPy 3.9+ is now required. +- The handshake metadata for "os.name" on Windows has been simplified to "Windows" to improve import time. +- The repr of ``bson.binary.Binary`` is now redacted when the subtype is SENSITIVE_SUBTYPE(8). +- Secure Software Development Life Cycle automation for release process. + GitHub Releases now include a Software Bill of Materials, and signature + files corresponding to the distribution files released on PyPI. +- Fixed a bug in change streams where both ``startAtOperationTime`` and ``resumeToken`` + could be added to a retry attempt, which caused the retry to fail. +- Fallback to stdlib ``ssl`` module when ``pyopenssl`` import fails with AttributeError. +- Improved performance of MongoClient operations, especially when many operations are being run concurrently. + +Unavoidable breaking changes +............................ 
+ +- Since we are now using ``hatch`` as our build backend, we no longer have a usable ``setup.py`` file + and require installation using ``pip``. Attempts to invoke the ``setup.py`` file will raise an exception. + Additionally, ``pip`` >= 21.3 is now required for editable installs. + +Issues Resolved +............... + +See the `PyMongo 4.8 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.8 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=37057 Changes in Version 4.7.3 ------------------------- diff --git a/hatch_build.py b/hatch_build.py new file mode 100644 index 0000000000..91315eb09f --- /dev/null +++ b/hatch_build.py @@ -0,0 +1,36 @@ +"""A custom hatch build hook for pymongo.""" +from __future__ import annotations + +import os +import subprocess +import sys +from pathlib import Path + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +class CustomHook(BuildHookInterface): + """The pymongo build hook.""" + + def initialize(self, version, build_data): + """Initialize the hook.""" + if self.target_name == "sdist": + return + here = Path(__file__).parent.resolve() + sys.path.insert(0, str(here)) + + subprocess.check_call([sys.executable, "_setup.py", "build_ext", "-i"]) + + # Ensure wheel is marked as binary and contains the binary files. 
+ build_data["infer_tag"] = True + build_data["pure_python"] = False + if os.name == "nt": + patt = ".pyd" + else: + patt = ".so" + for pkg in ["bson", "pymongo"]: + dpath = here / pkg + for fpath in dpath.glob(f"*{patt}"): + relpath = os.path.relpath(fpath, here) + build_data["artifacts"].append(relpath) + build_data["force_include"][relpath] = relpath diff --git a/pymongo/_version.py b/pymongo/_version.py index dc5c38c734..53fa03d5f9 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -15,16 +15,29 @@ """Current version of PyMongo.""" from __future__ import annotations -from typing import Tuple, Union +import re +from typing import List, Tuple, Union -version_tuple: Tuple[Union[int, str], ...] = (4, 8, 0, ".dev0") +__version__ = "4.8.1.dev0" -def get_version_string() -> str: - if isinstance(version_tuple[-1], str): - return ".".join(map(str, version_tuple[:-1])) + version_tuple[-1] - return ".".join(map(str, version_tuple)) +def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]: + pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)" + match = re.match(pattern, version) + if match: + parts: List[Union[int, str]] = [int(match[part]) for part in ["major", "minor", "patch"]] + if match["rest"]: + parts.append(match["rest"]) + elif re.match(r"\d+.\d+", version): + parts = [int(part) for part in version.split(".")] + else: + raise ValueError("Could not parse version") + return tuple(parts) -__version__: str = get_version_string() +version_tuple = get_version_tuple(__version__) version = __version__ + + +def get_version_string() -> str: + return __version__ diff --git a/pymongo/client_session.py b/pymongo/client_session.py index 3efc624c04..7dd1996afd 100644 --- a/pymongo/client_session.py +++ b/pymongo/client_session.py @@ -515,9 +515,6 @@ def end_session(self) -> None: It is an error to use the session after the session has ended. 
""" - self._end_session(lock=True) - - def _end_session(self, lock: bool) -> None: if self._server_session is not None: try: if self.in_transaction: @@ -526,7 +523,7 @@ def _end_session(self, lock: bool) -> None: # is in the committed state when the session is discarded. self._unpin() finally: - self._client._return_server_session(self._server_session, lock) + self._client._return_server_session(self._server_session) self._server_session = None def _check_ended(self) -> None: @@ -537,7 +534,7 @@ def __enter__(self) -> ClientSession: return self def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: - self._end_session(lock=True) + self.end_session() @property def client(self) -> MongoClient: @@ -1097,7 +1094,7 @@ def inc_transaction_id(self) -> None: class _ServerSessionPool(collections.deque): """Pool of _ServerSession objects. - This class is not thread-safe, access it while holding the Topology lock. + This class is thread-safe. """ def __init__(self, *args: Any, **kwargs: Any): @@ -1110,8 +1107,11 @@ def reset(self) -> None: def pop_all(self) -> list[_ServerSession]: ids = [] - while self: - ids.append(self.pop().session_id) + while True: + try: + ids.append(self.pop().session_id) + except IndexError: + break return ids def get_server_session(self, session_timeout_minutes: Optional[int]) -> _ServerSession: @@ -1123,23 +1123,17 @@ def get_server_session(self, session_timeout_minutes: Optional[int]) -> _ServerS self._clear_stale(session_timeout_minutes) # The most recently used sessions are on the left. 
- while self: - s = self.popleft() + while True: + try: + s = self.popleft() + except IndexError: + break if not s.timed_out(session_timeout_minutes): return s return _ServerSession(self.generation) - def return_server_session( - self, server_session: _ServerSession, session_timeout_minutes: Optional[int] - ) -> None: - if session_timeout_minutes is not None: - self._clear_stale(session_timeout_minutes) - if server_session.timed_out(session_timeout_minutes): - return - self.return_server_session_no_lock(server_session) - - def return_server_session_no_lock(self, server_session: _ServerSession) -> None: + def return_server_session(self, server_session: _ServerSession) -> None: # Discard sessions from an old pool to avoid duplicate sessions in the # child process after a fork. if server_session.generation == self.generation and not server_session.dirty: @@ -1147,9 +1141,12 @@ def return_server_session_no_lock(self, server_session: _ServerSession) -> None: def _clear_stale(self, session_timeout_minutes: Optional[int]) -> None: # Clear stale sessions. The least recently used are on the right. - while self: - if self[-1].timed_out(session_timeout_minutes): - self.pop() - else: + while True: + try: + s = self.pop() + except IndexError: + break + if not s.timed_out(session_timeout_minutes): + self.append(s) # The remaining sessions also haven't timed out. 
break diff --git a/pymongo/command_cursor.py b/pymongo/command_cursor.py index 0411a45abe..6d48a87824 100644 --- a/pymongo/command_cursor.py +++ b/pymongo/command_cursor.py @@ -73,7 +73,7 @@ def __init__( self.__killed = self.__id == 0 self.__comment = comment if self.__killed: - self.__end_session(True) + self.__end_session() if "ns" in cursor_info: # noqa: SIM401 self.__ns = cursor_info["ns"] @@ -112,9 +112,9 @@ def __die(self, synchronous: bool = False) -> None: self.__session = None self.__sock_mgr = None - def __end_session(self, synchronous: bool) -> None: + def __end_session(self) -> None: if self.__session and not self.__explicit_session: - self.__session._end_session(lock=synchronous) + self.__session.end_session() self.__session = None def close(self) -> None: diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index b0824acd44..89d61500ca 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -862,6 +862,7 @@ def __init__( server_monitoring_mode=options.server_monitoring_mode, ) + self._opened = False self._init_background() if connect: @@ -903,10 +904,13 @@ def target() -> bool: # this closure. When the client is freed, stop the executor soon. self_ref: Any = weakref.ref(self, executor.close) self._kill_cursors_executor = executor + self._opened = False def _after_fork(self) -> None: """Resets topology in a child after successfully forking.""" self._init_background(self._topology._pid) + # Reset the session pool to avoid duplicate sessions in the child process. + self._topology._session_pool.reset() def _duplicate(self, **kwargs: Any) -> MongoClient: args = self.__init_kwargs.copy() @@ -1243,9 +1247,11 @@ def _get_topology(self) -> Topology: If this client was created with "connect=False", calling _get_topology launches the connection process in the background. 
""" - self._topology.open() - with self.__lock: - self._kill_cursors_executor.open() + if not self._opened: + self._topology.open() + with self.__lock: + self._kill_cursors_executor.open() + self._opened = True return self._topology @contextlib.contextmanager @@ -1679,7 +1685,7 @@ def _cleanup_cursor( if cursor_id or conn_mgr: self._close_cursor_soon(cursor_id, address, conn_mgr) if session and not explicit_session: - session._end_session(lock=locks_allowed) + session.end_session() def _close_cursor_soon( self, @@ -1838,12 +1844,12 @@ def start_session( ) def _return_server_session( - self, server_session: Union[_ServerSession, _EmptyServerSession], lock: bool + self, server_session: Union[_ServerSession, _EmptyServerSession] ) -> None: """Internal: return a _ServerSession to the pool.""" if isinstance(server_session, _EmptyServerSession): return None - return self._topology.return_server_session(server_session, lock) + return self._topology.return_server_session(server_session) def _ensure_session(self, session: Optional[ClientSession] = None) -> Optional[ClientSession]: """If provided session is None, lend a temporary session.""" diff --git a/pymongo/pool.py b/pymongo/pool.py index 2e8aefa60c..379127deee 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -734,6 +734,7 @@ def __init__( self.op_msg_enabled = False self.listeners = pool.opts._event_listeners self.enabled_for_cmap = pool.enabled_for_cmap + self.enabled_for_logging = pool.enabled_for_logging self.compression_settings = pool.opts._compression_settings self.compression_context: Union[SnappyContext, ZlibContext, ZstdContext, None] = None self.socket_checker: SocketChecker = SocketChecker() @@ -1097,20 +1098,20 @@ def authenticate(self, reauthenticate: bool = False) -> None: auth.authenticate(creds, self, reauthenticate=reauthenticate) self.ready = True + duration = time.monotonic() - self.creation_time if self.enabled_for_cmap: assert self.listeners is not None - duration = time.monotonic() - 
self.creation_time self.listeners.publish_connection_ready(self.address, self.id, duration) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_READY, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=self.id, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_READY, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=self.id, + durationMS=duration, + ) def validate_session( self, client: Optional[MongoClient], session: Optional[ClientSession] @@ -1128,10 +1129,11 @@ def close_conn(self, reason: Optional[str]) -> None: if self.closed: return self._close_conn() - if reason and self.enabled_for_cmap: - assert self.listeners is not None - self.listeners.publish_connection_closed(self.address, self.id, reason) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + if reason: + if self.enabled_for_cmap: + assert self.listeners is not None + self.listeners.publish_connection_closed(self.address, self.id, reason) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1441,6 +1443,7 @@ def __init__( and self.opts._event_listeners is not None and self.opts._event_listeners.enabled_for_cmap ) + self.enabled_for_logging = self.handshake # The first portion of the wait queue. 
# Enforces: maxPoolSize @@ -1462,15 +1465,15 @@ def __init__( self.opts._event_listeners.publish_pool_created( self.address, self.opts.non_default_options ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.POOL_CREATED, - serverHost=self.address[0], - serverPort=self.address[1], - **self.opts.non_default_options, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.POOL_CREATED, + serverHost=self.address[0], + serverPort=self.address[1], + **self.opts.non_default_options, + ) # Similar to active_sockets but includes threads in the wait queue. self.operation_count: int = 0 # Retain references to pinned connections to prevent the CPython GC @@ -1488,14 +1491,14 @@ def ready(self) -> None: if self.enabled_for_cmap: assert self.opts._event_listeners is not None self.opts._event_listeners.publish_pool_ready(self.address) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.POOL_READY, - serverHost=self.address[0], - serverPort=self.address[1], - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.POOL_READY, + serverHost=self.address[0], + serverPort=self.address[1], + ) @property def closed(self) -> bool: @@ -1553,23 +1556,24 @@ def _reset( if self.enabled_for_cmap: assert listeners is not None listeners.publish_pool_closed(self.address) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.POOL_CLOSED, - serverHost=self.address[0], - serverPort=self.address[1], - ) - else: - if old_state != PoolState.PAUSED and 
self.enabled_for_cmap: - assert listeners is not None - listeners.publish_pool_cleared( - self.address, - service_id=service_id, - interrupt_connections=interrupt_connections, + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.POOL_CLOSED, + serverHost=self.address[0], + serverPort=self.address[1], ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + else: + if old_state != PoolState.PAUSED: + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_pool_cleared( + self.address, + service_id=service_id, + interrupt_connections=interrupt_connections, + ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1677,15 +1681,15 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_created(self.address, conn_id) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_CREATED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn_id, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_CREATED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn_id, + ) try: sock = _configured_socket(self.address, self.opts) @@ -1695,17 +1699,17 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect listeners.publish_connection_closed( self.address, conn_id, ConnectionClosedReason.ERROR ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - 
message=_ConnectionStatusMessage.CONN_CLOSED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn_id, - reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), - error=ConnectionClosedReason.ERROR, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_CLOSED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn_id, + reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), + error=ConnectionClosedReason.ERROR, + ) if isinstance(error, (IOError, OSError, SSLError)): details = _get_timeout_details(self.opts) _raise_connection_failure(self.address, error, timeout_details=details) @@ -1751,31 +1755,31 @@ def checkout(self, handler: Optional[_MongoClientErrorHandler] = None) -> Iterat if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_check_out_started(self.address) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKOUT_STARTED, - serverHost=self.address[0], - serverPort=self.address[1], - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_STARTED, + serverHost=self.address[0], + serverPort=self.address[1], + ) conn = self._get_conn(checkout_started_time, handler=handler) + duration = time.monotonic() - checkout_started_time if self.enabled_for_cmap: assert listeners is not None - duration = time.monotonic() - checkout_started_time listeners.publish_connection_checked_out(self.address, conn.id, duration) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - 
message=_ConnectionStatusMessage.CHECKOUT_SUCCEEDED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn.id, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_SUCCEEDED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn.id, + durationMS=duration, + ) try: with self.lock: self.active_contexts.add(conn.cancel_context) @@ -1807,13 +1811,14 @@ def checkout(self, handler: Optional[_MongoClientErrorHandler] = None) -> Iterat def _raise_if_not_ready(self, checkout_started_time: float, emit_event: bool) -> None: if self.state != PoolState.READY: - if self.enabled_for_cmap and emit_event: - assert self.opts._event_listeners is not None + if emit_event: duration = time.monotonic() - checkout_started_time - self.opts._event_listeners.publish_connection_check_out_failed( - self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration - ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + if self.enabled_for_cmap: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration + ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1841,23 +1846,23 @@ def _get_conn( self.reset_without_pause() if self.closed: + duration = time.monotonic() - checkout_started_time if self.enabled_for_cmap: assert self.opts._event_listeners is not None - duration = time.monotonic() - checkout_started_time self.opts._event_listeners.publish_connection_check_out_failed( self.address, ConnectionCheckOutFailedReason.POOL_CLOSED, duration ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - 
message=_ConnectionStatusMessage.CHECKOUT_FAILED, - serverHost=self.address[0], - serverPort=self.address[1], - reason="Connection pool was closed", - error=ConnectionCheckOutFailedReason.POOL_CLOSED, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_FAILED, + serverHost=self.address[0], + serverPort=self.address[1], + reason="Connection pool was closed", + error=ConnectionCheckOutFailedReason.POOL_CLOSED, + durationMS=duration, + ) raise _PoolClosedError( "Attempted to check out a connection from closed connection pool" ) @@ -1933,13 +1938,14 @@ def _get_conn( self.active_sockets -= 1 self.size_cond.notify() - if self.enabled_for_cmap and not emitted_event: - assert self.opts._event_listeners is not None + if not emitted_event: duration = time.monotonic() - checkout_started_time - self.opts._event_listeners.publish_connection_check_out_failed( - self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration - ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + if self.enabled_for_cmap: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration + ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1972,15 +1978,15 @@ def checkin(self, conn: Connection) -> None: if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_checked_in(self.address, conn.id) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKEDIN, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn.id, - ) + if self.enabled_for_logging and 
_CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKEDIN, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn.id, + ) if self.pid != os.getpid(): self.reset_without_pause() else: @@ -1993,17 +1999,17 @@ def checkin(self, conn: Connection) -> None: listeners.publish_connection_closed( self.address, conn.id, ConnectionClosedReason.ERROR ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_CLOSED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn.id, - reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), - error=ConnectionClosedReason.ERROR, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_CLOSED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn.id, + reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), + error=ConnectionClosedReason.ERROR, + ) else: with self.lock: # Hold the lock to ensure this section does not race with @@ -2065,23 +2071,23 @@ def _perished(self, conn: Connection) -> bool: def _raise_wait_queue_timeout(self, checkout_started_time: float) -> NoReturn: listeners = self.opts._event_listeners + duration = time.monotonic() - checkout_started_time if self.enabled_for_cmap: assert listeners is not None - duration = time.monotonic() - checkout_started_time listeners.publish_connection_check_out_failed( self.address, ConnectionCheckOutFailedReason.TIMEOUT, duration ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKOUT_FAILED, - serverHost=self.address[0], - 
serverPort=self.address[1], - reason="Wait queue timeout elapsed without a connection becoming available", - error=ConnectionCheckOutFailedReason.TIMEOUT, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_FAILED, + serverHost=self.address[0], + serverPort=self.address[1], + reason="Wait queue timeout elapsed without a connection becoming available", + error=ConnectionCheckOutFailedReason.TIMEOUT, + durationMS=duration, + ) timeout = _csot.get_timeout() or self.opts.wait_queue_timeout if self.opts.load_balanced: other_ops = self.active_sockets - self.ncursors - self.ntxns diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index b08588daff..97e6de9068 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -291,7 +291,7 @@ def load_cert_chain( # Password callback MUST be set first or it will be ignored. if password: - def _pwcb(_max_length: int, _prompt_twice: bool, _user_data: bytes) -> bytes: + def _pwcb(_max_length: int, _prompt_twice: bool, _user_data: Optional[bytes]) -> bytes: # XXX:We could check the password length against what OpenSSL # tells us is the max, but we can't raise an exception, so... # warn? 
@@ -331,6 +331,7 @@ def _load_certifi(self) -> None: def _load_wincerts(self, store: str) -> None: """Attempt to load CA certs from Windows trust store.""" cert_store = self._ctx.get_cert_store() + assert cert_store is not None oid = _stdlibssl.Purpose.SERVER_AUTH.oid for cert, encoding, trust in _stdlibssl.enum_certificates(store): # type: ignore diff --git a/pymongo/ssl_support.py b/pymongo/ssl_support.py index 849fbf7018..6a5dd278d3 100644 --- a/pymongo/ssl_support.py +++ b/pymongo/ssl_support.py @@ -15,6 +15,7 @@ """Support for SSL in PyMongo.""" from __future__ import annotations +import warnings from typing import Optional from pymongo.errors import ConfigurationError @@ -23,7 +24,17 @@ try: import pymongo.pyopenssl_context as _ssl -except ImportError: +except (ImportError, AttributeError) as exc: + if isinstance(exc, AttributeError): + warnings.warn( + "Failed to use the installed version of PyOpenSSL. " + "Falling back to stdlib ssl, disabling OCSP support. " + "This is likely caused by incompatible versions " + "of PyOpenSSL < 23.2.0 and cryptography >= 42.0.0. 
" + "Try updating PyOpenSSL >= 23.2.0 to enable OCSP.", + UserWarning, + stacklevel=2, + ) try: import pymongo.ssl_context as _ssl # type: ignore[no-redef] except ImportError: diff --git a/pymongo/topology.py b/pymongo/topology.py index e10f490adc..ea623cd1b4 100644 --- a/pymongo/topology.py +++ b/pymongo/topology.py @@ -669,23 +669,14 @@ def description(self) -> TopologyDescription: def pop_all_sessions(self) -> list[_ServerSession]: """Pop all session ids from the pool.""" - with self._lock: - return self._session_pool.pop_all() + return self._session_pool.pop_all() def get_server_session(self, session_timeout_minutes: Optional[int]) -> _ServerSession: """Start or resume a server session, or raise ConfigurationError.""" - with self._lock: - return self._session_pool.get_server_session(session_timeout_minutes) + return self._session_pool.get_server_session(session_timeout_minutes) - def return_server_session(self, server_session: _ServerSession, lock: bool) -> None: - if lock: - with self._lock: - self._session_pool.return_server_session( - server_session, self._description.logical_session_timeout_minutes - ) - else: - # Called from a __del__ method, can't use a lock. - self._session_pool.return_server_session_no_lock(server_session) + def return_server_session(self, server_session: _ServerSession) -> None: + self._session_pool.return_server_session(server_session) def _new_selection(self) -> Selection: """A Selection object, initially including all known servers. 
diff --git a/pyproject.toml b/pyproject.toml index aebabbf344..d208f6a439 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] -requires = ["setuptools>=63.0"] -build-backend = "setuptools.build_meta" +requires = ["hatchling>1.24","setuptools>=65.0","hatch-requirements-txt>=0.4.1"] +build-backend = "hatchling.build" [project] name = "pymongo" @@ -45,16 +45,28 @@ Documentation = "https://pymongo.readthedocs.io/" Source = "https://github.com/mongodb/mongo-python-driver" Tracker = "https://jira.mongodb.org/projects/PYTHON/issues" -[tool.setuptools.dynamic] -version = {attr = "pymongo._version.__version__"} +# Used to call hatch_build.py +[tool.hatch.build.hooks.custom] -[tool.setuptools.packages.find] -include = ["bson","gridfs", "pymongo"] +[tool.hatch.version] +path = "pymongo/_version.py" +validate-bump = false -[tool.setuptools.package-data] -bson=["py.typed", "*.pyi"] -pymongo=["py.typed", "*.pyi"] -gridfs=["py.typed", "*.pyi"] +[tool.hatch.build.targets.wheel] +packages = ["bson","gridfs", "pymongo"] + +[tool.hatch.metadata.hooks.requirements_txt] +files = ["requirements.txt"] + +[tool.hatch.metadata.hooks.requirements_txt.optional-dependencies] +aws = ["requirements/aws.txt"] +docs = ["requirements/docs.txt"] +encryption = ["requirements/encryption.txt"] +gssapi = ["requirements/gssapi.txt"] +ocsp = ["requirements/ocsp.txt"] +snappy = ["requirements/snappy.txt"] +test = ["requirements/test.txt"] +zstd = ["requirements/zstd.txt"] [tool.pytest.ini_options] minversion = "7" @@ -168,6 +180,7 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?)|dummy.*)$" "UP031", "F401", "B023", "F811"] "tools/*.py" = ["T201"] "green_framework_test.py" = ["T201"] +"hatch_build.py" = ["S"] [tool.coverage.run] branch = true diff --git a/sbom.json b/sbom.json index 3a09bce287..162aa82521 100644 --- a/sbom.json +++ b/sbom.json @@ -1,10 +1,11 @@ { - "metadata": { - "timestamp": "2024-05-02T17:36:12.698229+00:00" - }, - 
"serialNumber": "urn:uuid:9876a8a6-060e-486f-b128-910aecf0fe7b", - "version": 1, - "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", - "bomFormat": "CycloneDX", - "specVersion": "1.5" - } \ No newline at end of file + "metadata": { + "timestamp": "2024-06-10T18:55:17.710940+00:00" + }, + "components": [], + "serialNumber": "urn:uuid:a6c08d96-55e1-4cdb-945c-0e21ced83e34", + "version": 1, + "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.5" +} diff --git a/setup.py b/setup.py index 599ea0e4a9..f371b3d75b 100644 --- a/setup.py +++ b/setup.py @@ -1,167 +1,8 @@ from __future__ import annotations -import os -import sys -import warnings - -# Hack to silence atexit traceback in some Python versions -try: - import multiprocessing # noqa: F401 -except ImportError: - pass - -from setuptools import setup -from setuptools.command.build_ext import build_ext -from setuptools.extension import Extension - - -class custom_build_ext(build_ext): - """Allow C extension building to fail. - - The C extension speeds up BSON encoding, but is not essential. - """ - - warning_message = """ -******************************************************************** -WARNING: %s could not -be compiled. No C extensions are essential for PyMongo to run, -although they do result in significant speed improvements. -%s - -Please see the installation docs for solutions to build issues: - -https://pymongo.readthedocs.io/en/stable/installation.html - -Here are some hints for popular operating systems: - -If you are seeing this message on Linux you probably need to -install GCC and/or the Python development package for your -version of Python. - -Debian and Ubuntu users should issue the following command: - - $ sudo apt-get install build-essential python-dev - -Users of Red Hat based distributions (RHEL, CentOS, Amazon Linux, -Oracle Linux, Fedora, etc.) 
should issue the following command: - - $ sudo yum install gcc python-devel - -If you are seeing this message on Microsoft Windows please install -PyMongo using pip. Modern versions of pip will install PyMongo -from binary wheels available on pypi. If you must install from -source read the documentation here: - -https://pymongo.readthedocs.io/en/stable/installation.html#installing-from-source-on-windows - -If you are seeing this message on macOS / OSX please install PyMongo -using pip. Modern versions of pip will install PyMongo from binary -wheels available on pypi. If wheels are not available for your version -of macOS / OSX, or you must install from source read the documentation -here: - -https://pymongo.readthedocs.io/en/stable/installation.html#osx -******************************************************************** -""" - - def run(self): - try: - build_ext.run(self) - except Exception: - if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): - raise - e = sys.exc_info()[1] - sys.stdout.write("%s\n" % str(e)) - warnings.warn( - self.warning_message - % ( - "Extension modules", - "There was an issue with your platform configuration - see above.", - ), - stacklevel=2, - ) - - def build_extension(self, ext): - name = ext.name - try: - build_ext.build_extension(self, ext) - except Exception: - if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): - raise - e = sys.exc_info()[1] - sys.stdout.write("%s\n" % str(e)) - warnings.warn( - self.warning_message - % ( - "The %s extension module" % (name,), # noqa: UP031 - "The output above this warning shows how the compilation failed.", - ), - stacklevel=2, - ) - - -ext_modules = [ - Extension( - "bson._cbson", - include_dirs=["bson"], - sources=["bson/_cbsonmodule.c", "bson/time64.c", "bson/buffer.c"], - ), - Extension( - "pymongo._cmessage", - include_dirs=["bson"], - sources=[ - "pymongo/_cmessagemodule.c", - "bson/_cbsonmodule.c", - "bson/time64.c", - "bson/buffer.c", - ], - ), -] - - -if "--no_ext" in sys.argv or 
os.environ.get("NO_EXT"): - try: - sys.argv.remove("--no_ext") - except ValueError: - pass - ext_modules = [] -elif sys.platform.startswith("java") or sys.platform == "cli" or "PyPy" in sys.version: - sys.stdout.write( - """ -*****************************************************\n -The optional C extensions are currently not supported\n -by this python implementation.\n -*****************************************************\n -""" - ) - ext_modules = [] - - -def parse_reqs_file(fname): - with open(fname) as fid: - lines = [li.strip() for li in fid.readlines()] - return [li for li in lines if li and not li.startswith("#")] - - -dependencies = parse_reqs_file("requirements.txt") - -extras_require = dict( - aws=parse_reqs_file("requirements/aws.txt"), - encryption=parse_reqs_file("requirements/encryption.txt"), - gssapi=parse_reqs_file("requirements/gssapi.txt"), - ocsp=parse_reqs_file("requirements/ocsp.txt"), - snappy=parse_reqs_file("requirements/snappy.txt"), - # PYTHON-3423 Removed in 4.3 but kept here to avoid pip warnings. - srv=[], - tls=[], - # PYTHON-2133 Removed in 4.0 but kept here to avoid pip warnings. - zstd=parse_reqs_file("requirements/zstd.txt"), - test=parse_reqs_file("requirements/test.txt"), +msg = ( + "PyMongo>=4.8 no longer supports building via setup.py, use python -m pip install instead. 
If " + "this is an editable install (-e) please upgrade to pip>=21.3 first: python -m pip install --upgrade pip" ) -setup( - cmdclass={"build_ext": custom_build_ext}, - install_requires=dependencies, - extras_require=extras_require, - ext_modules=ext_modules, -) # type:ignore +raise RuntimeError(msg) diff --git a/test/__init__.py b/test/__init__.py index e1eba725b0..c516838f47 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -277,6 +277,7 @@ def __init__(self): self.is_data_lake = False self.load_balancer = TEST_LOADBALANCER self.serverless = TEST_SERVERLESS + self._fips_enabled = None if self.load_balancer or self.serverless: self.default_client_options["loadBalanced"] = True if COMPRESSORS: @@ -523,6 +524,17 @@ def storage_engine(self): # Raised if self.server_status is None. return None + @property + def fips_enabled(self): + if self._fips_enabled is not None: + return self._fips_enabled + try: + subprocess.check_call(["fips-mode-setup", "--is-enabled"]) + self._fips_enabled = True + except (subprocess.SubprocessError, FileNotFoundError): + self._fips_enabled = False + return self._fips_enabled + def check_auth_type(self, auth_type): auth_mechs = self.server_parameters.get("authenticationMechanisms", []) return auth_type in auth_mechs @@ -670,6 +682,12 @@ def require_auth(self, func): lambda: self.auth_enabled, "Authentication is not enabled on the server", func=func ) + def require_no_fips(self, func): + """Run a test only if the host does not have FIPS enabled.""" + return self._require( + lambda: not self.fips_enabled, "Test cannot run on a FIPS-enabled host", func=func + ) + def require_no_auth(self, func): """Run a test only if the server is running without auth enabled.""" return self._require( diff --git a/test/connection_string/test/valid-options.json b/test/connection_string/test/valid-options.json index 3d174c5abe..3c79fe7ae5 100644 --- a/test/connection_string/test/valid-options.json +++ b/test/connection_string/test/valid-options.json @@ -40,7 
+40,7 @@ }, { "description": "Colon in a key value pair", - "uri": "mongodb://example.com?authMechanismProperties=TOKEN_RESOURCE:mongodb://test-cluster", + "uri": "mongodb://example.com/?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://test-cluster", "valid": true, "warning": false, "hosts": [ diff --git a/test/connection_string/test/valid-warnings.json b/test/connection_string/test/valid-warnings.json index 6bedbc6a66..f0e8288bc7 100644 --- a/test/connection_string/test/valid-warnings.json +++ b/test/connection_string/test/valid-warnings.json @@ -96,13 +96,13 @@ }, { "description": "Comma in a key value pair causes a warning", - "uri": "mongodb://example.com?authMechanismProperties=TOKEN_RESOURCE:mongodb://host1%2Chost2", + "uri": "mongodb://localhost?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://host1%2Chost2", "valid": true, "warning": true, "hosts": [ { "type": "hostname", - "host": "example.com", + "host": "localhost", "port": null } ], diff --git a/test/test_auth.py b/test/test_auth.py index 596c94d562..74089bd68e 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -343,6 +343,7 @@ def tearDown(self): client_context.drop_user("pymongo_test", "user") super().tearDown() + @client_context.require_no_fips def test_scram_sha1(self): host, port = client_context.host, client_context.port @@ -404,6 +405,7 @@ def test_scram_skip_empty_exchange(self): else: self.assertEqual(started, ["saslStart", "saslContinue", "saslContinue"]) + @client_context.require_no_fips def test_scram(self): # Step 1: create users client_context.create_user( diff --git a/test/test_bson.py b/test/test_bson.py index 89c0983ca5..fec84090d2 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -23,6 +23,7 @@ import os import pickle import re +import struct import sys import tempfile import uuid @@ -489,6 +490,33 @@ def test_basic_encode(self): b"\x00", ) + def test_bad_code(self): + # Assert that decoding invalid Code with scope 
does not include a field name. + def generate_payload(length: int) -> bytes: + string_size = length - 0x1E + + return bytes.fromhex( + struct.pack(" fails instantly if the label does not exist test = test @@ -51,7 +49,6 @@ labels = # Use labels and -m instead of -e so that tox -m