diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..6b26875f1 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1 @@ +1d7b1abd152e4cb5e6a46e52e6b7e3bf8d366486 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..ecd85064f --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,6 @@ +source/guides/github-actions-ci-cd-sample/* @webknjaz +source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst @webknjaz + +# build-details.json +source/specifications/build-details/ @FFY00 +source/specifications/specs/build-details-*.json @FFY00 diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..fbc581cd6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: "Community Guidelines" + url: "https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md" + about: "Please make sure to follow the PSF Code of Conduct when participating in this repository." diff --git a/.github/ISSUE_TEMPLATE/general.yml b/.github/ISSUE_TEMPLATE/general.yml new file mode 100644 index 000000000..d41731613 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/general.yml @@ -0,0 +1,37 @@ +name: General Issue +description: Please fill out the form below to submit an issue. +labels: [] +assignees: [] + +body: + - type: markdown + attributes: + value: | + **Thanks for taking a minute to file an issue!** + + Read the [PSF Code of Conduct][CoC] first. + + ⚠ + Verify first that your issue is not [already reported on + GitHub][issue search]. 
+ + _Please fill out the form below with as many precise + details as possible._ + + [CoC]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + [issue search]: ../search?q=is%3Aissue&type=issues + + - type: textarea + attributes: + label: Issue Description + description: Please provide a detailed description of your issue. + placeholder: Describe your issue here... + validations: + required: true + + - type: checkboxes + attributes: + label: Code of Conduct + options: + - label: I am aware that participants in this repository must follow the PSF Code of Conduct. + required: true diff --git a/.github/sphinx_lint_matcher.json b/.github/sphinx_lint_matcher.json new file mode 100644 index 000000000..41896d8f1 --- /dev/null +++ b/.github/sphinx_lint_matcher.json @@ -0,0 +1,15 @@ +{ + "problemMatcher": [ + { + "owner": "sphinx-lint-problem-matcher", + "pattern": [ + { + "regexp": "^(.*):(\\d+):\\s+(.*)$", + "file": 1, + "line": 2, + "message": 3 + } + ] + } + ] +} diff --git a/.github/workflows/cron.yml b/.github/workflows/cron.yml new file mode 100644 index 000000000..8870bb70b --- /dev/null +++ b/.github/workflows/cron.yml @@ -0,0 +1,14 @@ +--- + +name: Cron + +on: + schedule: + - cron: "0 6 * * *" # daily at 6am + +jobs: + test: + if: github.repository_owner == 'pypa' # suppress noise in forks + uses: ./.github/workflows/test.yml + +... 
diff --git a/.github/workflows/pr-preview-links.yml b/.github/workflows/pr-preview-links.yml new file mode 100644 index 000000000..90ea9cc73 --- /dev/null +++ b/.github/workflows/pr-preview-links.yml @@ -0,0 +1,22 @@ +name: Read the Docs PR preview + +on: + pull_request_target: + types: + - opened + +permissions: + contents: read + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + documentation-links: + runs-on: ubuntu-latest + steps: + - uses: readthedocs/actions/preview@v1 + with: + project-slug: "python-packaging-user-guide" diff --git a/.github/workflows/test-translations.yml b/.github/workflows/test-translations.yml new file mode 100644 index 000000000..45dc60aa3 --- /dev/null +++ b/.github/workflows/test-translations.yml @@ -0,0 +1,81 @@ +name: Test translations + +on: + workflow_dispatch: + pull_request: + paths: + - '**.po' + branches: + - translation/source + push: + paths: + - '**.po' + branches: + - translation/source + +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +env: + I18N_BRANCH: translation/source + +jobs: + matrix: + runs-on: ubuntu-latest + outputs: + languages: ${{ steps.languages.outputs.languages }} + + steps: + - name: Grab the repo src + uses: actions/checkout@v4 + with: + ref: ${{ env.I18N_BRANCH }} + + - name: List languages + id: languages + working-directory: locales + run: | + list=$(find * -maxdepth 0 -type d | jq -nRc '[inputs]') + echo "languages=$list" >> $GITHUB_OUTPUT + + + test-translation: + runs-on: ubuntu-latest + needs: matrix + strategy: + fail-fast: false + matrix: + language: ${{fromJson(needs.matrix.outputs.languages)}} + + steps: + - name: Grab the repo src + uses: actions/checkout@v4 + with: + ref: ${{ env.I18N_BRANCH }} + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: >- + 
3.10 + + - name: Install Python tooling + run: python -m pip install --upgrade nox virtualenv sphinx-lint + + - name: Set Sphinx problem matcher + uses: sphinx-doc/github-problem-matcher@v1.0 + + - name: Build translated docs in ${{ matrix.language }} + run: nox -s build -- -q -D language=${{ matrix.language }} + + - name: Set Sphinx Lint problem matcher + if: always() + run: echo '::add-matcher::.github/sphinx_lint_matcher.json' + + - name: Lint translation file + if: always() + run: sphinx-lint locales/${{ matrix.language }}/LC_MESSAGES/messages.po diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..8503ca720 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,60 @@ +name: Test + +on: + merge_group: + push: + branches-ignore: + - gh-readonly-queue/** # Temporary merge queue-related GH-made branches + pull_request: + workflow_call: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +jobs: + build: + name: ${{ matrix.noxenv }} + if: ${{ github.repository_owner == 'pypa' || github.event_name != 'schedule' }} + runs-on: ubuntu-latest + strategy: + matrix: + noxenv: + - build + - linkcheck + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + cache: 'pip' + cache-dependency-path: 'requirements.txt' + + - name: Install dependencies + run: | + python -m pip install --upgrade nox virtualenv + + - name: Nox ${{ matrix.noxenv }} + run: | + python -m nox -s ${{ matrix.noxenv }} + + + check: + # This job does nothing and is only used for the branch protection + # or multi-stage CI jobs, like making sure that all tests pass before + # a publishing job is started. 
+ if: always() + + needs: + - build + + runs-on: ubuntu-latest + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/translation.yml b/.github/workflows/translation.yml new file mode 100644 index 000000000..7cfae2991 --- /dev/null +++ b/.github/workflows/translation.yml @@ -0,0 +1,79 @@ +name: Translation + +on: + workflow_run: + workflows: + - Test + branches: + - main + types: + - completed + +env: + I18N_BRANCH: translation/source + +jobs: + build: + runs-on: ubuntu-latest + if: github.repository_owner == 'pypa' + + steps: + - name: Grab the repo src + uses: actions/checkout@v3 + with: + fetch-depth: 0 # To reach the common commit + - name: Set up git user as [bot] + # Refs: + # * https://github.community/t/github-actions-bot-email-address/17204/6 + # * https://github.com/actions/checkout/issues/13#issuecomment-724415212 + uses: fregante/setup-git-user@v1.1.0 + + - name: Switch to the translation source branch + run: | + sh -x + + git fetch origin \ + '+refs/heads/${{ + env.I18N_BRANCH + }}:refs/remotes/origin/${{ + env.I18N_BRANCH + }}' + + git checkout -B '${{ env.I18N_BRANCH }}' \ + 'origin/${{ env.I18N_BRANCH }}' + + - name: >- + Merge '${{ github.event.repository.default_branch }}' + to '${{ env.I18N_BRANCH }}' + run: | + sh -x + + git merge '${{ github.event.repository.default_branch }}' + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: >- + 3.10 + + - name: Install Python tooling + run: python -m pip install --upgrade nox virtualenv + + - name: Generate a fresh POT file out of RST documents + run: python -m nox -s translation + + - name: Commit the POT file to Git + run: | + git_hash=$(git rev-parse --short "${GITHUB_SHA}") + git add --force locales/messages.pot + git diff-index --quiet HEAD || \ + git commit \ + -m "Update messages.pot as of version ${git_hash}" \ + locales/messages.pot + + - 
name: >- + Push the updated POT file back to '${{ env.I18N_BRANCH }}' + branch on GitHub + if: always() + run: | + git push --atomic origin 'HEAD:${{ env.I18N_BRANCH }}' diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml new file mode 100644 index 000000000..d99b6473c --- /dev/null +++ b/.github/workflows/zizmor.yml @@ -0,0 +1,38 @@ +# From https://woodruffw.github.io/zizmor/usage/#use-in-github-actions + +name: GitHub Actions Security Analysis with zizmor 🌈 + +on: + push: + branches: ["main"] + pull_request: + branches: ["**"] + +jobs: + zizmor: + name: zizmor latest via PyPI + runs-on: ubuntu-latest + permissions: + security-events: write + # required for workflows in private repositories + contents: read + actions: read + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v5 + + - name: Run zizmor 🌈 + run: uvx zizmor --format sarif source/guides/github-actions-ci-cd-sample/* > results.sarif + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: results.sarif + category: zizmor diff --git a/.gitignore b/.gitignore index db214a9ae..301e238bb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,7 @@ build/ .*~ +.nox +*.pyc +__pycache__ +.DS_Store +/locales/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..db8b1131a --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,43 @@ +ci: + autoupdate_schedule: quarterly + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + - id: end-of-file-fixer + - id: mixed-line-ending + - id: trailing-whitespace + +- repo: https://github.com/codespell-project/codespell + rev: v2.3.0 + hooks: 
+ - id: codespell + args: ["-L", "ned,ist,oder", "--skip", "*.po"] + +- repo: local + hooks: + - id: disallow-caps + name: Disallow improper capitalization + language: pygrep + entry: PyBind|Numpy|Cmake|CCache|Github|PyTest + exclude: .pre-commit-config.yaml + exclude_types: ["pofile"] + +- repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 + hooks: + - id: rst-backticks + - id: rst-directive-colons + - id: rst-inline-touching-normal + +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.7.1 + hooks: + - id: ruff + - id: ruff-format diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..90e9a9bbc --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,19 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +version: 2 + +sphinx: + configuration: source/conf.py + # The config file overrides the UI settings: + # https://github.com/pyca/cryptography/issues/5863#issuecomment-792343136 + builder: dirhtml + +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +python: + install: + - requirements: requirements.txt diff --git a/Makefile b/Makefile deleted file mode 100644 index c9bafe1f8..000000000 --- a/Makefile +++ /dev/null @@ -1,89 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = build - -# Internal variables. 
-PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." 
- -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/TheHithickersGuidetothePackaging.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/TheHithickersGuidetothePackaging.qhc" - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ - "run these through (pdf)latex." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." diff --git a/README.rst b/README.rst index d069b212e..eab80f1a8 100644 --- a/README.rst +++ b/README.rst @@ -1,29 +1,41 @@ Python Packaging User Guide =========================== +http://packaging.python.org + The "Python Packaging User Guide" (PyPUG) aims to be the authoritative resource on how to package and install distributions in Python using current tools. -The guide is currently maintained by the "Python Packaging Authority" (PyPA). -It was forked from the “Hitchhiker's Guide to Packaging” in March 2013, which was -maintained by Tarek Ziadé. Thank you Tarek for all your efforts in Python -packaging. - To follow the development of Python packaging, see the `Python -Packaging Authority `_. 
- -The html version of the Guide is currently available online at -http://packaging.python.org. - -The Python Packaging User Guide is licensed under a Creative Commons -Attribution-ShareAlike license: http://creativecommons.org/licenses/by-sa/3.0 . - +Packaging Authority `_. Code of Conduct --------------- Everyone interacting in the Python Packaging User Guide project's codebases, -issue trackers, chat rooms, and mailing lists is expected to follow the -`PyPA Code of Conduct`_. +issue trackers, chat rooms, and mailing lists are expected to follow the +`PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +This guide is community-maintained and contributions are welcome! Please see the +`contributing guide`_ for details on our writing style guide and how to build +the guide locally to test your changes. + +.. _contributing guide: https://packaging.python.org/contribute + +License +------- + +The Python Packaging User Guide is licensed under a Creative Commons +Attribution-ShareAlike license: http://creativecommons.org/licenses/by-sa/3.0 . + +History +------- -.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ +This Guide was forked from the “Hitchhiker's Guide to Packaging” in March 2013, +which was maintained by Tarek Ziadé. Thank you Tarek for all your efforts in +Python packaging. 
diff --git a/authors.txt b/authors.txt index 723ab7ea4..087255916 100644 --- a/authors.txt +++ b/authors.txt @@ -13,3 +13,4 @@ Nick Coghlan Paul Moore Reinout van Rees Tarek Ziadé +Thea Flowers diff --git a/extra/specifications/schemas/build-details-v1.0.schema.json b/extra/specifications/schemas/build-details-v1.0.schema.json new file mode 100644 index 000000000..9954ddab7 --- /dev/null +++ b/extra/specifications/schemas/build-details-v1.0.schema.json @@ -0,0 +1,261 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://packaging.python.org/en/latest/specifications/schemas/build-details-v1.0.schema.json", + "type": "object", + "title": "build-details.json — a static description file with build details of Python installations", + "required": [ + "schema_version", + "base_prefix", + "platform", + "language", + "implementation" + ], + "additionalProperties": false, + "properties": { + "schema_version": { + "type": "string", + "description": "Schema version.\n\nThis is a string following the format ``.``, where ```` and ```` are unpaded numbers and represent the **major** and **minor** components of the version. Versions may be arithmetically compared by intrepreting the version string as a decimal number.\n\nFor this specification version, this value is constant and **MUST** be ``1.0``.\n\nFuture versions of this schema **MUST** use a higher version number. 
Future versions of this schema **MUST NOT** use the same **major** version component as other schema version unless its specification is deemed backwards-compatible with them — it can't change, or extend, any parts of the current specification in such a way as the semantics of the interpreted data differ, or that data valid under the new specification is invalid under the older specification, with the exception of additional properties (errors caused by ``additionalProperties``).", + "const": "1.0" + }, + "base_prefix": { + "type": "string", + "description": "Base prefix of the Python installation.\n\nEither an absolute path, or a path relative to directory where this file is contained.", + "examples": [ + "/usr", + "../.." + ] + }, + "base_interpreter": { + "type": "string", + "description": "The path to the Python interprer of the base installation.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the installation provides an interpreter executable.", + "examples": [ + "/usr/bin/python", + "bin/python" + ] + }, + "platform": { + "type": "string", + "description": "System platform string.\n\nThis field **SHOULD** be equivalent to ``sysconfig.get_platform()``.", + "examples": [ + "linux-x86_64" + ] + }, + "language": { + "type": "object", + "description": "Object containing details related to the Python language specification.", + "required": [ + "version" + ], + "additionalProperties": false, + "properties": { + "version": { + "type": "string", + "description": "String representation the Python language version — a version string consisting only of the *major* and *minor* components.\n\nThis field **SHOULD** be equivalent to ``sysconfig.get_python_version()``.", + "examples": ["3.14"] + }, + "version_info": { + "type": "object", + "description": "Object in the format of :py:data:`sys.version_info`.\n\nThis section **SHOULD** be equivalent to :py:data:`sys.version_info`.", + "required": ["major", "minor", 
"micro", "releaselevel", "serial"], + "additionalProperties": false, + "examples": [ + { + "major": 3, + "minor": 14, + "micro": 1, + "releaselevel": "final", + "serial": 0 + } + ], + "properties": { + "major": { + "type": "number" + }, + "minor": { + "type": "number" + }, + "micro": { + "type": "number" + }, + "releaselevel": { + "type": "string", + "enum": ["alpha", "beta", "candidate", "final"] + }, + "serial": { + "type": "number" + } + } + } + } + }, + "implementation": { + "type": "object", + "description": "Object containing details related to Python implementation.\n\nThis section **SHOULD** be equivalent to :py:data:`sys.implementation`. It follows specification defined in PEP 421, meaning that on top of the required keys, implementation-specific keys can also exist, but must be prefixed with an underscore.", + "required": [ + "name", + "version", + "hexversion", + "cache_tag" + ], + "additionalProperties": true, + "properties": { + "name": { + "type": "string", + "description": "Lower-case name of the Python implementation.", + "examples": ["cpython", "pypy"] + }, + "version": { + "type": "object", + "description": "Object in the format of :py:data:`sys.version_info`, containing the implementation version.", + "required": ["major", "minor", "micro", "releaselevel", "serial"], + "additionalProperties": false, + "examples": [ + { + "major": 3, + "minor": 14, + "micro": 1, + "releaselevel": "final", + "serial": 0 + }, + { + "major": 7, + "minor": 3, + "micro": 16, + "releaselevel": "final", + "serial": 0 + } + ], + "properties": { + "major": { + "type": "number" + }, + "minor": { + "type": "number" + }, + "micro": { + "type": "number" + }, + "releaselevel": { + "type": "string", + "enum": ["alpha", "beta", "candidate", "final"] + }, + "serial": { + "type": "number" + } + } + } + } + }, + "abi": { + "type": "object", + "description": "Object containing details related to ABI.", + "required": [ + "flags" + ], + "additionalProperties": false, + "properties": { 
+ "flags": { + "type": "array", + "description": "Build configuration flags, used to calculate the extension suffix.\n\nThe flags **MUST** be defined in the order they appear on the extension suffix.", + "additionalProperties": true, + "examples": [ + ["t", "d"] + ] + }, + "extension_suffix": { + "type": "string", + "description": "Suffix used for extensions built against the current implementation version.\n\nThis field **MUST** be present if the Python implementation supports extensions, otherwise this entry will be missing.", + "examples": [ + ".cpython-314-x86_64-linux-gnu.so" + ] + }, + "stable_abi_suffix": { + "type": "string", + "description": "Suffix used for extensions built against the stable ABI.\n\nThis field **MUST** be present if the Python implementation has a stable ABI extension suffix, otherwise this entry will be missing.", + "examples": [ + ".abi3.so" + ] + } + } + }, + "suffixes": { + "type": "object", + "description": "Valid module suffixes grouped by type.\n\nThis section **MUST** be present if the Python installation supports importing external files, and it **SHOULD** be equivalent to the ``importlib.machinery.*_SUFFIXES`` attributes.\n\nAdditionally, if a Python implementation provides extension kinds other than the ones listed on ``importlib.machinery`` module, they **MAY** add a sub-section for them.", + "examples": [ + { + "source": [".py"], + "bytecode": [".pyc"], + "optimized_bytecode": [".pyc"], + "debug_bytecode": [".pyc"], + "extensions": [".cpython-313-x86_64-linux-gnu.so", ".abi3.so", ".so"] + } + ] + }, + "libpython": { + "type": "object", + "description": "Object containing details related to the ``libpython`` library.\n\nThis section **MUST** by present if Python installation provides a ``libpython`` library, otherwise this section will be missing.", + "additionalProperties": false, + "properties": { + "dynamic": { + "type": "string", + "description": "The path to the dynamic ``libpython`` library.\n\nEither an absolute path, 
or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python installation provides a dynamic ``libpython`` library, otherwise this entry will be missing.", + "examples": [ + "/usr/lib/libpython3.14.so.1.0", + "lib/libpython3.14.so.1.0" + ] + }, + "dynamic_stableabi": { + "type": "string", + "description": "The path to the dynamic ``libpython`` library for the stable ABI.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python installation provides a dynamic ``libpython`` library targeting the Stable ABI, otherwise this entry will be missing.\n\nIf this key is present ``dynamic`` **MUST** also be set.", + "examples": [ + "/usr/lib/libpython3.so", + "lib/libpython3.so" + ] + }, + "static": { + "type": "string", + "description": "The path to the static ``libpython`` library.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python installation provides a static ``libpython`` library, otherwise this entry will be missing.", + "examples": [ + "/usr/lib/python3.14/config-3.14-x86_64-linux-gnu/libpython3.14.a", + "lib/python3.14/config-3.14-x86_64-linux-gnu/libpython3.14.a" + ] + }, + "link_extensions": { + "type": "boolean", + "description": "Should extensions built against a dynamic ``libpython`` link to it?\n\nThis field **MUST** be present if the Python installation provides a dynamic ``libpython`` library, otherwise this entry will be missing." 
+ } + } + }, + "c_api": { + "type": "object", + "description": "Object containing details related to the Python C API.\n\nThis section **MUST** be present if the Python implementation provides a C API, otherwise this section will be missing.", + "required": [ + "headers" + ], + "additionalProperties": false, + "properties": { + "headers": { + "type": "string", + "description": "The path to the C API headers.\n\nEither an absolute path, or a path relative to ``base_prefix``.", + "examples": [ + "/usr/include/python3.14", + "include/python3.14" + ] + }, + "pkgconfig_path": { + "type": "string", + "description": "The path to the pkg-config definition files.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python implementation provides pkg-config definition files, otherwise this section will be missing.", + "examples": [ + "/usr/lib/pkgconfig", + "lib/pkgconfig" + ] + } + } + }, + "arbitrary_data": { + "type": "object", + "description": "Object containing extra arbitrary data.\n\nThis is meant to be used as an escape-hatch, to include any relevant data that is not covered by this specification. Implementations may choose what data to provide in this section.", + "additionalProperties": true + } + } +} diff --git a/make.bat b/make.bat deleted file mode 100644 index 354767b06..000000000 --- a/make.bat +++ /dev/null @@ -1,113 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -set SPHINXBUILD=sphinx-build -set BUILDDIR=build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. pickle to make pickle files - echo. json to make JSON files - echo. 
htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. changes to make an overview over all changed/added/deprecated items - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\TheHithickersGuidetothePackaging.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\TheHithickersGuidetothePackaging.ghc - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - echo. 
- echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -:end diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 000000000..698e82f9d --- /dev/null +++ b/noxfile.py @@ -0,0 +1,106 @@ +# Copyright 2017, PyPA +# The Python Packaging User Guide is licensed under a Creative Commons +# Attribution-ShareAlike license: +# http://creativecommons.org/licenses/by-sa/3.0. + + +import nox + +nox.options.sessions = [] + + +@nox.session() +def translation(session): + """ + Build the gettext .pot files. + """ + session.install("-r", "requirements.txt") + target_dir = "locales" + session.run( + "sphinx-build", + "-b", + "gettext", # build gettext-style message catalogs (.pot file) + "-d", + session.cache_dir / ".doctrees", # path to put the cache + "source/", # where the rst files are located + target_dir, # where to put the .pot file + ) + + +@nox.session() +def build(session, autobuild=False): + """ + Make the website. 
+ """ + session.install("-r", "requirements.txt") + + if autobuild: + command = "sphinx-autobuild" + extra_args = "--host", "0.0.0.0" + else: + # NOTE: This branch adds options that are unsupported by autobuild + command = "sphinx-build" + extra_args = ( + "--color", # colorize the output + "--keep-going", # don't interrupt the build on the first warning + ) + + session.run( + command, + *extra_args, + "-j", + "auto", # parallelize the build + "-b", + "html", # use HTML builder + "-d", + session.cache_dir / ".doctrees", # path to put the cache + "-n", # nitpicky warn about all missing references + "-W", # Treat warnings as errors. + *session.posargs, + "source", # where the rst files are located + "build", # where to put the html output + ) + + +@nox.session() +def preview(session): + """ + Make and preview the website. + """ + session.install("sphinx-autobuild") + build(session, autobuild=True) + + +@nox.session() +def linkcheck(session): + """ + Check for broken links. + """ + session.install("-r", "requirements.txt") + session.run( + "sphinx-build", + "-b", + "linkcheck", # use linkcheck builder + "-d", + session.cache_dir / ".doctrees", # path to put the cache + "--color", + "-n", + "-W", + "--keep-going", # be strict + "source", # where the rst files are located + "build", # where to put the check output + ) + + +@nox.session() +def checkqa(session): + """ + Format the guide using pre-commit. 
+ """ + session.install("pre-commit") + session.run( + "pre-commit", + "run", + "--all-files", + "--show-diff-on-failure", + ) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..5c710c86b --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +furo==2024.8.6 +sphinx==7.2.6 +sphinx-autobuild==2021.3.14 +sphinx-inline-tabs==2023.4.21 +sphinx-copybutton==0.5.2 +sphinx-toolbox==3.5.0 +sphinx-jsonschema==1.19.1 diff --git a/source/additional.rst b/source/additional.rst deleted file mode 100644 index ace4e912f..000000000 --- a/source/additional.rst +++ /dev/null @@ -1,27 +0,0 @@ -================= -Additional Topics -================= - -:Last Reviewed: 2015-09-08 - -This section covers a variety of packaging concepts and topics that don't fit -neatly into the documentation of any particular :ref:`project ` or in -either of our guides on :doc:`installing` or :doc:`distributing`. - -.. toctree:: - :maxdepth: 1 - - install_requirements_linux - extensions - multiple_python_versions - patching - mirrors - single_source_version - requirements - pip_easy_install - wheel_egg - science - deployment - multi_version_install - appveyor - self_hosted_repository diff --git a/source/assets/py.png b/source/assets/py.png new file mode 100644 index 000000000..93e4a02c3 Binary files /dev/null and b/source/assets/py.png differ diff --git a/source/assets/py_pkg_applications.png b/source/assets/py_pkg_applications.png new file mode 100644 index 000000000..bc3187dbf Binary files /dev/null and b/source/assets/py_pkg_applications.png differ diff --git a/source/assets/py_pkg_tools_and_libs.png b/source/assets/py_pkg_tools_and_libs.png new file mode 100644 index 000000000..707f1081d Binary files /dev/null and b/source/assets/py_pkg_tools_and_libs.png differ diff --git a/source/conf.py b/source/conf.py index 1ab6e2eef..d41db4957 100644 --- a/source/conf.py +++ b/source/conf.py @@ -1,221 +1,223 @@ -# -*- coding: utf-8 -*- -# -# The Python Packaging Guide 
documentation build configuration file, created by -# sphinx-quickstart on Sun Dec 13 14:07:23 2009. -# -# This file is execfile()d with the current directory set to its containing -# dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import glob - -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.append(os.path.abspath('.')) - -# -- General configuration ---------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.intersphinx'] - -intersphinx_cache_limit = 0 -intersphinx_mapping = { - 'pip': ('https://pip.pypa.io/en/latest/', None), - 'pypa': ('https://pypa.io/en/latest/', None), - } - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['themes/pug/templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -# source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Python Packaging User Guide' -copyright = u'2013–2015, PyPA' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '' -# The full version, including alpha/beta/rc tags. -release = '' - -# The language for content autogenerated by Sphinx. 
Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of documents that shouldn't be included in the build. -# unused_docs = [] - -# List of directories, relative to source directory, that shouldn't be searched -# for source files. -exclude_trees = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None +# -- Project information --------------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - - -# -- Options for HTML output -------------------------------------------------- +import os -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. 
-html_theme = 'default' -if not on_rtd: - try: - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - except ImportError: - pass +# Some options are only enabled for the main packaging.python.org deployment builds +RTD_BUILD = bool(os.getenv("READTHEDOCS")) +RTD_PR_BUILD = RTD_BUILD and os.getenv("READTHEDOCS_VERSION_TYPE") == "external" +RTD_URL = os.getenv("READTHEDOCS_CANONICAL_URL") +RTD_CANONICAL_BUILD = ( + RTD_BUILD and not RTD_PR_BUILD and "packaging.python.org" in RTD_URL +) +project = "Python Packaging User Guide" -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} +copyright = "2013–2020, PyPA" +author = "Python Packaging Authority" -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = [] -if not on_rtd: - try: - import sphinx_rtd_theme - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - except ImportError: - pass +# -- General configuration ------------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None +root_doc = "index" -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None +extensions = [ + "sphinx.ext.extlinks", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx_inline_tabs", + "sphinx_copybutton", + "sphinx_toolbox.collapse", + "sphinx-jsonschema", +] -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. 
-# html_logo = None +nitpicky = True +nitpick_ignore = [ + ("envvar", "PATH"), + ("py:func", "find_packages"), + ("py:func", "setup"), + ("py:func", "importlib.metadata.entry_points"), + ("py:class", "importlib.metadata.EntryPoint"), + ("py:func", "setuptools.find_namespace_packages"), + ("py:func", "setuptools.find_packages"), + ("py:func", "setuptools.setup"), +] -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None +default_role = "any" +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['themes/pug/static'] +# -- Options for internationalization -------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-internationalization -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' +language = "en" -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True +locale_dirs = ["../locales"] -# Custom sidebar templates, maps document names to template names. -html_sidebars = {} +gettext_auto_build = True +gettext_compact = "messages" +gettext_location = True -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} +# -- Options for HTML output ----------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -# If false, no module index is generated. 
-# html_use_modindex = True +html_title = "Python Packaging User Guide" +html_theme = "furo" -# If false, no index is generated. -# html_use_index = True +html_theme_options = { + "source_edit_link": "https://github.com/pypa/packaging.python.org/edit/main/source/%7Bfilename%7D", + "source_view_link": "https://github.com/pypa/packaging.python.org/blob/main/source/%7Bfilename%7D?plain=true", +} -# If true, the index is split into individual pages for each letter. -# html_split_index = False +html_favicon = "assets/py.png" +html_last_updated_fmt = "" -# If true, links to the reST sources are added to the pages. -html_show_sourcelink = False +_metrics_js_files = [ + ( + "https://analytics.python.org/js/script.outbound-links.js", + {"data-domain": "packaging.python.org", "defer": "defer"}, + ), +] +html_js_files = [] +if RTD_CANONICAL_BUILD: + # Enable collection of the visitor metrics reported at + # https://plausible.io/packaging.python.org + html_js_files.extend(_metrics_js_files) + +html_extra_path = [ + "../extra", +] -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' +# -- Options for HTML help output ------------------------------------------------------ +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-help-output -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = '' +htmlhelp_basename = "pythonpackagingguide-authdoc" -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'pythonpackagingguide' +# -- Options for LaTeX output ---------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-latex-output +latex_elements = {} +latex_documents = [ + ( + root_doc, + "pythonpackagingguide.tex", + "Python Packaging User Guide", + "Python Packaging Authority", + "manual", + ), +] -# -- Options for LaTeX output ------------------------------------------------- +# -- Options for manual page output ---------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-manual-page-output -# The paper size ('letter' or 'a4'). -# latex_paper_size = 'letter' +man_pages = [ + (root_doc, "pythonpackagingguide", "Python Packaging User Guide", [author], 1) +] -# The font size ('10pt', '11pt' or '12pt'). -# latex_font_size = '10pt' +# -- Options for Texinfo output -------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-texinfo-output + +texinfo_documents = [ + ( + root_doc, + "pythonpackagingguide", + "Python Packaging User Guide", + author, + "pythonpackagingguide", + "One line description of project.", + "Miscellaneous", + ), +] -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, -# documentclass [howto/manual]). 
-latex_documents = [ - ('index', 'PythonPackagingGuide.tex', u'Python Packaging Guide', - u'PyPA', 'manual'), +# -- Options for the linkcheck builder ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-the-linkcheck-builder + +linkcheck_ignore = [ + "http://localhost//d+", + "https://packaging.python.org/en/latest/specifications/schemas/.*", + "https://test.pypi.org/project/example-package-YOUR-USERNAME-HERE", + "https://pypi.org/manage/*", + "https://test.pypi.org/manage/*", + # Temporarily ignored. Ref: + # https://github.com/pypa/packaging.python.org/pull/1308#issuecomment-1775347690 + "https://www.breezy-vcs.org/*", + # Ignore while StackOverflow is blocking GitHub CI. Ref: + # https://github.com/pypa/packaging.python.org/pull/1474 + "https://stackoverflow.com/*", + "https://pyscaffold.org/*", + "https://anaconda.org/", + "https://www.cisa.gov/sbom", +] +linkcheck_retries = 5 +# Ignore anchors for common targets when we know they likely won't be found +linkcheck_anchors_ignore_for_url = [ + # GitHub synthesises anchors in JavaScript, so Sphinx can't find them in the HTML + r"https://github/.com/", + # While PyPI has its botscraping defenses active, Sphinx can't resolve the anchors + # https://github.com/pypa/packaging.python.org/issues/1744 + r"https://pypi/.org/", ] -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None +# -- Options for extlinks ---------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html#configuration -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. 
-# latex_use_parts = False +github_url = "https://github.com/" +github_repo_org = "pypa" +github_repo_name = "packaging.python.org" +github_repo_slug = f"{github_repo_org}/{github_repo_name}" +github_repo_url = f"{github_url}/{github_repo_slug}" +github_repo_issues_url = f"{github_url}/{github_repo_slug}/issues" +github_sponsors_url = f"{github_url}/sponsors" -# Additional stuff for the LaTeX preamble. -# latex_preamble = '' +extlinks = { + "issue": (f"{github_repo_issues_url}/%s", "#%s"), + "pr": (f"{github_repo_url}/pull/%s", "PR #%s"), + "commit": (f"{github_repo_url}/commit/%s", "%s"), + "gh": (f"{github_url}/%s", "GitHub: %s"), + "user": (f"{github_sponsors_url}/%s", "@%s"), +} -# Documents to append as an appendix to all manuals. -# latex_appendices = [] +# -- Options for intersphinx ---------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration -# If false, no module index is generated. -# latex_use_modindex = True +intersphinx_mapping = { + "boltons": ("https://boltons.readthedocs.io/en/latest/", None), + "bottle": ("https://bottlepy.org/docs/dev/", None), + "build": ("https://pypa-build.readthedocs.io/en/stable/", None), + "cffi": ("https://cffi.readthedocs.io/en/latest/", None), + "conda": ("https://conda.io/en/latest/", None), + "devpi": ("https://devpi.net/docs/devpi/devpi/latest/+doc", None), + "dh-virtualenv": ("https://dh-virtualenv.readthedocs.io/en/latest/", None), + "distlib": ("https://distlib.readthedocs.io/en/latest/", None), + "flexx": ("https://flexx.readthedocs.io/en/latest/", None), + "flit": ("https://flit.pypa.io/en/stable/", None), + "nox": ("https://nox.thea.codes/en/latest/", None), + "numpy": ("https://numpy.org/doc/stable/", None), + "openstack": ("https://docs.openstack.org/glance/latest/", None), + "packaging": 
("https://packaging.pypa.io/en/latest/", None), + "pip": ("https://pip.pypa.io/en/latest/", None), + "pipenv": ("https://pipenv.pypa.io/en/latest/", None), + "piwheels": ("https://piwheels.readthedocs.io/en/latest/", None), + "pybind11": ("https://pybind11.readthedocs.io/en/stable/", None), + "pynsist": ("https://pynsist.readthedocs.io/en/latest/", None), + "pypa": ("https://www.pypa.io/en/latest/", None), + "python": ("https://docs.python.org/3", None), + "python-guide": ("https://docs.python-guide.org/", None), + "setuptools": ("https://setuptools.pypa.io/en/latest/", None), + "spack": ("https://spack.readthedocs.io/en/latest/", None), + "sphinx": ("https://www.sphinx-doc.org/en/master", None), + "tox": ("https://tox.wiki/en/latest/", None), + "twine": ("https://twine.readthedocs.io/en/stable/", None), + "virtualenv": ("https://virtualenv.pypa.io/en/stable/", None), + "warehouse": ("https://warehouse.pypa.io/", None), +} + +# -- Options for todo extension -------------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/todo.html#configuration + +todo_include_todos = True + +# -- Options for sphinx-copybutton ----------------------------------------------------- +# https://sphinx-copybutton.readthedocs.io/en/latest/use.html + +copybutton_prompt_text = r">>> |\.\.\. |\$ |> " +copybutton_prompt_is_regexp = True diff --git a/source/contribute.rst b/source/contribute.rst new file mode 100644 index 000000000..cf5314b8d --- /dev/null +++ b/source/contribute.rst @@ -0,0 +1,292 @@ +.. |PyPUG| replace:: Python Packaging User Guide + +************************ +Contribute to this guide +************************ + +The |PyPUG| welcomes contributors! 
There are lots of ways to help out, +including: + +* Reading the guide and giving feedback +* Reviewing new contributions +* Revising existing content +* Writing new content +* Translating the guide + +Most of the work on the |PyPUG| takes place on the +`project's GitHub repository`__. To get started, check out the list of +`open issues`__ and `pull requests`__. If you're planning to write or edit +the guide, please read the :ref:`style guide `. + +.. __: https://github.com/pypa/packaging.python.org/ +.. __: https://github.com/pypa/packaging.python.org/issues +.. __: https://github.com/pypa/packaging.python.org/pulls + +By contributing to the |PyPUG|, you're expected to follow the PSF's +`Code of Conduct`__. + +.. __: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + + +Documentation types +=================== + +This project consists of four distinct documentation types with specific +purposes. The project aspires to follow the `Diátaxis process`_ +for creating quality documentation. When proposing new additions to the project please pick the +appropriate documentation type. + +.. _Diátaxis process: https://diataxis.fr/ + +Tutorials +--------- + +Tutorials are focused on teaching the reader new concepts by accomplishing a +goal. They are opinionated step-by-step guides. They do not include extraneous +warnings or information. `example tutorial-style document`_. + +.. _example tutorial-style document: https://docs.djangoproject.com/en/dev/intro/ + +Guides +------ + +Guides are focused on accomplishing a specific task and can assume some level of +pre-requisite knowledge. These are similar to tutorials, but have a narrow and +clear focus and can provide lots of caveats and additional information as +needed. They may also discuss multiple approaches to accomplishing the task. +:doc:`example guide-style document `. + +Discussions +----------- + +Discussions are focused on understanding and information. 
These explore a +specific topic without a specific goal in mind. :doc:`example discussion-style +document `. + +Specifications +-------------- + +Specifications are reference documentation focused on comprehensively documenting +an agreed-upon interface for interoperability between packaging tools. +:doc:`example specification-style document `. + + +Translations +============ + +We use `Weblate`_ to manage translations of this project. +Please visit the `packaging.python.org`_ project on Weblate to contribute. + +If you are experiencing issues while you are working on translations, +please open an issue on `GitHub`_. + +.. tip:: + + Any translations of this project should follow `reStructuredText syntax`_. + +.. _Weblate: https://weblate.org/ +.. _packaging.python.org: https://hosted.weblate.org/projects/pypa/packaging-python-org/ +.. _GitHub: https://github.com/pypa/packaging.python.org/issues +.. _reStructuredText syntax: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html + +Adding a language +----------------- + +If your language is not listed on `packaging.python.org`_, click the button +:guilabel:`Start new translation` at the bottom of the language list and add +the language you want to translate. + +Following reStructuredText syntax +--------------------------------- + +If you are not familiar with reStructuredText (RST) syntax, please read `this guide`_ +before translating on Weblate. + +**Do not translate the text in reference directly** + + When translating the text in reference, please do not translate them directly. + + | Wrong: Translate the following text directly: + + .. code-block:: rst + + `some ref`_ -> `TRANSLATED TEXT HERE`_ + + | Right: Translate the following text with your own language and add the original reference: + + .. code-block:: rst + + `some ref`_ -> `TRANSLATED TEXT HERE `_ + +.. 
_this guide: https://docutils.sourceforge.io/docs/user/rst/quickref.html + +Building the guide locally +========================== + +Though not required to contribute, it may be useful to build this guide locally +in order to test your changes. In order to build this guide locally, you'll +need: + +1. :doc:`Nox `. You can install or upgrade + nox using ``pip``: + + .. code-block:: bash + + python -m pip install --user nox + +2. Python 3.11. Our build scripts are usually tested with Python 3.11 only. + See the :doc:`Hitchhiker's Guide to Python installation instructions ` + to install Python 3.11 on your operating system. + +To build the guide, run the following shell command in the project's root folder: + +.. code-block:: bash + + nox -s build + +After the process has completed you can find the HTML output in the +``./build/html`` directory. You can open the ``index.html`` file to view the +guide in web browser, but it's recommended to serve the guide using an HTTP +server. + +You can build the guide and serve it via an HTTP server using the following +command: + +.. code-block:: bash + + nox -s preview + +The guide will be browsable via http://localhost:8000. + + +Where the guide is deployed +=========================== + +The guide is deployed via ReadTheDocs and the configuration lives at https://readthedocs.org/projects/python-packaging-user-guide/. It's served from a custom domain and fronted by Fast.ly. + + +.. _contributing_style_guide: + +Style guide +=========== + +This style guide has recommendations for how you should write the |PyPUG|. +Before you start writing, please review it. By following the style guide, your +contributions will help add to a cohesive whole and make it easier for your +contributions to be accepted into the project. + + +Purpose +------- + +The purpose of the |PyPUG| is to be the authoritative resource on how to +package, publish, and install Python projects using current tools. 
+ + +Scope +----- + +The guide is meant to answer questions and solve problems with accurate and +focused recommendations. + +The guide isn't meant to be comprehensive and it's not meant to replace +individual projects' documentation. For example, pip has dozens of commands, +options, and settings. The pip documentation describes each of them in detail, +while this guide describes only the parts of pip that are needed to complete the +specific tasks described in this guide. + + +Audience +-------- + +The audience of this guide is anyone who uses Python with packages. + +Don't forget that the Python community is big and welcoming. Readers may not +share your age, gender, education, culture, and more, but they deserve to learn +about packaging just as much as you do. + +In particular, keep in mind that not all people who use Python see themselves as +programmers. The audience of this guide includes astronomers or painters or +students as well as professional software developers. + + +Voice and tone +-------------- + +When writing this guide, strive to write with a voice that's approachable and +humble, even if you have all the answers. + +Imagine you're working on a Python project with someone you know to be smart and +skilled. You like working with them and they like working with you. That person +has asked you a question and you know the answer. How do you respond? *That* is +how you should write this guide. + +Here's a quick check: try reading aloud to get a sense for your writing's voice +and tone. Does it sound like something you would say or does it sound like +you're acting out a part or giving a speech? Feel free to use contractions and +don't worry about sticking to fussy grammar rules. You are hereby granted +permission to end a sentence in a preposition, if that's what you want to end it +with. + +When writing the guide, adjust your tone for the seriousness and difficulty of +the topic. 
If you're writing an introductory tutorial, it's OK to make a joke, +but if you're covering a sensitive security recommendation, you might want to +avoid jokes altogether. + + +Conventions and mechanics +------------------------- + +**Write to the reader** + When giving recommendations or steps to take, address the reader as *you* + or use the imperative mood. + + | Wrong: To install it, the user runs… + | Right: You can install it by running… + | Right: To install it, run… + +**State assumptions** + Avoid making unstated assumptions. Reading on the web means that any page of + the guide may be the first page of the guide that the reader ever sees. + If you're going to make assumptions, then say what assumptions that you're + going to make. + +**Cross-reference generously** + The first time you mention a tool or practice, link to the part of the + guide that covers it, or link to a relevant document elsewhere. Save the + reader a search. + +**Respect naming practices** + When naming tools, sites, people, and other proper nouns, use their preferred + capitalization. + + | Wrong: Pip uses… + | Right: pip uses… + | + | Wrong: …hosted on github. + | Right: …hosted on GitHub. + +**Use a gender-neutral style** + Often, you'll address the reader directly with *you*, *your* and *yours*. + Otherwise, use gender-neutral pronouns *they*, *their*, and *theirs* or avoid + pronouns entirely. + + | Wrong: A maintainer uploads the file. Then he… + | Right: A maintainer uploads the file. Then they… + | Right: A maintainer uploads the file. Then the maintainer… + +**Headings** + Write headings that use words the reader is searching for. A good way to + do this is to have your heading complete an implied question. For example, a + reader might want to know *How do I install MyLibrary?* so a good heading + might be *Install MyLibrary*. + + In section headings, use sentence case. In other words, write headings as you + would write a typical sentence. 
+ + | Wrong: Things You Should Know About Python + | Right: Things you should know about Python + +**Numbers** + In body text, write numbers one through nine as words. For other numbers or + numbers in tables, use numerals. diff --git a/source/current.rst b/source/current.rst deleted file mode 100644 index b731092cb..000000000 --- a/source/current.rst +++ /dev/null @@ -1,96 +0,0 @@ -.. _`Tool Recommendations`: - -==================== -Tool Recommendations -==================== - -:Page Status: Complete -:Last Reviewed: 2016-06-24 - -If you're familiar with Python packaging and installation, and just want to know -what tools are currently recommended, then here it is. - - -Installation Tool Recommendations -================================= - -* Use :ref:`pip` to install Python :term:`packages ` from - :term:`PyPI `. [1]_ [2]_ Depending how :ref:`pip` - is installed, you may need to also install :ref:`wheel` to get the benefit - of wheel caching. [3]_ - -* Use :ref:`virtualenv`, or `venv`_ to isolate application specific - dependencies from a shared Python installation. [4]_ - -* If you're looking for management of fully integrated cross-platform software - stacks, consider: - - * :ref:`buildout`: primarily focused on the web development community - - * :ref:`spack`, :ref:`hashdist`, or :ref:`conda`: primarily focused - on the scientific community. - - - -Packaging Tool Recommendations -============================== - -* Use :ref:`setuptools` to define projects and create :term:`Source Distributions - `. [5]_ [6]_ - -* Use the ``bdist_wheel`` :ref:`setuptools` extension available from the - :ref:`wheel project ` to create :term:`wheels `. This is - especially beneficial, if your project contains binary extensions. [7]_ - -* Use `twine `_ for uploading distributions - to :term:`PyPI `. - - ----- - -.. [1] There are some cases where you might choose to use ``easy_install`` (from - :ref:`setuptools`), e.g. 
if you need to install from :term:`Eggs ` - (which pip doesn't support). For a detailed breakdown, see :ref:`pip vs - easy_install`. - -.. [2] The acceptance of :pep:`453` means that :ref:`pip` - will be available by default in most installations of Python 3.4 or - later. See the :pep:`rationale section <453#rationale>` from :pep:`453` - as for why pip was chosen. - -.. [3] :ref:`get-pip.py ` and :ref:`virtualenv` install - :ref:`wheel`, whereas :ref:`ensurepip` and :ref:`venv ` do not - currently. Also, the common "python-pip" package that's found in various - linux distros, does not depend on "python-wheel" currently. - -.. [4] Beginning with Python 3.4, ``venv`` will create virtualenv environments - with ``pip`` installed, thereby making it an equal alternative to - :ref:`virtualenv`. However, using :ref:`virtualenv` will still be - recommended for users that need cross-version consistency. - -.. [5] Although you can use pure ``distutils`` for many projects, it does not - support defining dependencies on other projects and is missing several - convenience utilities for automatically populating distribution metadata - correctly that are provided by ``setuptools``. Being outside the - standard library, ``setuptools`` also offers a more consistent feature - set across different versions of Python, and (unlike ``distutils``), - ``setuptools`` will be updated to produce the upcoming "Metadata 2.0" - standard formats on all supported versions. - - Even for projects that do choose to use ``distutils``, when :ref:`pip` - installs such projects directly from source (rather than installing - from a prebuilt :term:`wheel ` file), it will actually build - your project using :ref:`setuptools` instead. - -.. [6] `distribute`_ (a fork of setuptools) was merged back into - :ref:`setuptools` in June 2013, thereby making setuptools the default - choice for packaging. - -.. 
[7] :term:`PyPI ` currently only allows - uploading Windows and Mac OS X wheels, and they should be compatible with - the binary installers provided for download from python.org. Enhancements - will have to be made to the :pep:`wheel compatibility tagging scheme - <425>` before linux wheels will be allowed. - -.. _distribute: https://pypi.python.org/pypi/distribute -.. _venv: https://docs.python.org/3/library/venv.html diff --git a/source/deployment.rst b/source/deployment.rst deleted file mode 100644 index 31fbd66a5..000000000 --- a/source/deployment.rst +++ /dev/null @@ -1,102 +0,0 @@ - -====================== -Application Deployment -====================== - -:Page Status: Incomplete -:Last Reviewed: 2014-11-11 - -.. contents:: Contents - :local: - - -Overview -======== - - -Supporting multiple hardware platforms --------------------------------------- - -:: - - FIXME - - Meaning: x86, x64, ARM, others? - - For Python-only distributions, it *should* be straightforward to deploy on all - platforms where Python can run. - - For distributions with binary extensions, deployment is major headache. Not only - must the extensions be built on all the combinations of operating system and - hardware platform, but they must also be tested, preferably on continuous - integration platforms. The issues are similar to the "multiple python - versions" section above, not sure whether this should be a separate section. - Even on Windows x64, both the 32 bit and 64 bit versions of Python enjoy - significant usage. 
- - - -OS Packaging & Installers -========================= - -:: - - FIXME - - - Building rpm/debs for projects - - Building rpms/debs for whole virtualenvs - - Building Mac OS X installers for Python projects - - Building Android APKs with Kivy+P4A or P4A & Buildozer - -Windows -------- - -:: - - FIXME - - - Building Windows installers for Python projects - -Pynsist -^^^^^^^ - -`Pynsist `__ is a tool that bundles Python -programs together with the Python-interpreter into a single installer based on -NSIS. In most cases, packaging only requires the user to choose a version of -the Python-interpreter and declare the dependencies of the program. The tool -downloads the specified Python-interpreter for Windows and packages it with all -the dependencies in a single Windows-executable installer. - -The installer installs or updates the Python-interpreter on the users system, -which can be used independently of the packaged program. The program itself, -can be started from a shortcut, that the installer places in the start-menu. -Uninstalling the program leaves the Python installation of the user intact. - -A big advantage of pynsist is that the Windows packages can be built on Linux. -There are several examples for different kinds of programs (console, GUI) in -the `documentation `__. The tool is released -under the MIT-licence. 
- -Application Bundles -=================== - -:: - - FIXME - - - py2exe/py2app/PEX - - wheels kinda/sorta - - -Configuration Management -======================== - -:: - - FIXME - - puppet - salt - chef - ansible - fabric diff --git a/source/discussions/deploying-python-applications.rst b/source/discussions/deploying-python-applications.rst new file mode 100644 index 000000000..e10f36f9c --- /dev/null +++ b/source/discussions/deploying-python-applications.rst @@ -0,0 +1,139 @@ + +============================= +Deploying Python applications +============================= + +:Page Status: Incomplete +:Last Reviewed: 2021-8-24 + + +Overview +======== + + +Supporting multiple hardware platforms +-------------------------------------- + +:: + + FIXME + + Meaning: x86, x64, ARM, others? + + For Python-only distributions, it *should* be straightforward to deploy on all + platforms where Python can run. + + For distributions with binary extensions, deployment is a major headache. Not only + must the extensions be built on all the combinations of operating system and + hardware platform, but they must also be tested, preferably on continuous + integration platforms. The issues are similar to the "multiple Python + versions" section above, not sure whether this should be a separate section. + Even on Windows x64, both the 32 bit and 64 bit versions of Python enjoy + significant usage. + + + +OS packaging & installers +========================= + +:: + + FIXME + + - Building rpm/debs for projects + - Building rpms/debs for whole virtualenvs + - Building macOS installers for Python projects + - Building Android APKs with Kivy+P4A or P4A & Buildozer + +Windows +------- + +:: + + FIXME + + - Building Windows installers for Python projects + +Pynsist +^^^^^^^ + +`Pynsist `__ is a tool that bundles Python +programs together with the Python-interpreter into a single installer based on +NSIS. 
In most cases, packaging only requires the user to choose a version of +the Python-interpreter and declare the dependencies of the program. The tool +downloads the specified Python-interpreter for Windows and packages it with all +the dependencies in a single Windows-executable installer. + +The installed program can be started from a shortcut that the installer adds to +the start-menu. It uses a Python interpreter installed within its application +directory, independent of any other Python installation on the computer. + +A big advantage of Pynsist is that the Windows packages can be built on Linux. +There are several examples for different kinds of programs (console, GUI) in +the :any:`documentation `. The tool is released +under the MIT-licence. + +Application bundles +=================== + +:: + + FIXME + + - wheels kinda/sorta + +Windows +------- + +py2exe +^^^^^^ + +`py2exe `__ is a distutils extension which +allows to build standalone Windows executable programs (32-bit and 64-bit) +from Python scripts. Python versions included in the official development +cycle are supported (refers to `Status of Python branches`__). py2exe can +build console executables and windows (GUI) executables. Building windows +services, and DLL/EXE COM servers might work but it is not actively supported. +The distutils extension is released under the MIT-licence and Mozilla +Public License 2.0. + +.. __: https://devguide.python.org/#status-of-python-branches + +macOS +----- + +py2app +^^^^^^ + +`py2app `__ is a Python setuptools +command which will allow you to make standalone macOS application +bundles and plugins from Python scripts. Note that py2app MUST be used +on macOS to build applications, it cannot create Mac applications on other +platforms. py2app is released under the MIT-license. 
+ +Unix (including Linux and macOS) +----------------------------------- + +pex +^^^ + +`pex `__ is a library for generating .pex +(Python EXecutable) files which are executable Python environments in the +spirit of virtualenvs. pex is an expansion upon the ideas outlined in :pep:`441` +and makes the deployment of Python applications as simple as cp. pex files may +even include multiple platform-specific Python distributions, meaning that a +single pex file can be portable across Linux and macOS. pex is released under the +Apache License 2.0. + +Configuration management +======================== + +:: + + FIXME + + puppet + salt + chef + ansible + fabric diff --git a/source/discussions/distribution-package-vs-import-package.rst b/source/discussions/distribution-package-vs-import-package.rst new file mode 100644 index 000000000..65e7019c6 --- /dev/null +++ b/source/discussions/distribution-package-vs-import-package.rst @@ -0,0 +1,110 @@ +.. _distribution-package-vs-import-package: + +======================================= +Distribution package vs. import package +======================================= + +A number of different concepts are commonly referred to by the word +"package". This page clarifies the differences between two distinct but +related meanings in Python packaging, "distribution package" and "import +package". + +What's a distribution package? +============================== + +A distribution package is a piece of software that you can install. +Most of the time, this is synonymous with "project". When you type ``pip +install pkg``, or when you write ``dependencies = ["pkg"]`` in your +``pyproject.toml``, ``pkg`` is the name of a distribution package. When +you search or browse the PyPI_, the most widely known centralized source for +installing Python libraries and tools, what you see is a list of distribution +packages. 
Alternatively, the term "distribution package" can be used to +refer to a specific file that contains a certain version of a project. + +Note that in the Linux world, a "distribution package", +most commonly abbreviated as "distro package" or just "package", +is something provided by the system package manager of the `Linux distribution `_, +which is a different meaning. + + +What's an import package? +========================= + +An import package is a Python module. Thus, when you write ``import +pkg`` or ``from pkg import func`` in your Python code, ``pkg`` is the +name of an import package. More precisely, import packages are special +Python modules that can contain submodules. For example, the ``numpy`` +package contains modules like ``numpy.linalg`` and +``numpy.fft``. Usually, an import package is a directory on the file +system, containing modules as ``.py`` files and subpackages as +subdirectories. + +You can use an import package as soon as you have installed a distribution +package that provides it. + + +What are the links between distribution packages and import packages? +===================================================================== + +Most of the time, a distribution package provides one single import +package (or non-package module), with a matching name. For example, +``pip install numpy`` lets you ``import numpy``. + +However, this is only a convention. PyPI and other package indices *do not +enforce any relationship* between the name of a distribution package and the +import packages it provides. (A consequence of this is that you cannot blindly +install the PyPI package ``foo`` if you see ``import foo``; this may install an +unintended, and potentially even malicious package.) + +A distribution package could provide an import package with a different +name. An example of this is the popular Pillow_ library for image +processing. Its distribution package name is ``Pillow``, but it provides +the import package ``PIL``. 
This is for historical reasons: Pillow +started as a fork of the PIL library, thus it kept the import name +``PIL`` so that existing PIL users could switch to Pillow with little +effort. More generally, a fork of an existing library is a common reason +for differing names between the distribution package and the import +package. + +On a given package index (like PyPI), distribution package names must be +unique. On the other hand, import packages have no such requirement. +Import packages with the same name can be provided by several +distribution packages. Again, forks are a common reason for this. + +Conversely, a distribution package can provide several import packages, +although this is less common. An example is the attrs_ distribution +package, which provides both an ``attrs`` import package with a newer +API, and an ``attr`` import package with an older but supported API. + + +How do distribution package names and import package names compare? +=================================================================== + +Import packages should have valid Python identifiers as their name (the +:ref:`exact rules ` are found in the Python +documentation) [#non-identifier-mod-name]_. In particular, they use underscores ``_`` as word +separator and they are case-sensitive. + +On the other hand, distribution packages can use hyphens ``-`` or +underscores ``_``. They can also contain dots ``.``, which is sometimes +used for packaging a subpackage of a :ref:`namespace package +`. For most purposes, they are insensitive +to case and to ``-`` vs. ``_`` differences, e.g., ``pip install +Awesome_Package`` is the same as ``pip install awesome-package`` (the +precise rules are given in the :ref:`name normalization specification +`). + + + +--------------------------- + +.. 
[#non-identifier-mod-name] Although it is technically possible + to import packages/modules that do not have a valid Python identifier as + their name, using :doc:`importlib `, + this is vanishingly rare and strongly discouraged. + + +.. _distro: https://en.wikipedia.org/wiki/Linux_distribution +.. _PyPI: https://pypi.org +.. _Pillow: https://pypi.org/project/Pillow +.. _attrs: https://pypi.org/project/attrs diff --git a/source/discussions/downstream-packaging.rst b/source/discussions/downstream-packaging.rst new file mode 100644 index 000000000..3f4795fa8 --- /dev/null +++ b/source/discussions/downstream-packaging.rst @@ -0,0 +1,481 @@ +.. _downstream-packaging: + +=============================== +Supporting downstream packaging +=============================== + +:Page Status: Draft +:Last Reviewed: 2025-? + +While PyPI and the Python packaging tools such as :ref:`pip` are the primary +means of distributing Python packages, they are also often made available as part +of other packaging ecosystems. These repackaging efforts are collectively called +*downstream* packaging (your own efforts are called *upstream* packaging), +and include such projects as Linux distributions, Conda, Homebrew and MacPorts. +They generally aim to provide improved support for use cases that cannot be handled +via Python packaging tools alone, such as native integration with a specific operating +system, or assured compatibility with specific versions of non-Python software. + +This discussion attempts to explain how downstream packaging is usually done, +and what additional challenges downstream packagers typically face. It aims +to provide some optional guidelines that project maintainers may choose to +follow which help make downstream packaging *significantly* easier +(without imposing any major maintenance hassles on the upstream project). +Note that this is not an all-or-nothing proposal — anything that upstream +maintainers can do is useful, even if it's only a small part. 
Downstream +maintainers are also willing to prepare patches to resolve these issues. +Having these patches merged can be very helpful, since it removes the need +for different downstreams to carry and keep rebasing the same patches, +and the risk of applying inconsistent solutions to the same problem. + +Establishing a good relationship between software maintainers and downstream +packagers can bring mutual benefits. Downstreams are often willing to share +their experience, time and hardware to improve your package. They are +sometimes in a better position to see how your package is used in practice, +and to provide information about its relationships with other packages that +would otherwise require significant effort to obtain. +Packagers can often find bugs before your users hit them in production, +provide bug reports of good quality, and supply patches whenever they can. +For example, they are regularly active in ensuring the packages they redistribute +are updated for any compatibility issues that arise when a new Python version +is released. + +Please note that downstream builds include not only binary redistribution, +but also source builds done on user systems (in source-first distributions +such as Gentoo Linux, for example). + + +.. _provide-complete-source-distributions: + +Provide complete source distributions +------------------------------------- + +Why? +~~~~ + +The vast majority of downstream packagers prefer to build packages from source, +rather than use the upstream-provided binary packages. In some cases, using +sources is actually required for the package to be included in the distribution. +This is also true of pure Python packages that provide universal wheels. +The reasons for using source distributions may include: + +- Being able to audit the source code of all packages. + +- Being able to run the test suite and build documentation. 
+ +- Being able to easily apply patches, including backporting commits + from the project's repository and sending patches back to the project. + +- Being able to build on a specific platform that is not covered + by upstream builds. + +- Being able to build against specific versions of system libraries. + +- Having a consistent build process across all Python packages. + +While it is usually possible to build packages from a Git repository, there are +a few important reasons to provide a static archive file instead: + +- Fetching a single file is often more efficient, more reliable and better + supported than e.g. using a Git clone. This can help users with poor + Internet connectivity. + +- Downstreams often use hashes to verify the authenticity of source files + on subsequent builds, which require that they remain bitwise identical over + time. For example, automatically generated Git archives do not guarantee + this, as the compressed data may change if gzip is upgraded on the server. + +- Archive files can be mirrored, reducing both upstream and downstream + bandwidth use. The actual builds can afterwards be performed in firewalled + or offline environments, that can only access source files provided + by the local mirror or redistributed earlier. + +- Explicitly publishing archive files can ensure that any dependencies on version control + system metadata are resolved when creating the source archive. For example, automatically + generated Git archives omit all of the commit tag information, potentially resulting in + incorrect version details in the resulting builds. + +How? +~~~~ + +Ideally, **a source distribution archive published on PyPI should include all the files +from the package's Git repository** that are necessary to build the package +itself, run its test suite, build and install its documentation, and any other +files that may be useful to end users, such as shell completions, editor +support files, and so on. 
+This point applies only to the files belonging to the package itself.
+The downstream packaging process, much like Python package managers, will
+provision the necessary Python dependencies, system tools and external
+libraries that are needed by your package and its build scripts. However,
+the files listing these dependencies (for example, ``requirements*.txt`` files)
+should also be included, to help downstreams determine the needed dependencies,
+and check for changes in them.
+
+Some projects have concerns related to Python package managers using source
+distributions from PyPI. They do not wish to increase their size with files
+that are not used by these tools, or they do not wish to publish source
+distributions at all, as they enable a problematic or outright nonfunctional
+fallback to building the particular project from source. In these cases, a good
+compromise may be to publish a separate source archive for downstream use
+elsewhere, for example by attaching it to a GitHub release. Alternatively,
+large files, such as test data, can be split into separate archives.
+
+On the other hand, some projects (NumPy_, for instance) decide to include tests
+in their installed packages. This has the added advantage of permitting users to
+run tests after installing them, for example to check for regressions
+after upgrading a dependency. Yet another approach is to split tests or test
+data into a separate Python package. Such an approach was taken by
+the cryptography_ project, with the large test vectors being split
+into the cryptography-vectors_ package.
+
+A good idea is to use your source distribution in the release workflow.
+For example, the :ref:`build` tool does exactly that — it first builds a source
+distribution, and then uses it to build a wheel. This ensures that the source
+distribution actually works, and that it won't accidentally install fewer files
+than the official wheels. 
+Ideally, also use the source distribution to run tests, build documentation,
+and so on, or add specific tests to make sure that all necessary files were
+actually included. Understandably, this requires more effort, so it's fine
+not to do that — downstream packagers will report any missing files promptly.
+
+
+.. _no-internet-access-in-builds:
+
+Do not use the Internet during the build process
+------------------------------------------------
+
+Why?
+~~~~
+
+Downstream builds are frequently done in sandboxed environments that cannot
+access the Internet. The package sources are unpacked into this environment,
+and all the necessary dependencies are installed.
+
+Even if this is not the case, and assuming that you took sufficient care to
+properly authenticate downloads, using the Internet is discouraged for a number
+of reasons:
+
+- The Internet connection may be unstable (e.g. due to poor reception)
+  or suffer from temporary problems that could cause the process to fail
+  or hang.
+
+- The remote resources may become temporarily or even permanently
+  unavailable, making the build no longer possible. This is especially
+  problematic when someone needs to build an old package version.
+
+- The remote resources may change, making the build not reproducible.
+
+- Accessing remote servers poses a privacy issue and a potential
+  security issue, as it exposes information about the system building
+  the package.
+
+- The user may be using a service with a limited data plan, in which
+  uncontrolled Internet access may result in additional charges or other
+  inconveniences.
+
+How?
+~~~~
+
+If the package is implementing any custom build *backend* actions that use
+the Internet, for example by automatically downloading vendored dependencies
+or fetching Git submodules, its source distribution should either include all
+of these files or allow provisioning them externally, and the Internet must not
+be used if the files are already present. 
+ +Note that this point does not apply to Python dependencies that are specified +in the package metadata, and are fetched during the build and installation +process by *frontends* (such as :ref:`build` or :ref:`pip`). Downstreams use +frontends that use local provisioning for Python dependencies. + +Ideally, custom build scripts should not even attempt to access the Internet +at all, unless explicitly requested to. If any resources are missing and need +to be fetched, they should ask the user for permission first. If that is not +feasible, the next best thing is to provide an opt-out switch to disable +all Internet access. This could be done e.g. by checking whether +a ``NO_NETWORK`` environment variable is set to a non-empty value. + +Since downstreams frequently also run tests and build documentation, the above +should ideally extend to these processes as well. + +Please also remember that if you are fetching remote resources, you absolutely +must *verify their authenticity* (usually against a hash), to protect against +the file being substituted by a malicious party. + + +.. _support-system-dependencies-in-builds: + +Support building against system dependencies +-------------------------------------------- + +Why? +~~~~ + +Some Python projects have non-Python dependencies, such as libraries written +in C or C++. Trying to use the system versions of these dependencies +in upstream packaging may cause a number of problems for end users: + +- The published wheels require a binary-compatible version of the used + library to be present on the user's system. If the library is missing + or an incompatible version is installed, the Python package may fail with errors + that are not clear to inexperienced users, or even misbehave at runtime. 
+ +- Building from a source distribution requires a source-compatible version + of the dependency to be present, along with its development headers + and other auxiliary files that some systems package separately + from the library itself. + +- Even for an experienced user, installing a compatible dependency version + may be very hard. For example, the used Linux distribution may not provide + the required version, or some other package may require an incompatible + version. + +- The linkage between the Python package and its system dependency is not + recorded by the packaging system. The next system update may upgrade + the library to a newer version that breaks binary compatibility with + the Python package, and requires user intervention to fix. + +For these reasons, you may reasonably decide to either statically link +your dependencies, or to provide local copies in the installed package. +You may also vendor the dependency in your source distribution. Sometimes +these dependencies are also repackaged on PyPI, and can be declared as +project dependencies like any other Python package. + +However, none of these issues apply to downstream packaging, and downstreams +have good reasons to prefer dynamically linking to system dependencies. +In particular: + +- In many cases, reliably sharing dynamic dependencies between components is a large part + of the *purpose* of a downstream packaging ecosystem. Helping to support that makes it + easier for users of those systems to access upstream projects in their preferred format. + +- Static linking and vendoring obscures the use of external dependencies, + making source auditing harder. + +- Dynamic linking makes it possible to quickly and systematically replace the used + libraries across an entire downstream packaging ecosystem, which can be particularly + important when they turn out to contain a security vulnerability or critical bug. 
+ +- Using system dependencies makes the package benefit from downstream + customization that can improve the user experience on a particular platform, + without the downstream maintainers having to consistently patch + the dependencies vendored in different packages. This can include + compatibility improvements and security hardening. + +- Static linking and vendoring can result in multiple different versions of the + same library being loaded in the same process (for example, attempting to + import two Python packages that link to different versions of the same library). + This sometimes works without incident, but it can also lead to anything from library + loading errors, to subtle runtime bugs, to catastrophic failures (like suddenly + crashing and losing data). + +- Last but not least, static linking and vendoring results in duplication, + and may increase the use of both disk space and memory. + +How? +~~~~ + +A good compromise between the needs of both parties is to provide a switch +between using vendored and system dependencies. Ideally, if the package has +multiple vendored dependencies, it should provide both individual switches +for each dependency, and a general switch to control the default for them, +e.g. via a ``USE_SYSTEM_DEPS`` environment variable. + +If the user requests using system dependencies, and a particular dependency +is either missing or incompatible, the build should fail with an explanatory +message rather than fall back to a vendored version. This gives the packager +the opportunity to notice their mistake and a chance to consciously decide +how to solve it. + +It is reasonable for upstream projects to leave *testing* of building with +system dependencies to their downstream repackagers. The goal of these guidelines +is to facilitate more effective collaboration between upstream projects and downstream +repackagers, not to suggest upstream projects take on tasks that downstream repackagers +are better equipped to handle. + +.. 
_support-downstream-testing: + +Support downstream testing +-------------------------- + +Why? +~~~~ + +A variety of downstream projects run some degree of testing on the packaged +Python projects. Depending on the particular case, this can range from minimal +smoke testing to comprehensive runs of the complete test suite. There can +be various reasons for doing this, for example: + +- Verifying that the downstream packaging did not introduce any bugs. + +- Testing on additional platforms that are not covered by upstream testing. + +- Finding subtle bugs that can only be reproduced with particular hardware, + system package versions, and so on. + +- Testing the released package against newer (or older) dependency versions than + the ones present during upstream release testing. + +- Testing the package in an environment closely resembling the production + setup. This can detect issues caused by non-trivial interactions between + different installed packages, including packages that are not dependencies + of your package, but nevertheless can cause issues. + +- Testing the released package against newer Python versions (including + newer point releases), or less tested Python implementations such as PyPy. + +Admittedly, sometimes downstream testing may yield false positives or bug +reports about scenarios the upstream project is not interested in supporting. +However, perhaps even more often it does provide early notice of problems, +or find non-trivial bugs that would otherwise cause issues for the upstream +project's users. While mistakes do happen, the majority of downstream packagers +are doing their best to double-check their results, and help upstream +maintainers triage and fix the bugs that they reported. + +How? +~~~~ + +There are a number of things that upstream projects can do to help downstream +repackagers test their packages efficiently and effectively, including some of the suggestions +already mentioned above. 
These are typically improvements that make the test suite more +reliable and easier to use for everyone, not just downstream packagers. +Some specific suggestions are: + +- Include the test files and fixtures in the source distribution, or make it + possible to easily download them separately. + +- Do not write to the package directories during testing. Downstream test + setups sometimes run tests on top of the installed package, and modifications + performed during testing and temporary test files may end up being part + of the installed package! + +- Make the test suite work offline. Mock network interactions, using + packages such as responses_ or vcrpy_. If that is not possible, make it + possible to easily disable the tests using Internet access, e.g. via a pytest_ + marker. Use pytest-socket_ to verify that your tests work offline. This + often makes your own test workflows faster and more reliable as well. + +- Make your tests work without a specialized setup, or perform the necessary + setup as part of test fixtures. Do not ever assume that you can connect + to system services such as databases — in an extreme case, you could crash + a production service! + +- If your package has optional dependencies, make their tests optional as + well. Either skip them if the needed packages are not installed, or add + markers to make deselecting easy. + +- More generally, add markers to tests with special requirements. These can + include e.g. significant space usage, significant memory usage, long runtime, + incompatibility with parallel testing. + +- Do not assume that the test suite will be run with ``-Werror``. Downstreams + often need to disable that, as it causes false positives, e.g. due to newer + dependency versions. Assert for warnings using ``pytest.warns()`` rather + than ``pytest.raises()``! + +- Aim to make your test suite reliable and reproducible. Avoid flaky tests. 
+ Avoid depending on specific platform details, don't rely on exact results + of floating-point computation, or timing of operations, and so on. Fuzzing + has its advantages, but you want to have static test cases for completeness + as well. + +- Split tests by their purpose, and make it easy to skip categories that are + irrelevant or problematic. Since the primary purpose of downstream testing + is to ensure that the package itself works, downstreams are not generally interested + in tasks such as checking code coverage, code formatting, typechecking or running + benchmarks. These tests can fail as dependencies are upgraded or the system + is under load, without actually affecting the package itself. + +- If your test suite takes significant time to run, support testing + in parallel. Downstreams often maintain a large number of packages, + and testing them all takes a lot of time. Using pytest-xdist_ can help them + avoid bottlenecks. + +- Ideally, support running your test suite via ``pytest``. pytest_ has many + command-line arguments that are truly helpful to downstreams, such as + the ability to conveniently deselect tests, rerun flaky tests + (via pytest-rerunfailures_), add a timeout to prevent tests from hanging + (via pytest-timeout_) or run tests in parallel (via pytest-xdist_). + Note that test suites don't need to be *written* with ``pytest`` to be + *executed* with ``pytest``: ``pytest`` is able to find and execute almost + all test cases that are compatible with the standard library's ``unittest`` + test discovery. + + +.. _aim-for-stable-releases: + +Aim for stable releases +----------------------- + +Why? +~~~~ + +Many downstreams provide stable release channels in addition to the main +package streams. The goal of these channels is to provide more conservative +upgrades to users with higher stability needs. These users often prefer +to trade having the newest features available for lower risk of issues. 
+While the exact policies differ, an important criterion for including a new
+package version in a stable release channel is for it to be available in testing
+for some time already, and have no known major regressions. For example,
+in Gentoo Linux a package is usually marked stable after being available
+in testing for a month, and being tested against the versions of its
+dependencies that are marked stable at the time.
+
+However, there are circumstances which demand more prompt action. For example,
+if a security vulnerability or a major bug is found in the version that is
+currently available in the stable channel, the downstream is facing a need
+to resolve it. In this case, they need to consider various options, such as:
+
+- putting a new version in the stable channel early,
+
+- adding patches to the version currently published,
+
+- or even downgrading the stable channel to an earlier release.
+
+Each of these options involves certain risks and a certain amount of work,
+and packagers need to weigh them to determine the course of action.
+
+How?
+~~~~
+
+There are some things that upstreams can do to tailor their workflow to stable
+release channels. These actions often are beneficial to the package's users
+as well. Some specific suggestions are:
+
+- Adjust the release frequency to the rate of code changes. Packages that
+  are released rarely often bring significant changes with every release,
+  and a higher risk of accidental regressions.
+
+- Avoid mixing bug fixes and new features, if possible. In particular, if there
+  are known bug fixes merged already, consider making a new release before
+  merging feature branches.
+
+- Consider making prereleases after major changes, to provide more testing
+  opportunities for users and downstreams willing to opt-in.
+
+- If your project is subject to very intense development, consider splitting
+  one or more branches that include a more conservative subset of commits,
+  and are released separately. 
For example, Django_ currently maintains three + release branches in addition to main. + +- Even if you don't wish to maintain additional branches permanently, consider + making additional patch releases with minimal changes to the previous + version, especially when a security vulnerability is discovered. + +- Split your changes into focused commits that address one problem at a time, + to make it easier to cherry-pick changes to earlier releases when necessary. + + +.. _responses: https://pypi.org/project/responses/ +.. _vcrpy: https://pypi.org/project/vcrpy/ +.. _pytest-socket: https://pypi.org/project/pytest-socket/ +.. _pytest-xdist: https://pypi.org/project/pytest-xdist/ +.. _pytest: https://pytest.org/ +.. _pytest-rerunfailures: https://pypi.org/project/pytest-rerunfailures/ +.. _pytest-timeout: https://pypi.org/project/pytest-timeout/ +.. _Django: https://www.djangoproject.com/ +.. _NumPy: https://numpy.org/ +.. _cryptography: https://pypi.org/project/cryptography/ +.. _cryptography-vectors: https://pypi.org/project/cryptography-vectors/ diff --git a/source/discussions/index.rst b/source/discussions/index.rst new file mode 100644 index 000000000..b1b84f97a --- /dev/null +++ b/source/discussions/index.rst @@ -0,0 +1,20 @@ +Discussions +########### + +**Discussions** are focused on providing comprehensive information about a +specific topic. If you're just trying to get stuff done, see +:doc:`/guides/index`. + +.. 
toctree:: + :maxdepth: 1 + + versioning + deploying-python-applications + pip-vs-easy-install + install-requires-vs-requirements + distribution-package-vs-import-package + package-formats + src-layout-vs-flat-layout + setup-py-deprecated + single-source-version + downstream-packaging diff --git a/source/requirements.rst b/source/discussions/install-requires-vs-requirements.rst similarity index 75% rename from source/requirements.rst rename to source/discussions/install-requires-vs-requirements.rst index a9ae9402b..99e1552b8 100644 --- a/source/requirements.rst +++ b/source/discussions/install-requires-vs-requirements.rst @@ -1,23 +1,17 @@ -.. _`install_requires vs Requirements files`: +.. _`install_requires vs requirements files`: ====================================== -install_requires vs Requirements files +install_requires vs requirements files ====================================== -:Page Status: Complete -:Last Reviewed: 2015-09-08 - -.. contents:: Contents - :local: - install_requires ---------------- -``install_requires`` is a :ref:`setuptools` ``setup.py`` keyword that should be -used to specify what a project **minimally** needs to run correctly. When the -project is installed by :ref:`pip`, this is the specification that is used to -install its dependencies. +``install_requires`` is a :ref:`setuptools` :file:`setup.py` keyword that +should be used to specify what a project **minimally** needs to run correctly. +When the project is installed by :ref:`pip`, this is the specification that is +used to install its dependencies. 
For example, if the project requires A and B, your ``install_requires`` would be like so: @@ -41,8 +35,9 @@ v2 of 'B', so it would be like so: 'B>=2' ] -It may also be known that project A follows semantic versioning, and that v2 of -'A' will indicate a break in compatibility, so it makes sense to not allow v2: +It may also be known that project 'A' introduced a change in its v2 +that breaks the compatibility of your project with v2 of 'A' and later, +so it makes sense to not allow v2: :: @@ -71,7 +66,7 @@ just a list of :ref:`pip:pip install` arguments placed into a file. Whereas ``install_requires`` defines the dependencies for a single project, :ref:`Requirements Files ` are often used to define -the requirements for a complete python environment. +the requirements for a complete Python environment. Whereas ``install_requires`` requirements are minimal, requirements files often contain an exhaustive listing of pinned versions for the purpose of @@ -86,9 +81,9 @@ packages. [1]_ Whereas ``install_requires`` metadata is automatically analyzed by pip during an install, requirements files are not, and only are used when a user specifically -installs them using ``pip install -r``. +installs them using ``python -m pip install -r``. ---- .. [1] For more on "Abstract" vs "Concrete" requirements, see - https://caremad.io/2013/07/setup-vs-requirement/. + https://caremad.io/posts/2013/07/setup-vs-requirement/. diff --git a/source/discussions/package-formats.rst b/source/discussions/package-formats.rst new file mode 100644 index 000000000..6d4dee35c --- /dev/null +++ b/source/discussions/package-formats.rst @@ -0,0 +1,193 @@ +.. _package-formats: + +=============== +Package Formats +=============== + +This page discusses the file formats that are used to distribute Python packages +and the differences between them. 
+
+You will find files in two formats on package indices such as PyPI_: **source
+distributions**, or **sdists** for short, and **binary distributions**, commonly
+called **wheels**. For example, the `PyPI page for pip 23.3.1 `_
+lets you download two files, ``pip-23.3.1.tar.gz`` and
+``pip-23.3.1-py3-none-any.whl``. The former is an sdist, the latter is a
+wheel. As explained below, these serve different purposes. When publishing a
+package on PyPI (or elsewhere), you should always upload both an sdist and one
+or more wheels.
+
+
+What is a source distribution?
+==============================
+
+Conceptually, a source distribution is an archive of the source code in raw
+form. Concretely, an sdist is a ``.tar.gz`` archive containing the source code
+plus an additional special file called ``PKG-INFO``, which holds the project
+metadata. The presence of this file helps packaging tools to be more efficient
+by not needing to compute the metadata themselves. The ``PKG-INFO`` file follows
+the format specified in :ref:`core-metadata` and is not intended to be written
+by hand [#core-metadata-format]_.
+
+You can thus inspect the contents of an sdist by unpacking it using standard
+tools to work with tar archives, such as ``tar -xvf`` on UNIX platforms (like
+Linux and macOS), or :ref:`the command line interface of Python's tarfile module
+` on any platform.
+
+Sdists serve several purposes in the packaging ecosystem. When :ref:`pip`, the
+standard Python package installer, cannot find a wheel to install, it will fall
+back on downloading a source distribution, compiling a wheel from it, and
+installing the wheel. Furthermore, sdists are often used as the package source
+by downstream packagers (such as Linux distributions, Conda, Homebrew and
+MacPorts on macOS, ...), who, for various reasons, may prefer them over, e.g.,
+pulling from a Git repository.
+ +A source distribution is recognized by its file name, which has the form +:samp:`{package_name}-{version}.tar.gz`, e.g., ``pip-23.3.1.tar.gz``. + +.. TODO: provide clear guidance on whether sdists should contain docs and tests. + Discussion: https://discuss.python.org/t/should-sdists-include-docs-and-tests/14578 + +If you want technical details on the sdist format, read the :ref:`sdist +specification `. + + +What is a wheel? +================ + +Conceptually, a wheel contains exactly the files that need to be copied when +installing the package. + +There is a big difference between sdists and wheels for packages with +:term:`extension modules `, written in compiled languages like +C, C++ and Rust, which need to be compiled into platform-dependent machine code. +With these packages, wheels do not contain source code (like C source files) but +compiled, executable code (like ``.so`` files on Linux or DLLs on Windows). + +Furthermore, while there is only one sdist per version of a project, there may +be many wheels. Again, this is most relevant in the context of extension +modules. The compiled code of an extension module is tied to an operating system +and processor architecture, and often also to the version of the Python +interpreter (unless the :ref:`Python stable ABI ` is used). + +For pure-Python packages, the difference between sdists and wheels is less +marked. There is normally one single wheel, for all platforms and Python +versions. Python is an interpreted language, which does not need ahead-of-time +compilation, so wheels contain ``.py`` files just like sdists. + +If you are wondering about ``.pyc`` bytecode files: they are not included in +wheels, since they are cheap to generate, and including them would unnecessarily +force a huge number of packages to distribute one wheel per Python version +instead of one single wheel. Instead, installers like :ref:`pip` generate them +while installing the package. 
+ +With that being said, there are still important differences between sdists and +wheels, even for pure Python projects. Wheels are meant to contain exactly what +is to be installed, and nothing more. In particular, wheels should never include +tests and documentation, while sdists commonly do. Also, the wheel format is +more complex than sdist. For example, it includes a special file -- called +``RECORD`` -- that lists all files in the wheel along with a hash of their +content, as a safety check of the download's integrity. + +At a glance, you might wonder if wheels are really needed for "plain and basic" +pure Python projects. Keep in mind that due to the flexibility of sdists, +installers like pip cannot install from sdists directly -- they need to first +build a wheel, by invoking the :term:`build backend` that the sdist specifies +(the build backend may do all sorts of transformations while building the wheel, +such as compiling C extensions). For this reason, even for a pure Python +project, you should always upload *both* an sdist and a wheel to PyPI or other +package indices. This makes installation much faster for your users, since a +wheel is directly installable. By only including files that must be installed, +wheels also make for smaller downloads. + +On the technical level, a wheel is a ZIP archive (unlike sdists which are TAR +archives). You can inspect its contents by unpacking it as a normal ZIP archive, +e.g., using ``unzip`` on UNIX platforms like Linux and macOS, ``Expand-Archive`` +in Powershell on Windows, or :ref:`the command line interface of Python's +zipfile module `. This can be very useful to check +that the wheel includes all the files you need it to. + +Inside a wheel, you will find the package's files, plus an additional directory +called :samp:`{package_name}-{version}.dist-info`. This directory contains +various files, including a ``METADATA`` file which is the equivalent of +``PKG-INFO`` in sdists, as well as ``RECORD``. 
This can be useful to ensure no
+files are missing from your wheels.
+
+The file name of a wheel (ignoring some rarely used features) looks like this:
+:samp:`{package_name}-{version}-{python_tag}-{abi_tag}-{platform_tag}.whl`.
+This naming convention identifies which platforms and Python versions the wheel
+is compatible with. For example, the name ``pip-23.3.1-py3-none-any.whl`` means
+that:
+
+- (``py3``) This wheel can be installed on any implementation of Python 3,
+  whether CPython, the most widely used Python implementation, or an alternative
+  implementation like PyPy_;
+- (``none``) It does not depend on the Python version;
+- (``any``) It does not depend on the platform.
+
+The pattern ``py3-none-any`` is common for pure Python projects. Packages with
+extension modules typically ship multiple wheels with more complex tags.
+
+All technical details on the wheel format can be found in the :ref:`wheel
+specification `.
+
+
+.. _egg-format:
+.. _`Wheel vs Egg`:
+
+What about eggs?
+================
+
+"Egg" is an old package format that has been replaced with the wheel format. It
+should not be used anymore. Since August 2023, PyPI `rejects egg uploads
+`_.
+
+Here's a breakdown of the important differences between wheel and egg.
+
+* The egg format was introduced by :ref:`setuptools` in 2004, whereas the wheel
+  format was introduced by :pep:`427` in 2012.
+
+* Wheel has an :doc:`official standard specification
+  `. Egg did not.
+
+* Wheel is a :term:`distribution ` format, i.e. a packaging
+  format. [#wheel-importable]_ Egg was both a distribution format and a runtime
+  installation format (if left zipped), and was designed to be importable.
+
+* Wheel archives do not include ``.pyc`` files. Therefore, when the distribution
+  only contains Python files (i.e. no compiled extensions), and is compatible
+  with Python 2 and 3, it's possible for a wheel to be "universal", similar to
+  an :term:`sdist `.
+
+* Wheel uses standard :ref:`.dist-info directories
+  `. 
Egg used ``.egg-info``. + +* Wheel has a :ref:`richer file naming convention `. A + single wheel archive can indicate its compatibility with a number of Python + language versions and implementations, ABIs, and system architectures. + +* Wheel is versioned. Every wheel file contains the version of the wheel + specification and the implementation that packaged it. + +* Wheel is internally organized by `sysconfig path type + `_, + therefore making it easier to convert to other formats. + +-------------------------------------------------------------------------------- + +.. [#core-metadata-format] This format is email-based. Although this would + be unlikely to be chosen today, backwards compatibility considerations lead to + it being kept as the canonical format. From the user point of view, this + is mostly invisible, since the metadata is specified by the user in a way + understood by the build backend, typically ``[project]`` in ``pyproject.toml``, + and translated by the build backend into ``PKG-INFO``. + +.. [#wheel-importable] Circumstantially, in some cases, wheels can be used + as an importable runtime format, although :ref:`this is not officially supported + at this time `. + + + +.. _pip-pypi: https://pypi.org/project/pip/23.3.1/#files +.. _pypi: https://pypi.org +.. _pypi-eggs-deprecation: https://blog.pypi.org/posts/2023-06-26-deprecate-egg-uploads/ +.. _pypy: https://pypy.org diff --git a/source/pip_easy_install.rst b/source/discussions/pip-vs-easy-install.rst similarity index 51% rename from source/pip_easy_install.rst rename to source/discussions/pip-vs-easy-install.rst index ca0063b9f..2bb75d3be 100644 --- a/source/pip_easy_install.rst +++ b/source/discussions/pip-vs-easy-install.rst @@ -5,16 +5,13 @@ pip vs easy_install =================== -:Page Status: Complete -:Last Reviewed: 2015-09-08 - -`easy_install` was released in 2004, as part of :ref:`setuptools`. 
It was -notable at the time for installing :term:`packages ` from +:ref:`easy_install `, now `deprecated`_, was released in 2004 as part of :ref:`setuptools`. +It was notable at the time for installing :term:`packages ` from :term:`PyPI ` using requirement specifiers, and automatically installing dependencies. -:ref:`pip` came later in 2008, as alternative to `easy_install`, although still +:ref:`pip` came later in 2008, as an alternative to :ref:`easy_install `, although still largely built on top of :ref:`setuptools` components. It was notable at the time for *not* installing packages as :term:`Eggs ` or from :term:`Eggs ` (but rather simply as 'flat' packages from :term:`sdists `, which gave users the power to easily replicate environments. -Here's a breakdown of the important differences between pip and easy_install now: +Here's a breakdown of the important differences between pip and the deprecated easy_install: -+------------------------------+----------------------------------+-------------------------------+ -| | **pip** | **easy_install** | -+------------------------------+----------------------------------+-------------------------------+ -|Installs from :term:`Wheels |Yes |No | -|` | | | -+------------------------------+----------------------------------+-------------------------------+ -|Uninstall Packages |Yes (``pip uninstall``) |No | -+------------------------------+----------------------------------+-------------------------------+ -|Dependency Overrides |Yes (:ref:`Requirements Files |No | -| |`) | | -+------------------------------+----------------------------------+-------------------------------+ -|List Installed Packages |Yes (``pip list`` and ``pip |No | -| |freeze``) | | -+------------------------------+----------------------------------+-------------------------------+ -|:pep:`438` |Yes |No | -|Support | | | -+------------------------------+----------------------------------+-------------------------------+ -|Installation format |'Flat' 
packages with `egg-info` | Encapsulated Egg format | -| |metadata. | | -+------------------------------+----------------------------------+-------------------------------+ -|sys.path modification |No |Yes | -| | | | -| | | | -+------------------------------+----------------------------------+-------------------------------+ -|Installs from :term:`Eggs |No |Yes | -|` | | | -+------------------------------+----------------------------------+-------------------------------+ -|`pylauncher support`_ |No |Yes [1]_ | -| | | | -+------------------------------+----------------------------------+-------------------------------+ -|:ref:`Multi-version Installs` |No |Yes | -| | | | -+------------------------------+----------------------------------+-------------------------------+ -|Exclude scripts during install|No |Yes | -| | | | -+------------------------------+----------------------------------+-------------------------------+ ++------------------------------+--------------------------------------+-------------------------------+ +| | **pip** | **easy_install** | ++------------------------------+--------------------------------------+-------------------------------+ +|Installs from :term:`Wheels |Yes |No | +|` | | | ++------------------------------+--------------------------------------+-------------------------------+ +|Uninstall Packages |Yes (``python -m pip uninstall``) |No | ++------------------------------+--------------------------------------+-------------------------------+ +|Dependency Overrides |Yes (:ref:`Requirements Files |No | +| |`) | | ++------------------------------+--------------------------------------+-------------------------------+ +|List Installed Packages |Yes (``python -m pip list`` and |No | +| |``python -m pip freeze``) | | ++------------------------------+--------------------------------------+-------------------------------+ +|:pep:`438` |Yes |No | +|Support | | | 
++------------------------------+--------------------------------------+-------------------------------+ +|Installation format |'Flat' packages with :file:`egg-info` | Encapsulated Egg format | +| |metadata. | | ++------------------------------+--------------------------------------+-------------------------------+ +|sys.path modification |No |Yes | +| | | | +| | | | ++------------------------------+--------------------------------------+-------------------------------+ +|Installs from :term:`Eggs |No |Yes | +|` | | | ++------------------------------+--------------------------------------+-------------------------------+ +|`pylauncher support`_ |No |Yes [1]_ | +| | | | ++------------------------------+--------------------------------------+-------------------------------+ +|:ref:`Multi-version Installs` |No |Yes | +| | | | ++------------------------------+--------------------------------------+-------------------------------+ +|Exclude scripts during install|No |Yes | +| | | | ++------------------------------+--------------------------------------+-------------------------------+ +|per project index |Only in virtualenv |Yes, via setup.cfg | +| | | | ++------------------------------+--------------------------------------+-------------------------------+ ---- -.. [1] https://setuptools.readthedocs.io/en/latest/easy_install.html#natural-script-launcher +.. _deprecated: https://setuptools.readthedocs.io/en/latest/history.html#v42-0-0 + +.. [1] https://setuptools.readthedocs.io/en/latest/deprecated/easy_install.html#natural-script-launcher -.. _pylauncher support: https://bitbucket.org/pypa/pylauncher +.. _pylauncher support: https://bitbucket.org/vinay.sajip/pylauncher diff --git a/source/discussions/setup-py-deprecated.rst b/source/discussions/setup-py-deprecated.rst new file mode 100644 index 000000000..6bcd15b58 --- /dev/null +++ b/source/discussions/setup-py-deprecated.rst @@ -0,0 +1,215 @@ +.. 
_setup-py-deprecated: + + +=========================== +Is ``setup.py`` deprecated? +=========================== + +No, :term:`setup.py` and :ref:`setuptools` are not deprecated. + +Setuptools is perfectly usable as a :term:`build backend` +for packaging Python projects. +And :file:`setup.py` is a valid configuration file for :ref:`setuptools` +that happens to be written in Python, instead of in *TOML* for example +(a similar practice is used by other tools +like *nox* and its :file:`noxfile.py` configuration file, +or *pytest* and :file:`conftest.py`). + +However, ``python setup.py`` and the use of :file:`setup.py` +as a command line tool are deprecated. + +This means that commands such as the following **MUST NOT** be run anymore: + +* ``python setup.py install`` +* ``python setup.py develop`` +* ``python setup.py sdist`` +* ``python setup.py bdist_wheel`` + + +What commands should be used instead? +===================================== + ++---------------------------------+----------------------------------------+ +| Deprecated | Recommendation | ++=================================+========================================+ +| ``python setup.py install`` | ``python -m pip install .`` | ++---------------------------------+----------------------------------------+ +| ``python setup.py develop`` | ``python -m pip install --editable .`` | ++---------------------------------+----------------------------------------+ +| ``python setup.py sdist`` | ``python -m build`` [#needs-build]_ | ++---------------------------------+ | +| ``python setup.py bdist_wheel`` | | ++---------------------------------+----------------------------------------+ + + +.. [#needs-build] This requires the :ref:`build` dependency. + It is recommended to always build and publish both the source distribution + and wheel of a project, which is what ``python -m build`` does. + If necessary the ``--sdist`` and ``--wheel`` options can be used + to generate only one or the other. 
+ + +In order to install a setuptools based project, +it was common to run :file:`setup.py`'s ``install`` command such as: +``python setup.py install``. +Nowadays, the recommended method is to use :ref:`pip` directly +with a command like this one: ``python -m pip install .``. +Where the dot ``.`` is actually a file system path, +it is the path notation for the current directory. +Indeed, *pip* accepts a path to +a project's source tree directory on the local filesystem +as argument to its ``install`` sub-command. +So this would also be a valid command: +``python -m pip install path/to/project``. + +As for the installation in *develop* mode aka *editable* mode, +instead of ``python setup.py develop`` +one can use the ``--editable`` option of pip's *install* sub-command: +``python -m pip install --editable .``. + +One recommended, simple, and straightforward method of building +:term:`source distributions ` +and :term:`wheels ` +is to use the :ref:`build` tool with a command like +``python -m build`` +which triggers the generation of both distribution formats. +If necessary the ``--sdist`` and ``--wheel`` options can be used +to generate only one or the other. +Note that the build tool needs to be installed separately. + +The command ``python setup.py install`` was deprecated +in setuptools version *58.3.0*. + + +What about other commands? +========================== + +What are some replacements for the other ``python setup.py`` commands? + + +``python setup.py test`` +------------------------ + +The recommendation is to use a test runner such as pytest_. + +.. _pytest: https://docs.pytest.org/ + + +``python setup.py check``, ``python setup.py register``, and ``python setup.py upload`` +--------------------------------------------------------------------------------------- + +A trusted replacement is :ref:`twine`: + +* ``python -m twine check --strict dist/*`` +* ``python -m twine register dist/*.whl`` [#not-pypi]_ +* ``python -m twine upload dist/*`` + +.. 
[#not-pypi] Not necessary, nor supported on :term:`PyPI `. + But might be necessary on other :term:`package indexes ` (for example :ref:`devpi`). + + +``python setup.py --version`` +----------------------------- + +A possible replacement solution (among others) is to rely on setuptools-scm_: + +* ``python -m setuptools_scm`` + +.. _setuptools-scm: https://setuptools-scm.readthedocs.io/en/latest/usage/#as-cli-tool + + +Remaining commands +------------------ + +This guide does not make suggestions of replacement solutions for those commands: + +.. hlist:: + :columns: 4 + + * ``alias`` + * ``bdist`` + * ``bdist_dumb`` + * ``bdist_egg`` + * ``bdist_rpm`` + * ``build`` + * ``build_clib`` + * ``build_ext`` + * ``build_py`` + * ``build_scripts`` + * ``clean`` + * ``dist_info`` + * ``easy_install`` + * ``editable_wheel`` + * ``egg_info`` + * ``install_data`` + * ``install_egg_info`` + * ``install_headers`` + * ``install_lib`` + * ``install_scripts`` + * ``rotate`` + * ``saveopts`` + * ``setopt`` + * ``upload_docs`` + + +What about custom commands? +=========================== + +Likewise, custom :file:`setup.py` commands are deprecated. +The recommendation is to migrate those custom commands +to a task runner tool or any other similar tool. +Some examples of such tools are: +chuy, make, nox or tox, pydoit, pyinvoke, taskipy, and thx. + + +What about custom build steps? +============================== + +Custom build steps that for example +either overwrite existing steps such as ``build_py``, ``build_ext``, and ``bdist_wheel`` +or add new build steps are not deprecated. +Those will be automatically called as expected. + + +Should ``setup.py`` be deleted? +=============================== + +Although the usage of :file:`setup.py` as an executable script is deprecated, +its usage as a configuration file for setuptools is absolutely fine. +There is likely no modification needed in :file:`setup.py`. + + +Is ``pyproject.toml`` mandatory? 
+================================ + +While it is not technically necessary yet, +it is **STRONGLY RECOMMENDED** for a project to have a :file:`pyproject.toml` file +at the root of its source tree with a content like this: + +.. code:: toml + + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + + +The guide :ref:`modernize-setup-py-project` has more details about this. + +The standard fallback behavior for a :term:`build frontend ` +in the absence of a :file:`pyproject.toml` file and its ``[build-system]`` table +is to assume that the :term:`build backend ` is setuptools. + + +Why? What does it all mean? +=========================== + +One way to look at it is that the scope of setuptools +has now been reduced to the role of a build backend. + + +Where to read more about this? +============================== + +* `Why you shouldn't invoke setup.py directly `__ by Paul Ganssle + +* :doc:`setuptools:deprecated/commands` diff --git a/source/discussions/single-source-version.rst b/source/discussions/single-source-version.rst new file mode 100644 index 000000000..c7dc8d1e1 --- /dev/null +++ b/source/discussions/single-source-version.rst @@ -0,0 +1,62 @@ +.. _single-source-version: + +=================================== +Single-sourcing the Project Version +=================================== + +:Page Status: Complete +:Last Reviewed: 2024-10-07 + +Many Python :term:`distribution packages ` publish a single +Python :term:`import package ` where it is desired that the runtime +``__version__`` attribute on the import package report the same version specifier +as :func:`importlib.metadata.version` reports for the distribution package +(as described in :ref:`runtime-version-access`). + +It is also frequently desired that this version information be derived from a version +control system *tag* (such as ``v1.2.3``) rather than being manually updated in the +source code. 
+ +Some projects may choose to simply live with the data entry duplication, and rely +on automated testing to ensure the different values do not diverge. + +Alternatively, a project's chosen build system may offer a way to define a single +source of truth for the version number. + +In general, the options are: + +1) If the code is in a version control system (VCS), such as Git, then the version can be extracted from the VCS. + +2) The version can be hard-coded into the :file:`pyproject.toml` file -- and the build system can copy it + into other locations it may be required. + +3) The version string can be hard-coded into the source code -- either in a special purpose file, + such as :file:`_version.txt` (which must then be shipped as part of the project's source distribution + package), or as an attribute in a particular module, such as :file:`__init__.py`. The build + system can then extract it from the runtime location at build time. + +Consult your build system's documentation for their recommended method. + +When the intention is that a distribution package and its associated import package +share the same version, it is recommended that the project include an automated test +case that ensures ``import_name.__version__`` and ``importlib.metadata.version("dist-name")`` +report the same value (note: for many projects, ``import_name`` and ``dist-name`` will +be the same name). + + +.. _Build system version handling: + +Build System Version Handling +----------------------------- + +The following are links to some build system's documentation for handling version strings. + +* `Flit `_ + +* `Hatchling `_ + +* `PDM `_ + +* `Setuptools `_ + + - `setuptools_scm `_ diff --git a/source/discussions/src-layout-vs-flat-layout.rst b/source/discussions/src-layout-vs-flat-layout.rst new file mode 100644 index 000000000..c38968345 --- /dev/null +++ b/source/discussions/src-layout-vs-flat-layout.rst @@ -0,0 +1,105 @@ +.. 
_src-layout-vs-flat-layout: + +========================= +src layout vs flat layout +========================= + +The "flat layout" refers to organising a project's files in a folder or +repository, such that the various configuration files and +:term:`import packages ` are all in the top-level directory. + +:: + + . + ├── README.md + ├── noxfile.py + ├── pyproject.toml + ├── setup.py + ├── awesome_package/ + │ ├── __init__.py + │ └── module.py + └── tools/ + ├── generate_awesomeness.py + └── decrease_world_suck.py + +The "src layout" deviates from the flat layout by moving the code that is +intended to be importable (i.e. ``import awesome_package``, also known as +:term:`import packages `) into a subdirectory. This +subdirectory is typically named ``src/``, hence "src layout". + +:: + + . + ├── README.md + ├── noxfile.py + ├── pyproject.toml + ├── setup.py + ├── src/ + │ └── awesome_package/ + │ ├── __init__.py + │ └── module.py + └── tools/ + ├── generate_awesomeness.py + └── decrease_world_suck.py + +Here's a breakdown of the important behaviour differences between the src +layout and the flat layout: + +* The src layout requires installation of the project to be able to run its + code, and the flat layout does not. + + This means that the src layout involves an additional step in the + development workflow of a project (typically, an + :doc:`editable installation ` + is used for development and a regular installation is used for testing). + +* The src layout helps prevent accidental usage of the in-development copy of + the code. + + This is relevant since the Python interpreter includes the current working + directory as the first item on the import path. This means that if an import + package exists in the current working directory with the same name as an + installed import package, the variant from the current working directory will + be used. 
This can lead to subtle misconfiguration of the project's packaging
+  tooling, which could result in files not being included in a distribution.
+
+  The src layout helps avoid this by keeping import packages in a directory
+  separate from the root directory of the project, ensuring that the installed
+  copy is used.
+
+* The src layout helps enforce that an
+  :doc:`editable installation ` is only
+  able to import files that were meant to be importable.
+
+  This is especially relevant when the editable installation is implemented
+  using a `path configuration file <https://docs.python.org/3/library/site.html>`_
+  that adds the directory to the import path.
+
+  The flat layout would add the other project files (e.g. ``README.md``,
+  ``tox.ini``) and packaging/tooling configuration files (e.g. ``setup.py``,
+  ``noxfile.py``) on the import path. This would make certain imports work
+  in editable installations but not regular installations.
+
+.. _running-cli-from-source-src-layout:
+
+Running a command-line interface from source with src-layout
+============================================================
+
+Due to the previously mentioned requirement that a src-layout project must
+be installed before its code can be run, a command-line
+interface cannot be run directly from the :term:`source tree `,
+but requires installation of the package in
+:doc:`Development Mode `
+for testing purposes. Since this can be impractical in some situations,
+a workaround could be to prepend the package folder to Python's
+:py:data:`sys.path` when called via its :file:`__main__.py` file:
+
+.. code-block:: python
+
+    import os
+    import sys
+
+    if not __package__:
+        # Make CLI runnable from source tree with
+        #    python src/package
+        package_source_path = os.path.dirname(os.path.dirname(__file__))
+        sys.path.insert(0, package_source_path)
diff --git a/source/discussions/versioning.rst b/source/discussions/versioning.rst
new file mode 100644
index 000000000..eeea3578c
--- /dev/null
+++ b/source/discussions/versioning.rst
@@ -0,0 +1,253 @@
+.. _versioning:
+.. 
_`Choosing a versioning scheme`: + +========== +Versioning +========== + +This discussion covers all aspects of versioning Python packages. + + +Valid version numbers +===================== + +Different Python projects may use different versioning schemes based on the +needs of that particular project, but in order to be compatible with tools like +:ref:`pip`, all of them are required to comply with a flexible format for +version identifiers, for which the authoritative reference is the +:ref:`specification of version specifiers `. Here are some +examples of version numbers [#version-examples]_: + +- A simple version (final release): ``1.2.0`` +- A development release: ``1.2.0.dev1`` +- An alpha release: ``1.2.0a1`` +- A beta release: ``1.2.0b1`` +- A release candidate: ``1.2.0rc1`` +- A post-release: ``1.2.0.post1`` +- A post-release of an alpha release (possible, but discouraged): ``1.2.0a1.post1`` +- A simple version with only two components: ``23.12`` +- A simple version with just one component: ``42`` +- A version with an epoch: ``1!1.0`` + +Projects can use a cycle of pre-releases to support testing by their users +before a final release. In order, the steps are: alpha releases, beta releases, +release candidates, final release. Pip and other modern Python package +installers ignore pre-releases by default when deciding which versions of +dependencies to install, unless explicitly requested (e.g., with +``pip install pkg==1.1a3`` or ``pip install --pre pkg``). + +The purpose of development releases is to support releases made early during a +development cycle, for example, a nightly build, or a build from the latest +source in a Linux distribution. + +Post-releases are used to address minor errors in a final release that do not +affect the distributed software, such as correcting an error in the release +notes. 
They should not be used for bug fixes; these should be done with a new +final release (e.g., incrementing the third component when using semantic +versioning). + +Finally, epochs, a rarely used feature, serve to fix the sorting order when +changing the versioning scheme. For example, if a project is using calendar +versioning, with versions like 23.12, and switches to semantic versioning, with +versions like 1.0, the comparison between 1.0 and 23.12 will go the wrong way. +To correct this, the new version numbers should have an explicit epoch, as in +"1!1.0", in order to be treated as more recent than the old version numbers. + + + +Semantic versioning vs. calendar versioning +=========================================== + +A versioning scheme is a formalized way to interpret the segments of a version +number, and to decide which should be the next version number for a new release +of a package. Two versioning schemes are commonly used for Python packages, +semantic versioning and calendar versioning. + +.. caution:: + + The decision which version number to choose is up to a + project's maintainer. This effectively means that version + bumps reflect the maintainer's view. That view may differ + from the end-users' perception of what said formalized + versioning scheme promises them. + + There are known exceptions for selecting the next version + number. The maintainers may consciously choose to break the + assumption that the last version segment only contains + backwards-compatible changes. + One such case is when a security vulnerability needs to be + addressed. Security releases often come in patch versions + but contain breaking changes inevitably. 
+ + +Semantic versioning +------------------- + +The idea of *semantic versioning* (or SemVer) is to use 3-part version numbers, +*major.minor.patch*, where the project author increments: + +- *major* when they make incompatible API changes, +- *minor* when they add functionality in a backwards-compatible manner, and +- *patch*, when they make backwards-compatible bug fixes. + +A majority of Python projects use a scheme that resembles semantic +versioning. However, most projects, especially larger ones, do not strictly +adhere to semantic versioning, since many changes are technically breaking +changes but affect only a small fraction of users. Such projects tend to +increment the major number when the incompatibility is high, or to signal a +shift in the project, rather than for any tiny incompatibility +[#semver-strictness]_. Conversely, a bump of the major version number +is sometimes used to signal significant but backwards-compatible new +features. + +For those projects that do use strict semantic versioning, this approach allows +users to make use of :ref:`compatible release version specifiers +`, with the ``~=`` operator. For +example, ``name ~= X.Y`` is roughly equivalent to ``name >= X.Y, == X.*``, i.e., +it requires at least release X.Y, and allows any later release with greater Y as +long as X is the same. Likewise, ``name ~= X.Y.Z`` is roughly equivalent to +``name >= X.Y.Z, == X.Y.*``, i.e., it requires at least X.Y.Z and allows a later +release with same X and Y but higher Z. + +Python projects adopting semantic versioning should abide by clauses 1-8 of the +`Semantic Versioning 2.0.0 specification `_. + +The popular :doc:`Sphinx ` documentation generator is an example +project that uses strict semantic versioning (:doc:`Sphinx versioning policy +`). 
The famous :doc:`NumPy ` +scientific computing package explicitly uses "loose" semantic versioning, where +releases incrementing the minor version can contain backwards-incompatible API +changes (:doc:`NumPy versioning policy `). + + +Calendar versioning +------------------- + +Semantic versioning is not a suitable choice for all projects, such as those +with a regular time-based release cadence and a deprecation process that +provides warnings for a number of releases prior to removal of a feature. + +A key advantage of date-based versioning, or `calendar versioning `_ +(CalVer), is that it is straightforward to tell how old the base feature set of +a particular release is given just the version number. + +Calendar version numbers typically take the form *year.month* (for example, +23.12 for December 2023). + +:doc:`Pip `, the standard Python package installer, uses calendar +versioning. + + +Other schemes +------------- + +Serial versioning refers to the simplest possible versioning scheme, which +consists of a single number incremented every release. While serial versioning +is very easy to manage as a developer, it is the hardest to track as an end +user, as serial version numbers convey little or no information regarding API +backwards compatibility. + +Combinations of the above schemes are possible. For example, a project may +combine date-based versioning with serial versioning to create a *year.serial* +numbering scheme that readily conveys the approximate age of a release, but +doesn't otherwise commit to a particular release cadence within the year. + + +Local version identifiers +========================= + +Public version identifiers are designed to support distribution via :term:`PyPI +`. Python packaging tools also support the notion +of a :ref:`local version identifier `, which can be +used to identify local development builds not intended for publication, or +modified variants of a release maintained by a redistributor. 
+ +A local version identifier takes the form of a public version identifier, +followed by "+" and a local version label. For example, a package with +Fedora-specific patches applied could have the version "1.2.1+fedora.4". +Another example is versions computed by setuptools-scm_, a setuptools plugin +that reads the version from Git data. In a Git repository with some commits +since the latest release, setuptools-scm generates a version like +"0.5.dev1+gd00980f", or if the repository has untracked changes, like +"0.5.dev1+gd00980f.d20231217". + +.. _runtime-version-access: + +Accessing version information at runtime +======================================== + +Version information for all :term:`distribution packages ` +that are locally available in the current environment can be obtained at runtime +using the standard library's :func:`importlib.metadata.version` function:: + + >>> importlib.metadata.version("cryptography") + '41.0.7' + +Many projects also choose to version their top level +:term:`import packages ` by providing a package level +``__version__`` attribute:: + + >>> import cryptography + >>> cryptography.__version__ + '41.0.7' + +This technique can be particularly valuable for CLI applications which want +to ensure that version query invocations (such as ``pip -V``) run as quickly +as possible. + +Package publishers wishing to ensure their reported distribution package and +import package versions are consistent with each other can review the +:ref:`single-source-version` discussion for potential approaches to doing so. 
+
+As import packages and modules are not *required* to publish runtime
+version information in this way (see the withdrawn proposal in
+:pep:`PEP 396 <396>`), the ``__version__`` attribute should either only be
+queried with interfaces that are known to provide it (such as a project
+querying its own version or the version of one of its direct dependencies),
+or else the querying code should be designed to handle the case where the
+attribute is missing [#fallback-to-dist-version]_.
+
+Some projects may need to publish version information for external APIs
+that aren't the version of the module itself. Such projects should
+define their own project-specific ways of obtaining the relevant information
+at runtime. For example, the standard library's :mod:`ssl` module offers
+multiple ways to access the underlying OpenSSL library version::
+
+    >>> ssl.OPENSSL_VERSION
+    'OpenSSL 3.2.2 4 Jun 2024'
+    >>> ssl.OPENSSL_VERSION_INFO
+    (3, 2, 0, 2, 0)
+    >>> hex(ssl.OPENSSL_VERSION_NUMBER)
+    '0x30200020'
+
+--------------------------------------------------------------------------------
+
+.. [#version-examples] Some more examples of unusual version numbers are
+   given in a `blog post <versions-seth-larson_>`_ by Seth Larson.
+
+.. [#semver-strictness] For some personal viewpoints on this issue, see these
+   blog posts: `by Hynek Schlawack <semver-hynek-schlawack_>`_, `by Donald Stufft
+   <semver-donald-stufft_>`_, `by Bernát Gábor <semver-bernat-gabor_>`_, `by
+   Brett Cannon <semver-brett-cannon_>`_. For a humorous take, read about
+   ZeroVer_.
+
+.. [#fallback-to-dist-version] A full list mapping the top-level names available
+   for import to the distribution packages that provide those import packages and
+   modules may be obtained through the standard library's
+   :func:`importlib.metadata.packages_distributions` function. This means that
+   even code that is attempting to infer a version to report for all importable
+   top-level names has a means to fall back to reporting the distribution
+   version information if no ``__version__`` attribute is defined. 
Only standard + library modules, and modules added via means other than Python package + installation would fail to have version information reported in that case. + + +.. _zerover: https://0ver.org +.. _calver: https://calver.org +.. _semver: https://semver.org +.. _semver-bernat-gabor: https://bernat.tech/posts/version-numbers/ +.. _semver-brett-cannon: https://snarky.ca/why-i-dont-like-semver/ +.. _semver-donald-stufft: https://caremad.io/posts/2016/02/versioning-software/ +.. _semver-hynek-schlawack: https://hynek.me/articles/semver-will-not-save-you/ +.. _setuptools-scm: https://setuptools-scm.readthedocs.io +.. _versions-seth-larson: https://sethmlarson.dev/pep-440 diff --git a/source/distributing.rst b/source/distributing.rst deleted file mode 100644 index 6929b5c54..000000000 --- a/source/distributing.rst +++ /dev/null @@ -1,838 +0,0 @@ -=================================== -Packaging and Distributing Projects -=================================== - -:Page Status: Complete -:Last Reviewed: 2015-09-08 - -This section covers the basics of how to configure, package and distribute your -own Python projects. It assumes that you are already familiar with the contents -of the :doc:`installing` page. - -The section does *not* aim to cover best practices for Python project -development as a whole. For example, it does not provide guidance or tool -recommendations for version control, documentation, or testing. - -For more reference material, see `Building and Distributing Packages -`_ in the -:ref:`setuptools` docs, but note that some advisory content there may be -outdated. In the event of conflicts, prefer the advice in the Python -Packaging User Guide. - -.. contents:: Contents - :local: - - -Requirements for Packaging and Distributing -=========================================== - -1. First, make sure you have already fulfilled the :ref:`requirements for - installing packages `. - -2. 
Install "twine" [1]_: - - :: - - pip install twine - - You'll need this to upload your project :term:`distributions ` to :term:`PyPI ` (see :ref:`below - `). - - -Configuring your Project -======================== - - -Initial Files -------------- - -setup.py -~~~~~~~~ - -The most important file is "setup.py" which exists at the root of your project -directory. For an example, see the `setup.py -`_ in the `PyPA -sample project `_. - -"setup.py" serves two primary functions: - -1. It's the file where various aspects of your project are configured. The - primary feature of ``setup.py`` is that it contains a global ``setup()`` - function. The keyword arguments to this function are how specific details of - your project are defined. The most relevant arguments are explained in - :ref:`the section below `. - -2. It's the command line interface for running various commands that - relate to packaging tasks. To get a listing of available commands, run - ``python setup.py --help-commands``. - - -setup.cfg -~~~~~~~~~ - -"setup.cfg" is an ini file that contains option defaults for ``setup.py`` -commands. For an example, see the `setup.cfg -`_ in the `PyPA -sample project `_. - - -README.rst -~~~~~~~~~~ - -All projects should contain a readme file that covers the goal of the -project. The most common format is `reStructuredText -`_ with an "rst" extension, although -this is not a requirement. - -For an example, see `README.rst -`_ from the `PyPA -sample project `_ - -MANIFEST.in -~~~~~~~~~~~ - -A "MANIFEST.in" is needed in certain cases where you need to package additional -files that ``python setup.py sdist (or bdist_wheel)`` don't automatically -include. To see a list of what's included by default, see the `Specifying the -files to distribute -`_ -section from the :ref:`distutils` documentation. 
- -For an example, see the `MANIFEST.in -`_ from the `PyPA -sample project `_ - - -For details on writing a ``MANIFEST.in`` file, see the `The MANIFEST.in template -`_ -section from the :ref:`distutils` documentation. - - - -~~~~~~~~~~~~~~ - -Although it's not required, the most common practice is to include your -python modules and packages under a single top-level package that has the same -:ref:`name ` as your project, or something very close. - -For an example, see the `sample -`_ package that's -include in the `PyPA sample project `_ - - -.. _`setup() args`: - -setup() args ------------- - -As mentioned above, The primary feature of ``setup.py`` is that it contains a -global ``setup()`` function. The keyword arguments to this function are how -specific details of your project are defined. - -The most relevant arguments are explained below. The snippets given are taken -from the `setup.py -`_ contained in the -`PyPA sample project `_. - - -.. _`setup() name`: - -name -~~~~ - -:: - - name='sample', - -This is the name of your project, and will determine how your project is listed -on :term:`PyPI `. For details on permitted -characters, see the :pep:`name <426#name>` -section from :pep:`426`. - - -version -~~~~~~~ - -:: - - version='1.2.0', - -This is the current version of your project, allowing your users to determine whether or not -they have the latest version, and to indicate which specific versions they've tested their own -software against. - -Versions are displayed on :term:`PyPI ` for each release if you -publish your project. - -See :ref:`Choosing a versioning scheme` for more information on ways to use versions to convey -compatibility information to your users. - -If the project code itself needs run-time access to the version, the simplest -way is to keep the version in both ``setup.py`` and your code. If you'd rather -not duplicate the value, there are a few ways to manage this. See the -":ref:`Single sourcing the version`" Advanced Topics section. 
- - -description -~~~~~~~~~~~ - -:: - - description='A sample Python project', - long_description=long_description, - -Give a short and long description for you project. These values will be -displayed on :term:`PyPI ` if you publish your -project. - - -url -~~~ - -:: - - url='https://github.com/pypa/sampleproject', - - -Give a homepage url for your project. - - -author -~~~~~~ - -:: - - author='The Python Packaging Authority', - author_email='pypa-dev@googlegroups.com', - -Provide details about the author. - - -license -~~~~~~~ - -:: - - license='MIT', - -Provide the type of license you are using. - - -classifiers -~~~~~~~~~~~ - -:: - - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 3 - Alpha', - - # Indicate who your project is intended for - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', - - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: MIT License', - - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.2', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', - ], - -Provide a list of classifiers the categorize your project. For a full listing, -see https://pypi.python.org/pypi?%3Aaction=list_classifiers. - - -keywords -~~~~~~~~ - -:: - - keywords='sample setuptools development', - -List keywords that describe your project. - - -packages -~~~~~~~~ - -:: - - packages=find_packages(exclude=['contrib', 'docs', 'tests*']), - - -It's required to list the :term:`packages ` to be included -in your project. 
Although they can be listed manually, -``setuptools.find_packages`` finds them automatically. Use the ``exclude`` -keyword argument to omit packages that are not intended to be released and -installed. - - -install_requires -~~~~~~~~~~~~~~~~ - -:: - - install_requires=['peppercorn'], - -"install_requires" should be used to specify what dependencies a project -minimally needs to run. When the project is installed by :ref:`pip`, this is the -specification that is used to install its dependencies. - -For more on using "install_requires" see :ref:`install_requires vs Requirements files`. - - -.. _`Package Data`: - -package_data -~~~~~~~~~~~~ - -:: - - package_data={ - 'sample': ['package_data.dat'], - }, - - -Often, additional files need to be installed into a :term:`package `. These files are often data that’s closely related to the package’s -implementation, or text files containing documentation that might be of interest -to programmers using the package. These files are called "package data". - -The value must be a mapping from package name to a list of relative path names -that should be copied into the package. The paths are interpreted as relative to -the directory containing the package. - -For more information, see `Including Data Files -`_ -from the `setuptools docs `_. - - -.. _`Data Files`: - -data_files -~~~~~~~~~~ - -:: - - data_files=[('my_data', ['data/data_file'])], - -Although configuring :ref:`Package Data` is sufficient for most needs, in some -cases you may need to place data files *outside* of your :term:`packages -`. The ``data_files`` directive allows you to do that. - -Each (directory, files) pair in the sequence specifies the installation -directory and the files to install there. If directory is a relative path, it is -interpreted relative to the installation prefix (Python’s sys.prefix for -pure-Python :term:`distributions `, sys.exec_prefix for -distributions that contain extension modules). 
Each file name in files is -interpreted relative to the ``setup.py`` script at the top of the project source -distribution. - -For more information see the distutils section on `Installing Additional Files -`_. - -.. note:: - - :ref:`setuptools` allows absolute "data_files" paths, and pip honors them as - absolute, when installing from :term:`sdist `. This is not true when installing from :term:`wheel` - distributions. Wheels don't support absolute paths, and they end up being - installed relative to "site-packages". For discussion see `wheel Issue #92 - `_. - - -scripts -~~~~~~~ - -Although ``setup()`` supports a `scripts -`_ -keyword for pointing to pre-made scripts to install, the recommended approach to -achieve cross-platform compatibility is to use :ref:`console_scripts` entry -points (see below). - - -entry_points -~~~~~~~~~~~~ - -:: - - entry_points={ - ... - }, - - -Use this keyword to specify any plugins that your project provides for any named -entry points that may be defined by your project or others that you depend on. - -For more information, see the section on `Dynamic Discovery of Services and -Plugins -`_ -from the :ref:`setuptools` docs. - -The most commonly used entry point is "console_scripts" (see below). - -.. _`console_scripts`: - -console_scripts -*************** - -:: - - entry_points={ - 'console_scripts': [ - 'sample=sample:main', - ], - }, - -Use "console_script" `entry points -`_ -to register your script interfaces. You can then let the toolchain handle the -work of turning these interfaces into actual scripts [2]_. The scripts will be -generated during the install of your :term:`distribution `. - -For more information, see `Automatic Script Creation -`_ -from the `setuptools docs `_. - -.. 
_`Choosing a versioning scheme`: - -Choosing a versioning scheme ----------------------------- - -Standards compliance for interoperability -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Different Python projects may use different versioning schemes based on the needs of that -particular project, but all of them are required to comply with the flexible :pep:`public version -scheme <440#public-version-identifiers>` specified -in :pep:`440` in order to be supported in tools and libraries like ``pip`` -and ``setuptools``. - -Here are some examples of compliant version numbers:: - - 1.2.0.dev1 # Development release - 1.2.0a1 # Alpha Release - 1.2.0b1 # Beta Release - 1.2.0rc1 # Release Candidate - 1.2.0 # Final Release - 1.2.0.post1 # Post Release - 15.10 # Date based release - 23 # Serial release - -To further accommodate historical variations in approaches to version numbering, -:pep:`440` also defines a comprehensive technique for :pep:`version -normalisation <440#normalization>` that maps -variant spellings of different version numbers to a standardised canonical form. - -Scheme choices -~~~~~~~~~~~~~~ - -Semantic versioning (preferred) -******************************* - -For new projects, the recommended versioning scheme is based on `Semantic Versioning -`_, but adopts a different approach to handling pre-releases and -build metadata. - -The essence of semantic versioning is a 3-part MAJOR.MINOR.MAINTENANCE numbering scheme, -where the project author increments: - -1. MAJOR version when they make incompatible API changes, -2. MINOR version when they add functionality in a backwards-compatible manner, and -3. MAINTENANCE version when they make backwards-compatible bug fixes. - -Adopting this approach as a project author allows users to make use of :pep:`"compatible release" -<440#compatible-release>` specifiers, where -``name ~= X.Y`` requires at least release X.Y, but also allows any later release with -a matching MAJOR version. 
- -Python projects adopting semantic versioning should abide by clauses 1-8 of the -`Semantic Versioning 2.0.0 specification `_. - -Date based versioning -********************* - -Semantic versioning is not a suitable choice for all projects, such as those with a regular -time based release cadence and a deprecation process that provides warnings for a number of -releases prior to removal of a feature. - -A key advantage of date based versioning is that it is straightforward to tell how old the -base feature set of a particular release is given just the version number. - -Version numbers for date based projects typically take the form of YEAR.MONTH (for example, -``12.04``, ``15.10``). - -Serial versioning -***************** - -This is the simplest possible versioning scheme, and consists of a single number which is -incremented every release. - -While serial versioning is very easy to manage as a developer, it is the hardest to track -as an end user, as serial version numbers convey little or no information regarding API -backwards compatibility. - -Hybrid schemes -************** - -Combinations of the above schemes are possible. For example, a project may combine date -based versioning with serial versioning to create a YEAR.SERIAL numbering scheme that -readily conveys the approximate age of a release, but doesn't otherwise commit to a particular -release cadence within the year. - -Pre-release versioning -~~~~~~~~~~~~~~~~~~~~~~ - -Regardless of the base versioning scheme, pre-releases for a given final release may be -published as: - -* zero or more dev releases (denoted with a ".devN" suffix) -* zero or more alpha releases (denoted with a ".aN" suffix) -* zero or more beta releases (denoted with a ".bN" suffix) -* zero or more release candidates (denoted with a ".rcN" suffix) - -``pip`` and other modern Python package installers ignore pre-releases by default when -deciding which versions of dependencies to install. 
- - -Local version identifiers -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Public version identifiers are designed to support distribution via -:term:`PyPI `. Python's software distribution tools also support -the notion of a :pep:`local version identifier -<440#local-version-identifiers>`, which can be used to -identify local development builds not intended for publication, or modified variants of a release -maintained by a redistributor. - -A local version identifier takes the form ``+``. -For example:: - - 1.2.0.dev1+hg.5.b11e5e6f0b0b # 5th VCS commmit since 1.2.0.dev1 release - 1.2.1+fedora.4 # Package with downstream Fedora patches applied - - -Working in "Development Mode" -============================= - -Although not required, it's common to locally install your project in "editable" -or "develop" mode while you're working on it. This allows your project to be -both installed and editable in project form. - -Assuming you're in the root of your project directory, then run: - -:: - - pip install -e . - - -Although somewhat cryptic, ``-e`` is short for ``--editable``, and ``.`` refers -to the current working directory, so together, it means to install the current -directory (i.e. your project) in editable mode. This will also install any -dependencies declared with "install_requires" and any scripts declared with -"console_scripts". Dependencies will not be installed in editable mode. - -It's fairly common to also want to install some of your dependencies in editable -mode as well. For example, supposing your project requires "foo" and "bar", but -you want "bar" installed from vcs in editable mode, then you could construct a -requirements file like so:: - - -e . - -e git+https://somerepo/bar.git#egg=bar - -The first line says to install your project and any dependencies. The second -line overrides the "bar" dependency, such that it's fulfilled from vcs, not -PyPI. For more on requirements files, see the :ref:`Requirements File -` section in the pip docs. 
For more on vcs installs, -see the :ref:`VCS Support ` section of the pip docs. - -Lastly, if you don't want to install any dependencies at all, you can run:: - - pip install -e . --no-deps - - -For more information, see the `Development Mode -`_ section -of the `setuptools docs `_. - -.. _`Packaging Your Project`: - -Packaging your Project -====================== - -To have your project installable from a :term:`Package Index` like :term:`PyPI -`, you'll need to create a :term:`Distribution -` (aka ":term:`Package `" ) for your -project. - - - -Source Distributions --------------------- - -Minimally, you should create a :term:`Source Distribution `: - -:: - - python setup.py sdist - - -A "source distribution" is unbuilt (i.e, it's not a :term:`Built Distribution`), -and requires a build step when installed by pip. Even if the distribution is -pure python (i.e. contains no extensions), it still involves a build step to -build out the installation metadata from ``setup.py``. - - -Wheels ------- - -You should also create a wheel for your project. A wheel is a :term:`built -package ` that can be installed without needing to go -through the "build" process. Installing wheels is substantially faster for the -end user than installing from a source distribution. - -If your project is pure python (i.e. contains no compiled extensions) and -natively supports both Python 2 and 3, then you'll be creating what's called a -:ref:`"Universal Wheel" (see section below) `. - -If your project is pure python but does not natively support both Python 2 and -3, then you'll be creating a :ref:`"Pure Python Wheel" (see section below) `. - -If you project contains compiled extensions, then you'll be creating what's -called a :ref:`"Platform Wheel" (see section below) `. - - -.. _`Universal Wheels`: - -Universal Wheels -~~~~~~~~~~~~~~~~ - -"Universal Wheels" are wheels that are pure python (i.e. contains no compiled -extensions) and support Python 2 and 3. 
This is a wheel that can be installed -anywhere by :ref:`pip`. - -To build a Universal Wheel: - -:: - - python setup.py bdist_wheel --universal - - -You can also permanently set the ``--universal`` flag in "setup.cfg" (e.g., see -`sampleproject/setup.cfg -`_) - -:: - - [bdist_wheel] - universal=1 - - -Only use the ``--universal`` setting, if: - -1. Your project runs on Python 2 and 3 with no changes (i.e. it does not - require 2to3). -2. Your project does not have any C extensions. - -Beware that ``bdist_wheel`` does not currently have any checks to warn if you -use the setting inappropriately. - -If your project has optional C extensions, it is recommended not to publish a -universal wheel, because pip will prefer the wheel over a source installation, -and prevent the possibility of building the extension. - - -.. _`Pure Python Wheels`: - -Pure Python Wheels -~~~~~~~~~~~~~~~~~~ - -"Pure Python Wheels" that are not "universal" are wheels that are pure python -(i.e. contains no compiled extensions), but don't natively support both Python 2 -and 3. - -To build the wheel: - -:: - - python setup.py bdist_wheel - - -`bdist_wheel` will detect that the code is pure Python, and build a wheel that's -named such that it's usable on any Python installation with the same major -version (Python 2 or Python 3) as the version you used to build the wheel. For -details on the naming of wheel files, see :pep:`425` - -If your code supports both Python 2 and 3, but with different code (e.g., you -use `"2to3" `_) you can run -``setup.py bdist_wheel`` twice, once with Python 2 and once with Python 3. This -will produce wheels for each version. - - - -.. _`Platform Wheels`: - -Platform Wheels -~~~~~~~~~~~~~~~ - -"Platform Wheels" are wheels that are specific to a certain platform like linux, -OSX, or Windows, usually due to containing compiled extensions. 
- -To build the wheel: - -:: - - python setup.py bdist_wheel - - -`bdist_wheel` will detect that the code is not pure Python, and build a wheel -that's named such that it's only usable on the platform that it was built -on. For details on the naming of wheel files, see :pep:`425` - -.. note:: - - :term:`PyPI ` currently only allows uploads of - platform wheels for Windows and OS X, NOT linux. Currently, the wheel tag - specification (:pep:`425`) does not handle the variation that can - exist across linux distros. - - -.. _`Uploading your Project to PyPI`: - -Uploading your Project to PyPI -============================== - -.. note:: - - Before releasing on main PyPI repo, you might prefer training with - `PyPI test site `_ - which is cleaned on a semi regular basis. See - `these instructions `_ on how - to setup your configuration in order to use it. - -When you ran the command to create your distribution, a new directory dist/ was created under your project's root directory. That's where you'll find your distribution file(s) to upload. - -Create an account ------------------ - -First, you need a :term:`PyPI ` user -account. There are two options: - -1. Create an account manually `using the form on the PyPI website - `_. - -2. **(Not recommended):** Have an account created as part of - registering your first project (not recommended due to the - related security concerns, see option #3 below). - -If you created your account using option #1 (the form), you'll need to manually -write a ``~/.pypirc`` file like so. - - :: - - [distutils] - index-servers=pypi - - [pypi] - repository = https://upload.pypi.org/legacy/ - username = - password = - -You can leave out the password line if you use twine with its -``-p PASSWORD`` argument or prefer to simply enter your password -when prompted. - - -Register your project ---------------------- - -Next, if this is the first release, you currently need to explicitly register your -project prior to uploading. 
- -There are three ways to do this: - -1. Use `the form on the PyPI website - `_, to upload your - ``PKG-INFO`` info located in your local project tree at - ``myproject.egg-info/PKG-INFO``. If you don't have that file or directory, - then run ``python setup.py egg_info`` to have it generated. -2. Run ``twine register dist/mypkg.whl``, and :ref:`twine` will register your project - based on the package metadata in the specified files. Your ``~/.pypirc`` - must already be appropriately configured for twine to work. -3. **(Not recommended):** Run ``python setup.py register``. If you don't have - a user account already, a wizard will create one for you. This approach is - covered here due to it being mentioned in other guides, but it is not - recommended as it may use a plaintext HTTP or unverified HTTPS connection - on some Python versions, allowing your username and password to be intercepted - during transmission. - - -Upload your distributions -------------------------- - -Finally, you can upload your distributions to :term:`PyPI `. - -There are two options: - -1. Use :ref:`twine` - - :: - - twine upload dist/* - - The biggest reason to use twine is that ``python setup.py upload`` (option #2 - below) uploads files over plaintext. This means anytime you use it you expose - your username and password to a MITM attack. Twine uses only verified TLS to - upload to PyPI in order to protect your credentials from theft. - - Secondly it allows you to precreate your distribution files. ``python - setup.py upload`` only allows you to upload something that you've created in - the same command invocation. This means that you cannot test the exact file - you're going to upload to PyPI to ensure that it works before uploading it. - - Finally it allows you to pre-sign your files and pass the .asc files into the - command line invocation (``twine upload twine-1.0.1.tar.gz - twine-1.0.1.tar.gz.asc``). 
This enables you to be assured that you're typing - your gpg passphrase into gpg itself and not anything else since *you* will be - the one directly executing ``gpg --detach-sign -a ``. - - -2. **(Not recommended):** Use :ref:`setuptools`: - - :: - - python setup.py sdist bdist_wheel upload - - This approach is covered here due to it being mentioned in other guides, but it - is not recommended as it may use a plaintext HTTP or unverified HTTPS connection - on some Python versions, allowing your username and password to be intercepted - during transmission. - ----- - -.. [1] Depending on your platform, this may require root or Administrator - access. :ref:`pip` is currently considering changing this by `making user - installs the default behavior - `_. - - -.. [2] Specifically, the "console_script" approach generates ``.exe`` files on - Windows, which are necessary because the OS special-cases ``.exe`` files. - Script-execution features like ``PATHEXT`` and the :pep:`Python Launcher for - Windows <397>` allow scripts to - be used in many cases, but not all. diff --git a/source/extensions.rst b/source/extensions.rst deleted file mode 100644 index 4aefb42e5..000000000 --- a/source/extensions.rst +++ /dev/null @@ -1,290 +0,0 @@ -.. _`Binary Extensions`: - -================= -Binary Extensions -================= - -:Page Status: Incomplete -:Last Reviewed: 2013-12-08 - -One of the features of the CPython reference interpreter is that, in -addition to allowing the execution of Python code, it also exposes a rich -C API for use by other software. One of the most common uses of this C API -is to create importable C extensions that allow things which aren't -always easy to achieve in pure Python code. - -.. 
contents:: Contents - :local: - -An overview of binary extensions -================================ - -Use cases ---------- - -The typical use cases for binary extensions break down into just three -conventional categories: - -* accelerator modules: these modules are completely self-contained, and - are created solely to run faster than the equivalent pure Python code - runs in CPython. Ideally, accelerator modules will always have a pure - Python equivalent to use as a fallback if the accelerated version isn't - available on a given system. The CPython standard library makes extensive - use of accelerator modules. - -* wrapper modules: these modules are created to expose existing C interfaces - to Python code. They may either expose the underlying C interface directly, - or else expose a more "Pythonic" API that makes use of Python language - features to make the API easier to use. The CPython standard library makes - extensive use of wrapper modules. - -* low level system access: these modules are created to access lower level - features of the CPython runtime, the operating system, or the underlying - hardware. Through platform specific code, extension modules may achieve - things that aren't possible in pure Python code. A number of CPython - standard library modules are written in C in order to access interpreter - internals that aren't exposed at the language level. - - One particularly notable feature of C extensions is that, when they don't - need to call back into the interpreter runtime, they can release CPython's - global interpreter lock around long-running operations (regardling of - whether those operations are CPU or IO bound). - -Not all extension modules will fit neatly into the above categories. 
The -extension modules included with NumPy, for example, span all three use cases -- they move inner loops to C for speed reasons, wrap external libraries -written in C, FORTRAN and other languages, and use low level system -interfaces for both CPython and the underlying operation system to support -concurrent execution of vectorised operations and to tightly control the -exact memory layout of created objects. - - -Disadvantages -------------- - -The main disadvantage of using binary extensions is the fact that it makes -subsequent distribution of the software more difficult. One of the -advantages of using Python is that it is largely cross platform, and the -languages used to write extension modules (typically C or C++, but really -any language that can bind to the CPython C API) typically require that -custom binaries be created for different platforms. - -This means that binary extensions: - -* require that end users be able to either build them from source, or else - that someone publish pre-built binaries for common platforms - -* may not be compatible with different builds of the CPython reference - interpreter - -* often will not work correctly with alternative interpreters such as PyPy, - IronPython or Jython - -* if handcoded, make maintenance more difficult by requiring that - maintainers be familiar not only with Python, but also with the language - used to create the binary extension, as well as with the details of the - CPython C API. - -* if a pure Python fallback implementation is provided, make maintenance - more difficult by requiring that changes be implemented in two places, - and introducing additional complexity in the test suite to ensure both - versions are always executed. 
- -Another disadvantage of relying on binary extensions is that alternative -import mechanisms (such as the ability to import modules directly from -zipfiles) often won't work for extension modules (as the dynamic loading -mechanisms on most platforms can only load libraries from disk). - - -Alternatives to handcoded accelerator modules ---------------------------------------------- - -When extension modules are just being used to make code run faster (after -profiling has identified the code where the speed increase is worth -additional maintenance effort), a number of other alternatives should -also be considered: - -* look for existing optimised alternatives. The CPython standard libary - includes a number of optimised data structures and algorithms (especially - in the builtins and the ``collections`` and ``itertools`` modules). The - Python Package Index also offers additional alternatives. Sometimes, the - appropriate choice of standard library or third party module can avoid the - need to create your own accelerator module. - -* for long running applications, the JIT compiled `PyPy interpreter - `__ may offer a suitable alternative to the standard - CPython runtime. The main barrier to adopting PyPy is typically reliance - on other binary extension modules - while PyPy does emulate the CPython - C API, modules that rely on that cause problems for the PyPy JIT, and the - emulation layer can often expose latent defects in extension modules that - CPython currently tolerates (frequently around reference counting errors - - an object having one live reference instead of two often won't break - anything, but no references instead of one is a major problem). - -* `Cython `__ is a mature static compiler that can - compile most Python code to C extension modules. The initial compilation - provides some speed increases (by bypassing the CPython interpreter layer), - and Cython's optional static typing features can offer additional - opportunities for speed increases. 
Using Cython still has the disadvantage - of increasing the complexity of distributing the resulting application, - but has the benefit of having a reduced barrier to entry for Python - programmers (relative to other languages like C or C++). - -* `Numba `__ is a newer tool, created by members - of the scientific Python community, that aims to leverage LLVM to allow - selective compilation of pieces of a Python application to native - machine code at runtime. It requires that LLVM be available on the - system where the code is running, but can provide significant speed - increases, especially for operations that are amenable to vectorisation. - - -Alternatives to handcoded wrapper modules ------------------------------------------ - -The C ABI (Application Binary Interface) is a common standard for sharing -functionality between multiple applications. One of the strengths of the -CPython C API (Application Programming Interface) is allowing Python users -to tap into that functionality. However, wrapping modules by hand is quite -tedious, so a number of other alternative approaches should be considered. - -The approaches described below don't simplify the distribution case at all, -but they *can* significantly reduce the maintenance burden of keeping -wrapper modules up to date. - -* In addition to being useful for the creation of accelerator modules, - `Cython `__ is also useful for creating wrapper - modules. It still involves wrapping the interfaces by hand, however, so - may not be a good choice for wrapping large APIs. - -* `cffi `__ is a project created by some of the PyPy - developers to make it straightforward for developers that already know - both Python and C to expose their C modules to Python applications. It - also makes it relatively straightforward to wrap a C module based on its - header files, even if you don't know C yourself. 
- - One of the key advantages of ``cffi`` is that it is compatible with the - PyPy JIT, allowing CFFI wrapper modules to participate fully in PyPy's - tracing JIT optimisations. - -* `SWIG `__ is a wrapper interface generator that - allows a variety of programming languages, including Python, to interface - with C *and C++* code. - -* The standard library's ``ctypes`` module, while useful for getting access - to C level interfaces when header information isn't available, suffers - from the fact that it operates solely at the C ABI level, and thus has - no automatic consistency checking between the interface actually being - exported by the library and the one declared in the Python code. By - contrast, the above alternatives are all able to operate at the C *API* - level, using C header files to ensure consistency between the interface - exported by the library being wrapped and the one expected by the Python - wrapper module. While ``cffi`` *can* operate directly at the C ABI level, - it suffers from the same interface inconsistency problems as ``ctypes`` - when it is used that way. - - -Alternatives for low level system access ----------------------------------------- - -For applications that need low level system access (regardless of the -reason), a binary extension module often *is* the best way to go about it. -This is particularly true for low level access to the CPython runtime -itself, since some operations (like releasing the Global Interpreter Lock) -are simply invalid when the interpreter is running code, even if a module -like ``ctypes`` or ``cffi`` is used to obtain access to the relevant C -API interfaces. 
- -For cases where the extension module is manipulating the underlying -operating system or hardware (rather than the CPython runtime), it may -sometimes be better to just write an ordinary C library (or a library in -another systems programming language like C++ or Rust that can export a C -compatible ABI), and then use one of the wrapping techniques described -above to make the interface available as an importable Python module. - - -Implementing binary extensions -============================== - -:: - - mention the stable ABI (3.2+, link to the CPython C API docs) - mention the module lifecycle - mention the challenges of shared static state and subinterpreters - mention the implications of the GIL for extension modules - mention the memory allocation APIs in 3.4+ - - mention again that all this is one of the reasons why you probably - *don't* want to handcode your extension modules :) - - -Building binary extensions -========================== - -Setting up a build environment on Windows ------------------------------------------ - -Before it is possible to build a binary extension, it is necessary to ensure -that you have a suitable compiler available. On Windows, Visual C is used to -build the official CPython interpreter, and should be used to build compatible -binary extensions. - -Python 2.7 used Visual Studio 2008, Python 3.3 and 3.4 used Visual Studio 2010, -and Python 3.5+ uses Visual Studio 2015. Unfortunately, older versions of -Visual Studio are no longer easily available from Microsoft, so for versions -of Python prior to 3.5, the compilers must be obtained differently if you do -not already have a copy of the relevant version of Visual Studio. - -To set up a build environment for binary extensions, the steps are as follows: - - For Python 2.7 - - 1. Install "Visual C++ Compiler Package for Python 2.7", - which is available from - `Microsoft's website `__. - 2. 
Use (a recent version of) setuptools in your setup.py (pip will - do this for you, in any case). - 3. Done. - - For Python 3.4 - - 1. Install "Windows SDK for Windows 7 and .NET Framework 4" (v7.1), - which is available from - `Microsoft's website `__. - 2. Work from an SDK command prompt (with the environment variables - set, and the SDK on PATH). - 3. Set DISTUTILS_USE_SDK=1 - 4. Done. - - For Python 3.5 - - 1. Install `Visual Studio 2015 Community Edition - `__ - (or any later version, when these are released). - 2. Done. - -Note that from Python 3.5 onwards, Visual Studio works in a backward -compatible way, which means that any future version of Visual Studio will -be able to build Python extensions for all Python versions from 3.5 onwards. - -:: - - FIXME - - cover Windows binary compatibility requirements - cover Mac OS X binary compatibility requirements - cover the vagaries of Linux distros and other *nix systems - - - -Publishing binary extensions -============================ - -:: - - FIXME - - cover publishing as wheel files on PyPI or a custom index server - cover creation of Windows and Mac OS X installers - mention the fact that Linux distros have a requirement to build from - source in their own build systems anyway, so pre-built binaries for - *nix systems currently aren't common diff --git a/source/flow.rst b/source/flow.rst new file mode 100644 index 000000000..947c399db --- /dev/null +++ b/source/flow.rst @@ -0,0 +1,182 @@ +================== +The Packaging Flow +================== + +The document aims to outline the flow involved in publishing/distributing a +:term:`distribution package `, usually to the `Python +Package Index (PyPI)`_. It is written for package publishers, who are assumed +to be the package author. + +.. 
_Python Package Index (PyPI): https://pypi.org/ + +While the :doc:`tutorial ` walks through the +process of preparing a simple package for release, it does not fully enumerate +what steps and files are required, and for what purpose. + +Publishing a package requires a flow from the author's source code to an end +user's Python environment. The steps to achieve this are: + +- Have a source tree containing the package. This is typically a checkout from + a version control system (VCS). + +- Prepare a configuration file describing the package metadata (name, version + and so forth) and how to create the build artifacts. For most packages, this + will be a :file:`pyproject.toml` file, maintained manually in the source + tree. + +- Create build artifacts to be sent to the package distribution service + (usually PyPI); these will normally be a + :term:`source distribution ("sdist") ` + and one or more :term:`built distributions ("wheels") `. + These are made by a build tool using the configuration file from the + previous step. Often there is just one generic wheel for a pure Python + package. + +- Upload the build artifacts to the package distribution service. + +At that point, the package is present on the package distribution service. +To use the package, end users must: + +- Download one of the package's build artifacts from the package distribution + service. + +- Install it in their Python environment, usually in its ``site-packages`` + directory. This step may involve a build/compile step which, if needed, must + be described by the package metadata. + +These last 2 steps are typically performed by :ref:`pip` when an end user runs +``pip install``. + +The steps above are described in more detail below. + +The source tree +=============== + +The source tree contains the package source code, usually a checkout from a +VCS. The particular version of the code used to create the build artifacts +will typically be a checkout based on a tag associated with the version. 
+ +The configuration file +====================== + +The configuration file depends on the tool used to create the build artifacts. +The standard practice is to use a :file:`pyproject.toml` file in the `TOML +format`_. + +.. _TOML format: https://github.com/toml-lang/toml + +At a minimum, the :file:`pyproject.toml` file needs a ``[build-system]`` table +specifying your build tool. There are many build tools available, including +but not limited to :ref:`flit`, :ref:`hatch`, :ref:`pdm`, :ref:`poetry`, +:ref:`setuptools`, `trampolim`_, and `whey`_. Each tool's documentation will +show what to put in the ``[build-system]`` table. + +.. _trampolim: https://pypi.org/project/trampolim/ +.. _whey: https://pypi.org/project/whey/ + +For example, here is a table for using :ref:`hatch`: + +.. code-block:: toml + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + +With such a table in the :file:`pyproject.toml` file, +a ":term:`frontend `" tool like +:ref:`build` can run your chosen +build tool's ":term:`backend `" +to create the build artifacts. +Your build tool may also provide its own frontend. An install tool +like :ref:`pip` also acts as a frontend when it runs your build tool's backend +to install from a source distribution. + +The particular build tool you choose dictates what additional information is +required in the :file:`pyproject.toml` file. For example, you might specify: + +* a ``[project]`` table containing project + :doc:`Core Metadata ` + (name, version, author and so forth), + +* a ``[tool]`` table containing tool-specific configuration options. + +Refer to the :ref:`pyproject.toml guide ` for a +complete guide to ``pyproject.toml`` configuration. + + +Build artifacts +=============== + +The source distribution (sdist) +------------------------------- + +A source distribution contains enough to install the package from source in an +end user's Python environment. 
As such, it needs the package source, and may +also include tests and documentation. These are useful for end users wanting +to develop your sources, and for end user systems where some local compilation +step is required (such as a C extension). + +The :ref:`build` package knows how to invoke your build tool to create one of +these: + +.. code-block:: bash + + python3 -m build --sdist source-tree-directory + +Or, your build tool may provide its own interface for creating an sdist. + + +The built distributions (wheels) +-------------------------------- + +A built distribution contains only the files needed for an end user's Python +environment. No compilation steps are required during the install, and the +wheel file can simply be unpacked into the ``site-packages`` directory. This +makes the install faster and more convenient for end users. + +A pure Python package typically needs only one "generic" wheel. A package with +compiled binary extensions needs a wheel for each supported combination of +Python interpreter, operating system, and CPU architecture that it supports. +If a suitable wheel file is not available, tools like :ref:`pip` will fall +back to installing the source distribution. + +The :ref:`build` package knows how to invoke your build tool to create one of +these: + +.. code-block:: bash + + python3 -m build --wheel source-tree-directory + +Or, your build tool may provide its own interface for creating a wheel. + +.. note:: + + The default behaviour of :ref:`build` is to make both an sdist and a wheel + from the source in the current directory; the above examples are + deliberately specific. + +Upload to the package distribution service +========================================== + +The :ref:`twine` tool can upload build artifacts to PyPI for distribution, +using a command like: + +.. 
code-block:: bash + + twine upload dist/package-name-version.tar.gz dist/package-name-version-py3-none-any.whl + +Or, your build tool may provide its own interface for uploading. + +Download and install +==================== + +Now that the package is published, end users can download and install the +package into their Python environment. Typically this is done with :ref:`pip`, +using a command like: + +.. code-block:: bash + + python3 -m pip install package-name + +End users may also use other tools like :ref:`pipenv`, :ref:`poetry`, or +:ref:`pdm`. diff --git a/source/glossary.rst b/source/glossary.rst index dda9524b8..6a592125f 100644 --- a/source/glossary.rst +++ b/source/glossary.rst @@ -1,11 +1,7 @@ - ======== Glossary ======== -:Page Status: Complete -:Last Reviewed: 2015-09-08 - .. glossary:: @@ -16,22 +12,85 @@ Glossary extensions. + Build Backend + + A library that takes a source tree + and builds a :term:`source distribution ` or + :term:`built distribution ` from it. + The build is delegated to the backend by a + :term:`frontend `. + All backends offer a standardized interface. + + Examples of build backends are + :ref:`flit's flit-core `, + :ref:`hatch's hatchling `, + :ref:`maturin`, + :ref:`meson-python`, + :ref:`scikit-build-core`, + and :ref:`setuptools`. + + + Build Frontend + + A tool that users might run + that takes arbitrary source trees or + :term:`source distributions ` + and builds source distributions or :term:`wheels ` from them. + The actual building is delegated to each source tree's + :term:`build backend `. + + Examples of build frontends are :ref:`pip` and :ref:`build`. + + Built Distribution A :term:`Distribution ` format containing files and metadata that only need to be moved to the correct location on the target system, to be installed. :term:`Wheel` is such a format, whereas - distutil's :term:`Source Distribution ` is not, in that it requires a build step before it can be - installed. 
This format does not imply that python files have to be + installed. This format does not imply that Python files have to be precompiled (:term:`Wheel` intentionally does not include compiled - python files). + Python files). See :ref:`package-formats` for more information. + + + Built Metadata + + The concrete form :term:`Core Metadata` takes + when included inside an installed :term:`Project` (``METADATA`` file) + or a :term:`Distribution Archive` + (``PKG-INFO`` in a + :term:`Sdist ` + and ``METADATA`` in a :term:`Wheel`). + + + Core Metadata + + The :ref:`specification ` + and the set of :term:`Core Metadata Field`\s it defines + that describe key static attributes of + a :term:`Distribution Package` or :term:`Installed Project`. + + + Core Metadata Field + + A single key-value pair + (or sequence of such with the same name, for multiple-use fields) + defined in the :term:`Core Metadata` spec + and stored in the :term:`Built Metadata`. + Notably, distinct from a :term:`Pyproject Metadata Key`. + + + Distribution Archive + + The physical distribution artifact (i.e. a file on disk) + for a :term:`Distribution Package`. Distribution Package A versioned archive file that contains Python :term:`packages `, :term:`modules `, and other resource files that are + Package>`, :term:`modules `, and other resource files that are used to distribute a :term:`Release`. The archive file is what an end-user will download from the internet and install. @@ -41,19 +100,18 @@ Glossary :term:`Import Package` (which is also commonly called a "package") or another kind of distribution (e.g. a Linux distribution or the Python language distribution), which are often referred to with the single term - "distribution". + "distribution". See :ref:`distribution-package-vs-import-package` + for a breakdown of the differences. Egg A :term:`Built Distribution` format introduced by :ref:`setuptools`, - which is being replaced by :term:`Wheel`. 
For details, see `The - Internal Structure of Python Eggs - `_ and - `Python Eggs `_ + which has been replaced by :term:`Wheel`. For details, see + :ref:`egg-format`. Extension Module - A :term:`module` written in the low-level language of the Python implementation: + A :term:`Module` written in the low-level language of the Python implementation: C/C++ for Python, Java for Jython. Typically contained in a single dynamically loadable pre-compiled file, e.g. a shared object (.so) file for Python extensions on Unix, a DLL (given the .pyd extension) for @@ -61,6 +119,25 @@ Glossary extensions. + Import Package + + A Python module which can contain other modules or recursively, other + packages. + + An import package is more commonly referred to with the single word + "package", but this guide will use the expanded term when more clarity + is needed to prevent confusion with a :term:`Distribution Package` which + is also commonly called a "package". See :ref:`distribution-package-vs-import-package` + for a breakdown of the differences. + + + Installed Project + + A :term:`Project` that is installed for use with + a Python interpreter or :term:`Virtual Environment`, + as described in the specification :ref:`recording-installed-packages`. + + Known Good Set (KGS) A set of distributions at specified versions which are compatible with @@ -70,15 +147,39 @@ Glossary multiple individual distributions. - Import Package + License Classifier - A Python module which can contain other modules or recursively, other - packages. + A PyPI Trove classifier + (as :ref:`described ` + in the :term:`Core Metadata` specification) + which begins with ``License ::``. + + + License Expression + SPDX Expression + + A string with valid SPDX license expression syntax, + including one or more SPDX :term:`License Identifier`\(s), + which describes a :term:`Project`'s license(s) + and how they inter-relate. 
+ Examples: + ``GPL-3.0-or-later``, + ``MIT AND (Apache-2.0 OR BSD-2-Clause)`` + + + License Identifier + SPDX Identifier + + A valid SPDX short-form license identifier, + originally specified in :pep:`639`. + This includes all valid SPDX identifiers and + the custom ``LicenseRef-[idstring]`` strings conforming to the + SPDX specification. + Examples: + ``MIT``, + ``GPL-3.0-only``, + ``LicenseRef-My-Custom-License`` - An import package is more commonly referred to with the single word - "package", but this guide will use the expanded term when more clarity - is needed to prevent confusion with a :term:`Distribution Package` which - is also commonly called a "package". Module @@ -92,6 +193,13 @@ Glossary :term:`package ` discovery and consumption. + Per Project Index + + A private or other non-canonical :term:`Package Index` indicated by + a specific :term:`Project` as the index preferred or required to + resolve dependencies of that project. + + Project A library, framework, script, plugin, application, or collection of data @@ -99,11 +207,10 @@ Glossary packaged into a :term:`Distribution `. Since most projects create :term:`Distributions ` - using :ref:`distutils` or :ref:`setuptools`, another practical way to - define projects currently is something that contains a :term:`setup.py` - at the root of the project src directory, where "setup.py" is the - project specification filename used by :ref:`distutils` and - :ref:`setuptools`. + using either :pep:`518` ``build-system``, :ref:`distutils` or + :ref:`setuptools`, another practical way to define projects currently + is something that contains a :term:`pyproject.toml`, :term:`setup.py`, + or :term:`setup.cfg` file at the root of the project source directory. Python projects must have unique names, which are registered on :term:`PyPI `. Each project will then @@ -117,27 +224,94 @@ Glossary 'bar'. + Project Root Directory + + The filesystem directory in which + a :term:`Project`'s :term:`source tree ` is located. 
+ + + Project Source Tree + + The on-disk format of a :term:`Project` used for development, + containing its raw source code before being packaged + into a + :term:`Source Distribution ` + or :term:`Built Distribution`. + + + Project Source Metadata + + Metadata defined by the package author + in a :term:`Project`'s :term:`source tree `, + to be transformed into :term:`Core Metadata field`\s + in the :term:`Built Metadata` + by the project's :term:`build backend `. + Can be written as :term:`Pyproject Metadata`, + or in a tool-specific format + (under the ``[tool]`` table in ``pyproject.toml``, + or in a tool's own configuration file). + + Pure Module - A :term:`module` written in Python and contained in a single .py file (and - possibly associated .pyc and/or .pyo files). + A :term:`Module` written in Python and contained in a single ``.py`` file (and + possibly associated ``.pyc`` and/or ``.pyo`` files). + + + Pyproject Metadata + + The :term:`Project Source Metadata` format + defined by the :ref:`declaring-project-metadata` specification + and originally introduced in :pep:`621`, + stored as :term:`Pyproject Metadata Key`\s + under the ``[project]`` table of a :term:`pyproject.toml` file. + Notably, *not* a tool-specific source metadata format + under the ``[tool]`` table in ``pyproject.toml``. + + + Pyproject Metadata Key + + A top-level TOML key in the ``[project]`` table in ``pyproject.toml``; + part of the :term:`Pyproject Metadata`. + Notably, distinct from a :term:`Core Metadata Field`. + + + Pyproject Metadata Subkey + + A second-level TOML key under a table-valued + :term:`Pyproject Metadata Key`. Python Packaging Authority (PyPA) - PyPA is a working group that maintains many of the relevant projects in - Python packaging. They maintain a site at https://www.pypa.io, host projects - on `github `_ and `bitbucket - `_, and discuss issues on the `pypa-dev - mailing list `_. 
+ PyPA is a working group that maintains many of the relevant + projects in Python packaging. They maintain a site at + :doc:`pypa.io `, host projects on `GitHub + `_ and `Bitbucket + `_, and discuss issues on the + `distutils-sig mailing list + `_ + and `the Python Discourse forum `__. Python Package Index (PyPI) - `PyPI `_ is the default :term:`Package + `PyPI `_ is the default :term:`Package Index` for the Python community. It is open to all Python developers to consume and distribute their distributions. + pypi.org + + `pypi.org `_ is the domain name for the + :term:`Python Package Index (PyPI)`. It replaced the legacy index + domain name, ``pypi.python.org``, in 2017. It is powered by + :ref:`warehouse`. + + pyproject.toml + + The tool-agnostic :term:`Project` specification file. + Defined in :pep:`518`. + Release A snapshot of a :term:`Project` at a particular point in time, denoted @@ -161,10 +335,8 @@ Glossary Requirement Specifier A format used by :ref:`pip` to install packages from a :term:`Package - Index`. For an EBNF diagram of the format, see the - `pkg_resources.Requirement - `_ - entry in the :ref:`setuptools` docs. For example, "foo>=1.3" is a + Index`. For an EBNF diagram of the format, see :ref:`dependency-specifiers`. + For example, "foo>=1.3" is a requirement specifier, where "foo" is the project name, and the ">=1.3" portion is the :term:`Version Specifier` @@ -175,24 +347,44 @@ Glossary docs on :ref:`pip:Requirements Files`. + Root License Directory + License Directory + + The directory under which license files are stored in a + :term:`Project Source Tree`, :term:`Distribution Archive` + or :term:`Installed Project`. + For a :term:`Project Source Tree` or + :term:`Source Distribution (or "sdist")`, this is the + :term:`Project Root Directory`. + For a :term:`Built Distribution` or :term:`Installed Project`, + this is the :file:`.dist-info/licenses/` directory of + the wheel archive or project folder respectively. 
+ Also, the root directory that paths + recorded in the ``License-File`` + :term:`Core Metadata Field` are relative to. + + setup.py + setup.cfg - The project specification file for :ref:`distutils` and :ref:`setuptools`. + The project specification files for :ref:`distutils` and :ref:`setuptools`. + See also :term:`pyproject.toml`. Source Archive An archive containing the raw source code for a :term:`Release`, prior - to creation of an :term:`Source Distribution ` or :term:`Built Distribution`. Source Distribution (or "sdist") - A :term:`distribution ` format (usually generated - using ``python setup.py sdist``) that provides metadata and the + A :term:`distribution ` format (usually generated + using ``python -m build --sdist``) that provides metadata and the essential source files needed for installing by a tool like :ref:`pip`, - or for generating a :term:`Built Distribution`. + or for generating a :term:`Built Distribution`. See :ref:`package-formats` + for more information. System Package @@ -204,11 +396,10 @@ Glossary Version Specifier The version component of a :term:`Requirement Specifier`. For example, - the ">=1.3" portion of "foo>=1.3". :pep:`440` contains - a :pep:`full specification - <440#version-specifiers>` of the - specifiers that Python packaging currently supports. Support for PEP440 - was implemented in :ref:`setuptools` v8.0 and :ref:`pip` v6.0. + the ">=1.3" portion of "foo>=1.3". Read the :ref:`Version specifier specification + ` for a full description of the + specifiers that Python packaging currently supports. Support for this + specification was implemented in :ref:`setuptools` v8.0 and :ref:`pip` v6.0. Virtual Environment @@ -217,11 +408,22 @@ Glossary wide. For more information, see the section on :ref:`Creating and using Virtual Environments`. + + Wheel Format Wheel - A :term:`Built Distribution` format introduced by :pep:`427`, - which is intended to replace the :term:`Egg` format. Wheel is currently - supported by :ref:`pip`. 
+ The standard :term:`Built Distribution` format + originally introduced in :pep:`427` + and defined by the :ref:`binary-distribution-format` specification. + See :ref:`package-formats` for more information. + Not to be confused with its reference implementation, + the :term:`Wheel Project`. + + + Wheel Project + + The PyPA reference implementation of the :term:`Wheel Format`; see :ref:`wheel`. + Working Set diff --git a/source/guides/analyzing-pypi-package-downloads.rst b/source/guides/analyzing-pypi-package-downloads.rst new file mode 100644 index 000000000..2ad02fed5 --- /dev/null +++ b/source/guides/analyzing-pypi-package-downloads.rst @@ -0,0 +1,348 @@ +.. _analyzing-pypi-package-downloads: + +================================ +Analyzing PyPI package downloads +================================ + +This section covers how to use the public PyPI download statistics dataset +to learn more about downloads of a package (or packages) hosted on PyPI. For +example, you can use it to discover the distribution of Python versions used to +download a package. + + +Background +========== + +PyPI does not display download statistics for a number of reasons: [#]_ + +- **Inefficient to make work with a Content Distribution Network (CDN):** + Download statistics change constantly. Including them in project pages, which + are heavily cached, would require invalidating the cache more often, and + reduce the overall effectiveness of the cache. 
+
+- **Highly inaccurate:** A number of things prevent the download counts from
+  being accurate, some of which include:
+
+  - ``pip``'s download cache (lowers download counts)
+  - Internal or unofficial mirrors (can both raise or lower download counts)
+  - Packages not hosted on PyPI (for comparison's sake)
+  - Unofficial scripts or attempts at download count inflation (raises download
+    counts)
+  - Known historical data quality issues (lowers download counts)
+
+- **Not particularly useful:** Just because a project has been downloaded a lot
+  doesn't mean it's good; similarly, just because a project hasn't been
+  downloaded a lot doesn't mean it's bad!
+
+In short, because its value is low for various reasons, and the tradeoffs
+required to make it work are high, it has not been an effective use of
+limited resources.
+
+Public dataset
+==============
+
+As an alternative, the `Linehaul project `__
+streams download logs from PyPI to `Google BigQuery`_ [#]_, where they are
+stored as a public dataset.
+
+Getting set up
+--------------
+
+In order to use `Google BigQuery`_ to query the `public PyPI download
+statistics dataset`_, you'll need a Google account and to enable the BigQuery
+API on a Google Cloud Platform project. You can run up to 1TB of queries
+per month `using the BigQuery free tier without a credit card
+`__
+
+- Navigate to the `BigQuery web UI`_.
+- Create a new project.
+- Enable the `BigQuery API
+  `__.
+
+For more detailed instructions on how to get started with BigQuery, check out
+the `BigQuery quickstart guide
+`__.
+
+
+Data schema
+-----------
+
+Linehaul writes an entry in a ``bigquery-public-data.pypi.file_downloads`` table for each
+download. The table contains information about what file was downloaded and how
+it was downloaded. 
Some useful columns from the `table schema +`__ +include: + ++------------------------+-----------------+-----------------------------+ +| Column | Description | Examples | ++========================+=================+=============================+ +| timestamp | Date and time | ``2020-03-09 00:33:03 UTC`` | ++------------------------+-----------------+-----------------------------+ +| file.project | Project name | ``pipenv``, ``nose`` | ++------------------------+-----------------+-----------------------------+ +| file.version | Package version | ``0.1.6``, ``1.4.2`` | ++------------------------+-----------------+-----------------------------+ +| details.installer.name | Installer | pip, :ref:`bandersnatch` | ++------------------------+-----------------+-----------------------------+ +| details.python | Python version | ``2.7.12``, ``3.6.4`` | ++------------------------+-----------------+-----------------------------+ + + +Useful queries +-------------- + +Run queries in the `BigQuery web UI`_ by clicking the "Compose query" button. + +Note that the rows are stored in a partitioned table, which helps +limit the cost of queries. These example queries analyze downloads from +recent history by filtering on the ``timestamp`` column. + +Counting package downloads +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following query counts the total number of downloads for the project +"pytest". + +.. code-block:: sql + + #standardSQL + SELECT COUNT(*) AS num_downloads + FROM `bigquery-public-data.pypi.file_downloads` + WHERE file.project = 'pytest' + -- Only query the last 30 days of history + AND DATE(timestamp) + BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY) + AND CURRENT_DATE() + ++---------------+ +| num_downloads | ++===============+ +| 26190085 | ++---------------+ + +To count downloads from pip only, filter on the ``details.installer.name`` +column. + +.. 
code-block:: sql + + #standardSQL + SELECT COUNT(*) AS num_downloads + FROM `bigquery-public-data.pypi.file_downloads` + WHERE file.project = 'pytest' + AND details.installer.name = 'pip' + -- Only query the last 30 days of history + AND DATE(timestamp) + BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY) + AND CURRENT_DATE() + ++---------------+ +| num_downloads | ++===============+ +| 24334215 | ++---------------+ + +Package downloads over time +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To group by monthly downloads, use the ``TIMESTAMP_TRUNC`` function. Also +filtering by this column reduces corresponding costs. + +.. code-block:: sql + + #standardSQL + SELECT + COUNT(*) AS num_downloads, + DATE_TRUNC(DATE(timestamp), MONTH) AS `month` + FROM `bigquery-public-data.pypi.file_downloads` + WHERE + file.project = 'pytest' + -- Only query the last 6 months of history + AND DATE(timestamp) + BETWEEN DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 6 MONTH), MONTH) + AND CURRENT_DATE() + GROUP BY `month` + ORDER BY `month` DESC + ++---------------+------------+ +| num_downloads | month | ++===============+============+ +| 1956741 | 2018-01-01 | ++---------------+------------+ +| 2344692 | 2017-12-01 | ++---------------+------------+ +| 1730398 | 2017-11-01 | ++---------------+------------+ +| 2047310 | 2017-10-01 | ++---------------+------------+ +| 1744443 | 2017-09-01 | ++---------------+------------+ +| 1916952 | 2017-08-01 | ++---------------+------------+ + +Python versions over time +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Extract the Python version from the ``details.python`` column. Warning: This +query processes over 500 GB of data. + +.. 
code-block:: sql + + #standardSQL + SELECT + REGEXP_EXTRACT(details.python, r"[0-9]+\.[0-9]+") AS python_version, + COUNT(*) AS num_downloads, + FROM `bigquery-public-data.pypi.file_downloads` + WHERE + -- Only query the last 6 months of history + DATE(timestamp) + BETWEEN DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 6 MONTH), MONTH) + AND CURRENT_DATE() + GROUP BY `python_version` + ORDER BY `num_downloads` DESC + ++--------+---------------+ +| python | num_downloads | ++========+===============+ +| 3.7 | 18051328726 | ++--------+---------------+ +| 3.6 | 9635067203 | ++--------+---------------+ +| 3.8 | 7781904681 | ++--------+---------------+ +| 2.7 | 6381252241 | ++--------+---------------+ +| null | 2026630299 | ++--------+---------------+ +| 3.5 | 1894153540 | ++--------+---------------+ + + +Getting absolute links to artifacts +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +It's sometimes helpful to be able to get the absolute links to download +artifacts from PyPI based on their hashes, e.g. if a particular project or +release has been deleted from PyPI. The metadata table includes the ``path`` +column, which includes the hash and artifact filename. + +.. note:: + The URL generated here is not guaranteed to be stable, but currently aligns with the URL where PyPI artifacts are hosted. + +.. 
code-block:: sql + + SELECT + CONCAT('https://files.pythonhosted.org/packages', path) as url + FROM + `bigquery-public-data.pypi.distribution_metadata` + WHERE + filename LIKE 'sampleproject%' + + ++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| url | ++===================================================================================================================================================================+ +| https://files.pythonhosted.org/packages/eb/45/79be82bdeafcecb9dca474cad4003e32ef8e4a0dec6abbd4145ccb02abe1/sampleproject-1.2.0.tar.gz | ++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| https://files.pythonhosted.org/packages/56/0a/178e8bbb585ec5b13af42dae48b1d7425d6575b3ff9b02e5ec475e38e1d6/sampleproject_nomura-1.2.0-py2.py3-none-any.whl | ++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| https://files.pythonhosted.org/packages/63/88/3200eeaf22571f18d2c41e288862502e33365ccbdc12b892db23f51f8e70/sampleproject_nomura-1.2.0.tar.gz | ++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| https://files.pythonhosted.org/packages/21/e9/2743311822e71c0756394b6c5ab15cb64ca66c78c6c6a5cd872c9ed33154/sampleproject_doubleyoung18-1.3.0-py2.py3-none-any.whl | ++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| https://files.pythonhosted.org/packages/6f/5b/2f3fe94e1c02816fe23c7ceee5292fb186912929e1972eee7fb729fa27af/sampleproject-1.3.1.tar.gz | 
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + + +Caveats +======= + +In addition to the caveats listed in the background above, Linehaul suffered +from a bug which caused it to significantly under-report download statistics +prior to July 26, 2018. Downloads before this date are proportionally accurate +(e.g. the percentage of Python 2 vs. Python 3 downloads) but total numbers are +lower than actual by an order of magnitude. + + +Additional tools +================ + +Besides using the BigQuery console, there are some additional tools which may +be useful when analyzing download statistics. + +``google-cloud-bigquery`` +------------------------- + +You can also access the public PyPI download statistics dataset +programmatically via the BigQuery API and the `google-cloud-bigquery`_ project, +the official Python client library for BigQuery. + +.. code-block:: python + + from google.cloud import bigquery + + # Note: depending on where this code is being run, you may require + # additional authentication. See: + # https://cloud.google.com/bigquery/docs/authentication/ + client = bigquery.Client() + + query_job = client.query(""" + SELECT COUNT(*) AS num_downloads + FROM `bigquery-public-data.pypi.file_downloads` + WHERE file.project = 'pytest' + -- Only query the last 30 days of history + AND DATE(timestamp) + BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY) + AND CURRENT_DATE()""") + + results = query_job.result() # Waits for job to complete. + for row in results: + print("{} downloads".format(row.num_downloads)) + + +``pypinfo`` +----------- + +`pypinfo`_ is a command-line tool which provides access to the dataset and +can generate several useful queries. For example, you can query the total +number of download for a package with the command ``pypinfo package_name``. + +Install `pypinfo`_ using pip. + +.. 
code-block:: bash + + python3 -m pip install pypinfo + +Usage: + +.. code-block:: console + + $ pypinfo requests + Served from cache: False + Data processed: 6.87 GiB + Data billed: 6.87 GiB + Estimated cost: $0.04 + + | download_count | + | -------------- | + | 9,316,415 | + + +``pandas-gbq`` +-------------- + +The `pandas-gbq`_ project allows for accessing query results via `Pandas`_. + + +References +========== + +.. [#] `PyPI Download Counts deprecation email `__ +.. [#] `PyPI BigQuery dataset announcement email `__ + +.. _public PyPI download statistics dataset: https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=pypi&page=dataset +.. _Google BigQuery: https://cloud.google.com/bigquery +.. _BigQuery web UI: https://console.cloud.google.com/bigquery +.. _pypinfo: https://github.com/ofek/pypinfo +.. _google-cloud-bigquery: https://cloud.google.com/bigquery/docs/reference/libraries +.. _pandas-gbq: https://pandas-gbq.readthedocs.io/en/latest/ +.. _Pandas: https://pandas.pydata.org/ diff --git a/source/code/appveyor.yml b/source/guides/appveyor-sample/appveyor.yml similarity index 91% rename from source/code/appveyor.yml rename to source/guides/appveyor-sample/appveyor.yml index 44856abb0..97dedea1a 100644 --- a/source/code/appveyor.yml +++ b/source/guides/appveyor-sample/appveyor.yml @@ -3,7 +3,7 @@ environment: matrix: # For Python versions available on Appveyor, see - # http://www.appveyor.com/docs/installed-software#python + # https://www.appveyor.com/docs/windows-images-software/#python # The list here is complete (excluding Python 2.6, which # isn't covered by this document) at the time of writing. @@ -17,6 +17,7 @@ environment: - PYTHON: "C:\\Python34-x64" DISTUTILS_USE_SDK: "1" - PYTHON: "C:\\Python35-x64" + - PYTHON: "C:\\Python36-x64" install: # We need wheel installed to build wheels @@ -31,7 +32,7 @@ test_script: # only needed to support those cases. 
# Note that you must use the environment variable %PYTHON% to refer to # the interpreter you're using - Appveyor does not do anything special - # to put the Python evrsion you want to use on PATH. + # to put the Python version you want to use on PATH. - "build.cmd %PYTHON%\\python.exe setup.py test" after_test: diff --git a/source/code/build.cmd b/source/guides/appveyor-sample/build.cmd similarity index 91% rename from source/code/build.cmd rename to source/guides/appveyor-sample/build.cmd index 243dc9a1f..23df2b69b 100644 --- a/source/code/build.cmd +++ b/source/guides/appveyor-sample/build.cmd @@ -4,7 +4,7 @@ :: MS Windows SDK for Windows 7 and .NET Framework 4 :: :: More details at: -:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows +:: https://github.com/cython/cython/wiki/CythonExtensionsOnWindows IF "%DISTUTILS_USE_SDK%"=="1" ( ECHO Configuring environment to build with MSVC on a 64bit architecture diff --git a/source/guides/creating-and-discovering-plugins.rst b/source/guides/creating-and-discovering-plugins.rst new file mode 100644 index 000000000..601f2b4a6 --- /dev/null +++ b/source/guides/creating-and-discovering-plugins.rst @@ -0,0 +1,171 @@ +================================ +Creating and discovering plugins +================================ + +Often when creating a Python application or library you'll want the ability to +provide customizations or extra features via **plugins**. Because Python +packages can be separately distributed, your application or library may want to +automatically **discover** all of the plugins available. + +There are three major approaches to doing automatic plugin discovery: + +#. `Using naming convention`_. +#. `Using namespace packages`_. +#. `Using package metadata`_. 
+ + +Using naming convention +======================= + +If all of the plugins for your application follow the same naming convention, +you can use :func:`pkgutil.iter_modules` to discover all of the top-level +modules that match the naming convention. For example, `Flask`_ uses the +naming convention ``flask_{plugin_name}``. If you wanted to automatically +discover all of the Flask plugins installed: + +.. code-block:: python + + import importlib + import pkgutil + + discovered_plugins = { + name: importlib.import_module(name) + for finder, name, ispkg + in pkgutil.iter_modules() + if name.startswith('flask_') + } + +If you had both the `Flask-SQLAlchemy`_ and `Flask-Talisman`_ plugins installed +then ``discovered_plugins`` would be: + +.. code-block:: python + + { + 'flask_sqlalchemy': , + 'flask_talisman': , + } + +Using naming convention for plugins also allows you to query +the Python Package Index's :ref:`simple repository API ` +for all packages that conform to your naming convention. + +.. _Flask: https://pypi.org/project/Flask/ +.. _Flask-SQLAlchemy: https://pypi.org/project/Flask-SQLAlchemy/ +.. _Flask-Talisman: https://pypi.org/project/flask-talisman + + +Using namespace packages +======================== + +:doc:`Namespace packages ` can be used to provide +a convention for where to place plugins and also provides a way to perform +discovery. For example, if you make the sub-package ``myapp.plugins`` a +namespace package then other :term:`distributions ` can +provide modules and packages to that namespace. Once installed, you can use +:func:`pkgutil.iter_modules` to discover all modules and packages installed +under that namespace: + +.. code-block:: python + + import importlib + import pkgutil + + import myapp.plugins + + def iter_namespace(ns_pkg): + # Specifying the second argument (prefix) to iter_modules makes the + # returned name an absolute name instead of a relative one. 
This allows + # import_module to work without having to do additional modification to + # the name. + return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".") + + discovered_plugins = { + name: importlib.import_module(name) + for finder, name, ispkg + in iter_namespace(myapp.plugins) + } + +Specifying ``myapp.plugins.__path__`` to :func:`~pkgutil.iter_modules` causes +it to only look for the modules directly under that namespace. For example, +if you have installed distributions that provide the modules ``myapp.plugins.a`` +and ``myapp.plugins.b`` then ``discovered_plugins`` in this case would be: + +.. code-block:: python + + { + 'a': , + 'b': , + } + +This sample uses a sub-package as the namespace package (``myapp.plugins``), but +it's also possible to use a top-level package for this purpose (such as +``myapp_plugins``). How to pick the namespace to use is a matter of preference, +but it's not recommended to make your project's main top-level package +(``myapp`` in this case) a namespace package for the purpose of plugins, as one +bad plugin could cause the entire namespace to break which would in turn make +your project unimportable. For the "namespace sub-package" approach to work, +the plugin packages must omit the :file:`__init__.py` for your top-level +package directory (``myapp`` in this case) and include the namespace-package +style :file:`__init__.py` in the namespace sub-package directory +(``myapp/plugins``). This also means that plugins will need to explicitly pass +a list of packages to :func:`setup`'s ``packages`` argument instead of using +:func:`setuptools.find_packages`. + +.. warning:: Namespace packages are a complex feature and there are several + different ways to create them. It's highly recommended to read the + :doc:`packaging-namespace-packages` documentation and clearly document + which approach is preferred for plugins to your project. + +.. 
_plugin-entry-points: + +Using package metadata +====================== + +Packages can have metadata for plugins described in the :ref:`entry-points`. +By specifying them, a package announces that it contains a specific kind of plugin. +Another package supporting this kind of plugin can use the metadata to discover that plugin. + +For example if you have a package named ``myapp-plugin-a`` and it includes +the following in its ``pyproject.toml``: + +.. code-block:: toml + + [project.entry-points.'myapp.plugins'] + a = 'myapp_plugin_a' + +Then you can discover and load all of the registered entry points by using +:func:`importlib.metadata.entry_points` (or the backport_ +``importlib_metadata >= 3.6`` for Python 3.6-3.9): + +.. code-block:: python + + import sys + if sys.version_info < (3, 10): + from importlib_metadata import entry_points + else: + from importlib.metadata import entry_points + + discovered_plugins = entry_points(group='myapp.plugins') + + +In this example, ``discovered_plugins`` would be a collection of type :class:`importlib.metadata.EntryPoint`: + +.. code-block:: python + + ( + EntryPoint(name='a', value='myapp_plugin_a', group='myapp.plugins'), + ... + ) + +Now the module of your choice can be imported by executing +``discovered_plugins['a'].load()``. + +.. note:: The ``entry_point`` specification in :file:`setup.py` is fairly + flexible and has a lot of options. It's recommended to read over the entire + section on :doc:`entry points ` . + +.. note:: Since this specification is part of the :doc:`standard library + `, most packaging tools other than setuptools + provide support for defining entry points. + +.. _backport: https://importlib-metadata.readthedocs.io/en/latest/ diff --git a/source/guides/creating-command-line-tools.rst b/source/guides/creating-command-line-tools.rst new file mode 100644 index 000000000..8266fffdb --- /dev/null +++ b/source/guides/creating-command-line-tools.rst @@ -0,0 +1,192 @@ +.. 
_creating-command-line-tools: + +========================================= +Creating and packaging command-line tools +========================================= + +This guide will walk you through creating and packaging a standalone command-line application +that can be installed with :ref:`pipx`, a tool for creating and managing :term:`Python Virtual Environments ` +and exposing the executable scripts of packages (and available manual pages) for use on the command-line. + +Creating the package +==================== + +First of all, create a source tree for the :term:`project `. For the sake of an example, we'll +build a simple tool outputting a greeting (a string) for a person based on arguments given on the command-line. + +.. todo:: Advise on the optimal structure of a Python package in another guide or discussion and link to it here. + +This project will adhere to :ref:`src-layout ` and in the end be alike this file tree, +with the top-level folder and package name ``greetings``: + +:: + + . + ├── pyproject.toml + └── src + └── greetings + ├── cli.py + ├── greet.py + ├── __init__.py + └── __main__.py + +The actual code responsible for the tool's functionality will be stored in the file :file:`greet.py`, +named after the main module: + +.. code-block:: python + + import typer + from typing_extensions import Annotated + + + def greet( + name: Annotated[str, typer.Argument(help="The (last, if --gender is given) name of the person to greet")] = "", + gender: Annotated[str, typer.Option(help="The gender of the person to greet")] = "", + knight: Annotated[bool, typer.Option(help="Whether the person is a knight")] = False, + count: Annotated[int, typer.Option(help="Number of times to greet the person")] = 1 + ): + greeting = "Greetings, dear " + masculine = gender == "masculine" + feminine = gender == "feminine" + if gender or knight: + salutation = "" + if knight: + salutation = "Sir " + elif masculine: + salutation = "Mr. " + elif feminine: + salutation = "Ms. 
" + greeting += salutation + if name: + greeting += f"{name}!" + else: + pronoun = "her" if feminine else "his" if masculine or knight else "its" + greeting += f"what's-{pronoun}-name" + else: + if name: + greeting += f"{name}!" + elif not gender: + greeting += "friend!" + for i in range(0, count): + print(greeting) + +The above function receives several keyword arguments that determine how the greeting to output is constructed. +Now, construct the command-line interface to provision it with the same, which is done +in :file:`cli.py`: + +.. code-block:: python + + import typer + + from .greet import greet + + + app = typer.Typer() + app.command()(greet) + + + if __name__ == "__main__": + app() + +The command-line interface is built with typer_, an easy-to-use CLI parser based on Python type hints. It provides +auto-completion and nicely styled command-line help out of the box. Another option would be :py:mod:`argparse`, +a command-line parser which is included in Python's standard library. It is sufficient for most needs, but requires +a lot of code, usually in ``cli.py``, to function properly. Alternatively, docopt_ makes it possible to create CLI +interfaces based solely on docstrings; advanced users are encouraged to make use of click_ (on which ``typer`` is based). + +Now, add an empty :file:`__init__.py` file, to define the project as a regular :term:`import package `. + +The file :file:`__main__.py` marks the main entry point for the application when running it via :mod:`runpy` +(i.e. ``python -m greetings``, which works immediately with flat layout, but requires installation of the package with src layout), +so initialize the command-line interface here: + +.. code-block:: python + + if __name__ == "__main__": + from greetings.cli import app + app() + +.. note:: + + In order to enable calling the command-line interface directly from the :term:`source tree `, + i.e. 
as ``python src/greetings``, a certain hack could be placed in this file; read more at + :ref:`running-cli-from-source-src-layout`. + + +``pyproject.toml`` +------------------ + +The project's :term:`metadata ` is placed in :term:`pyproject.toml`. The :term:`pyproject metadata keys ` and the ``[build-system]`` table may be filled in as described in :ref:`writing-pyproject-toml`, adding a dependency +on ``typer`` (this tutorial uses version *0.12.3*). + +For the project to be recognised as a command-line tool, additionally a ``console_scripts`` :ref:`entry point ` (see :ref:`console_scripts`) needs to be added as a :term:`subkey `: + +.. code-block:: toml + + [project.scripts] + greet = "greetings.cli:app" + +Now, the project's source tree is ready to be transformed into a :term:`distribution package `, +which makes it installable. + + +Installing the package with ``pipx`` +==================================== + +After installing ``pipx`` as described in :ref:`installing-stand-alone-command-line-tools`, install your project: + +.. code-block:: console + + $ cd path/to/greetings/ + $ pipx install . + +This will expose the executable script we defined as an entry point and make the command ``greet`` available. +Let's test it: + +.. code-block:: console + + $ greet --knight Lancelot + Greetings, dear Sir Lancelot! + $ greet --gender feminine Parks + Greetings, dear Ms. Parks! + $ greet --gender masculine + Greetings, dear Mr. what's-his-name! + +Since this example uses ``typer``, you could now also get an overview of the program's usage by calling it with +the ``--help`` option, or configure completions via the ``--install-completion`` option. + +To just run the program without installing it permanently, use ``pipx run``, which will create a temporary +(but cached) virtual environment for it: + +.. code-block:: console + + $ pipx run --spec . 
greet --knight
+
+This syntax is a bit impractical, however; as the name of the entry point we defined above does not match the package name,
+we need to state explicitly which executable script to run (even though there is only one in existence).
+
+There is, however, a more practical solution to this problem, in the form of an entry point specific to ``pipx run``.
+The same can be defined as follows in :file:`pyproject.toml`:
+
+.. code-block:: toml
+
+    [project.entry-points."pipx.run"]
+    greetings = "greetings.cli:app"
+
+
+Thanks to this entry point (which *must* match the package name), ``pipx`` will pick up the executable script as the
+default one and run it, which makes this command possible:
+
+.. code-block:: console
+
+    $ pipx run . --knight
+
+Conclusion
+==========
+
+You know by now how to package a command-line application written in Python. A further step could be to distribute your package,
+meaning uploading it to a :term:`package index `, most commonly :term:`PyPI `. To do that, follow the instructions at :ref:`Packaging your project`. And once you're done, don't forget to :ref:`do some research ` on how your package is received!
+
+.. _click: https://click.palletsprojects.com/
+.. _docopt: https://docopt.readthedocs.io/en/latest/
+.. _typer: https://typer.tiangolo.com/
diff --git a/source/guides/distributing-packages-using-setuptools.rst b/source/guides/distributing-packages-using-setuptools.rst
new file mode 100644
index 000000000..bf4227aae
--- /dev/null
+++ b/source/guides/distributing-packages-using-setuptools.rst
@@ -0,0 +1,592 @@
+.. _distributing-packages:
+
+===================================
+Packaging and distributing projects
+===================================
+
+:Page Status: Outdated
+:Last Reviewed: 2023-12-14
+
+This section covers some additional details on configuring, packaging and
+distributing Python projects with ``setuptools`` that aren't covered by the
+introductory tutorial in :doc:`/tutorials/packaging-projects`. 
It still assumes +that you are already familiar with the contents of the +:doc:`/tutorials/installing-packages` page. + +The section does *not* aim to cover best practices for Python project +development as a whole. For example, it does not provide guidance or tool +recommendations for version control, documentation, or testing. + +For more reference material, see :std:doc:`Building and Distributing +Packages ` in the :ref:`setuptools` docs, but note +that some advisory content there may be outdated. In the event of +conflicts, prefer the advice in the Python Packaging User Guide. + + + +Requirements for packaging and distributing +=========================================== +1. First, make sure you have already fulfilled the :ref:`requirements for + installing packages `. + +2. Install "twine" [1]_: + + .. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install twine + + .. tab:: Windows + + .. code-block:: bat + + py -m pip install twine + + You'll need this to upload your project :term:`distributions ` to :term:`PyPI ` (see :ref:`below + `). + + +Configuring your project +======================== + + +Initial files +------------- + +setup.py +~~~~~~~~ + +The most important file is :file:`setup.py` which exists at the root of your +project directory. For an example, see the `setup.py +`_ in the `PyPA +sample project `_. + +:file:`setup.py` serves two primary functions: + +1. It's the file where various aspects of your project are configured. The + primary feature of :file:`setup.py` is that it contains a global ``setup()`` + function. The keyword arguments to this function are how specific details + of your project are defined. The most relevant arguments are explained in + :ref:`the section below `. + +2. It's the command line interface for running various commands that + relate to packaging tasks. To get a listing of available commands, run + ``python3 setup.py --help-commands``. 
+ + +setup.cfg +~~~~~~~~~ + +:file:`setup.cfg` is an ini file that contains option defaults for +:file:`setup.py` commands. For an example, see the `setup.cfg +`_ in the `PyPA +sample project `_. + + +README.rst / README.md +~~~~~~~~~~~~~~~~~~~~~~ + +All projects should contain a readme file that covers the goal of the project. +The most common format is `reStructuredText +`_ with an "rst" extension, although +this is not a requirement; multiple variants of `Markdown +`_ are supported as well (look +at ``setup()``'s :ref:`long_description_content_type ` argument). + +For an example, see `README.md +`_ from the `PyPA +sample project `_. + +.. note:: Projects using :ref:`setuptools` 0.6.27+ have standard readme files + (:file:`README.rst`, :file:`README.txt`, or :file:`README`) included in + source distributions by default. The built-in :ref:`distutils` library adopts + this behavior beginning in Python 3.7. Additionally, :ref:`setuptools` + 36.4.0+ will include a :file:`README.md` if found. If you are using + setuptools, you don't need to list your readme file in :file:`MANIFEST.in`. + Otherwise, include it to be explicit. + +MANIFEST.in +~~~~~~~~~~~ + +A :file:`MANIFEST.in` is needed when you need to package additional files that +are not automatically included in a source distribution. For details on +writing a :file:`MANIFEST.in` file, including a list of what's included by +default, see ":ref:`Using MANIFEST.in`". + +However, you may not have to use a :file:`MANIFEST.in`. For an example, the `PyPA +sample project `_ has removed its manifest +file, since all the necessary files have been included by :ref:`setuptools` 43.0.0 +and newer. + +.. note:: :file:`MANIFEST.in` does not affect binary distributions such as wheels. + +LICENSE.txt +~~~~~~~~~~~ + +Every package should include a license file detailing the terms of +distribution. 
In many jurisdictions, packages without an explicit license can
+not be legally used or distributed by anyone other than the copyright holder.
+If you're unsure which license to choose, you can use resources such as
+`GitHub's Choose a License `_ or consult a lawyer.
+
+For an example, see the `LICENSE.txt
+`_ from the `PyPA
+sample project `_.
+
+
+``<your package>``
+~~~~~~~~~~~~~~~~~~
+
+Although it's not required, the most common practice is to include your
+Python modules and packages under a single top-level package that has the same
+:ref:`name ` as your project, or something very close.
+
+For an example, see the `sample
+`_ package that's
+included in the `PyPA sample project `_.
+
+
+.. _`setup() args`:
+
+setup() args
+------------
+
+As mentioned above, the primary feature of :file:`setup.py` is that it contains
+a global ``setup()`` function. The keyword arguments to this function are how
+specific details of your project are defined.
+
+Some are temporarily explained below until their information is moved elsewhere.
+The full list can be found :doc:`in the setuptools documentation
+`.
+
+Most of the snippets given are
+taken from the `setup.py
+`_ contained in the
+`PyPA sample project `_.
+
+
+
+See :ref:`Choosing a versioning scheme` for more information on ways to use versions to convey
+compatibility information to your users.
+
+
+
+
+``packages``
+~~~~~~~~~~~~
+
+::
+
+    packages=find_packages(include=['sample', 'sample.*']),
+
+Set ``packages`` to a list of all :term:`packages ` in your
+project, including their subpackages, sub-subpackages, etc. Although the
+packages can be listed manually, ``setuptools.find_packages()`` finds them
+automatically. Use the ``include`` keyword argument to find only the given
+packages. Use the ``exclude`` keyword argument to omit packages that are not
+intended to be released and installed. 
+ + +``py_modules`` +~~~~~~~~~~~~~~ + +:: + + py_modules=["six"], + +If your project contains any single-file Python modules that aren't part of a +package, set ``py_modules`` to a list of the names of the modules (minus the +``.py`` extension) in order to make :ref:`setuptools` aware of them. + + +``install_requires`` +~~~~~~~~~~~~~~~~~~~~ + +:: + + install_requires=['peppercorn'], + +"install_requires" should be used to specify what dependencies a project +minimally needs to run. When the project is installed by :ref:`pip`, this is the +specification that is used to install its dependencies. + +For more on using "install_requires" see :ref:`install_requires vs Requirements files`. + + + +.. _`Package Data`: + +``package_data`` +~~~~~~~~~~~~~~~~ + +:: + + package_data={ + 'sample': ['package_data.dat'], + }, + + +Often, additional files need to be installed into a :term:`package `. These files are often data that’s closely related to the package’s +implementation, or text files containing documentation that might be of interest +to programmers using the package. These files are called "package data". + +The value must be a mapping from package name to a list of relative path names +that should be copied into the package. The paths are interpreted as relative to +the directory containing the package. + +For more information, see :std:doc:`Including Data Files +` from the +:std:doc:`setuptools docs `. + + +.. _`Data Files`: + +``data_files`` +~~~~~~~~~~~~~~ + +:: + + data_files=[('my_data', ['data/data_file'])], + +Although configuring :ref:`Package Data` is sufficient for most needs, in some +cases you may need to place data files *outside* of your :term:`packages +`. The ``data_files`` directive allows you to do that. +It is mostly useful if you need to install files which are used by other +programs, which may be unaware of Python packages. + +Each ``(directory, files)`` pair in the sequence specifies the installation +directory and the files to install there. 
The ``directory`` must be a relative +path (although this may change in the future, see +`wheel Issue #92 `_), +and it is interpreted relative to the installation prefix +(Python’s ``sys.prefix`` for a default installation; +``site.USER_BASE`` for a user installation). +Each file name in ``files`` is interpreted relative to the :file:`setup.py` +script at the top of the project source distribution. + +For more information see the distutils section on :ref:`Installing Additional Files +`. + +.. note:: + + When installing packages as egg, ``data_files`` is not supported. + So, if your project uses :ref:`setuptools`, you must use ``pip`` + to install it. Alternatively, if you must use ``python setup.py``, + then you need to pass the ``--old-and-unmanageable`` option. + + +``scripts`` +~~~~~~~~~~~ + +Although ``setup()`` supports a :ref:`scripts +` +keyword for pointing to pre-made scripts to install, the recommended approach to +achieve cross-platform compatibility is to use :ref:`console_scripts` entry +points (see below). + + +Choosing a versioning scheme +---------------------------- + +See :ref:`versioning` for information on common version schemes and how to +choose between them. + + +Working in "development mode" +============================= + +You can install a project in "editable" +or "develop" mode while you're working on it. +When installed as editable, a project can be +edited in-place without reinstallation: +changes to Python source files in projects installed as editable will be reflected the next time an interpreter process is started. + +To install a Python package in "editable"/"development" mode +Change directory to the root of the project directory and run: + +.. code-block:: bash + + python3 -m pip install -e . + + +The pip command-line flag ``-e`` is short for ``--editable``, and ``.`` refers +to the current working directory, so together, it means to install the current +directory (i.e. your project) in editable mode. 
This will also install any +dependencies declared with ``install_requires`` and any scripts declared with +``console_scripts``. Dependencies will be installed in the usual, non-editable +mode. + +You may want to install some of your dependencies in editable +mode as well. For example, supposing your project requires "foo" and "bar", but +you want "bar" installed from VCS in editable mode, then you could construct a +requirements file like so:: + + -e . + -e bar @ git+https://somerepo/bar.git + +The first line says to install your project and any dependencies. The second +line overrides the "bar" dependency, such that it's fulfilled from VCS, not +PyPI. + +If, however, you want "bar" installed from a local directory in editable mode, the requirements file should look like this, with the local paths at the top of the file:: + + -e /path/to/project/bar + -e . + +Otherwise, the dependency will be fulfilled from PyPI, due to the installation order of the requirements file. For more on requirements files, see the :ref:`Requirements File +` section in the pip docs. For more on VCS installs, +see the :ref:`VCS Support ` section of the pip docs. + +Lastly, if you don't want to install any dependencies at all, you can run: + +.. code-block:: bash + + python3 -m pip install -e . --no-deps + + +For more information, see the +:doc:`Development Mode ` section +of the :ref:`setuptools` docs. + +.. _`Packaging your project`: + +Packaging your project +====================== + +To have your project installable from a :term:`Package Index` like :term:`PyPI +`, you'll need to create a :term:`Distribution +` (aka ":term:`Package `") for your +project. + +Before you can build wheels and sdists for your project, you'll need to install the +``build`` package: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install build + +.. tab:: Windows + + .. 
code-block:: bat + + py -m pip install build + + +Source distributions +-------------------- + +Minimally, you should create a :term:`Source Distribution `: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m build --sdist + +.. tab:: Windows + + .. code-block:: bat + + py -m build --sdist + + +A "source distribution" is unbuilt (i.e. it's not a :term:`Built +Distribution`), and requires a build step when installed by pip. Even if the +distribution is pure Python (i.e. contains no extensions), it still involves a +build step to build out the installation metadata from :file:`setup.py` and/or +:file:`setup.cfg`. + + +Wheels +------ + +You should also create a wheel for your project. A wheel is a :term:`built +package ` that can be installed without needing to go +through the "build" process. Installing wheels is substantially faster for the +end user than installing from a source distribution. + +If your project is pure Python then you'll be creating a +:ref:`"Pure Python Wheel" (see section below) `. + +If your project contains compiled extensions, then you'll be creating what's +called a :ref:`*Platform Wheel* (see section below) `. + +.. note:: If your project also supports Python 2 *and* contains no C extensions, + then you should create what's called a *Universal Wheel* by adding the + following to your :file:`setup.cfg` file: + + .. code-block:: text + + [bdist_wheel] + universal=1 + + Only use this setting if your project does not have any C extensions *and* + supports Python 2 and 3. + + +.. _`Pure Python Wheels`: + +Pure Python Wheels +~~~~~~~~~~~~~~~~~~ + +*Pure Python Wheels* contain no compiled extensions, and therefore only require a +single Python wheel. + +To build the wheel: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m build --wheel + +.. tab:: Windows + + .. 
code-block:: bat + + py -m build --wheel + +The ``wheel`` package will detect that the code is pure Python, and build a +wheel that's named such that it's usable on any Python 3 installation. For +details on the naming of wheel files, see :pep:`425`. + +If you run ``build`` without ``--wheel`` or ``--sdist``, it will build both +files for you; this is useful when you don't need multiple wheels. + +.. _`Platform Wheels`: + +Platform Wheels +~~~~~~~~~~~~~~~ + +*Platform Wheels* are wheels that are specific to a certain platform like Linux, +macOS, or Windows, usually due to containing compiled extensions. + +To build the wheel: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m build --wheel + +.. tab:: Windows + + .. code-block:: bat + + py -m build --wheel + + +The ``wheel`` package will detect that the code is not pure Python, and build +a wheel that's named such that it's only usable on the platform that it was +built on. For details on the naming of wheel files, see :pep:`425`. + +.. note:: + + :term:`PyPI ` currently supports uploads of + platform wheels for Windows, macOS, and the multi-distro ``manylinux*`` ABI. + Details of the latter are defined in :pep:`513`. + + +.. _`Uploading your Project to PyPI`: + +Uploading your Project to PyPI +============================== + +When you ran the command to create your distribution, a new directory ``dist/`` +was created under your project's root directory. That's where you'll find your +distribution file(s) to upload. + +.. note:: These files are only created when you run the command to create your + distribution. This means that any time you change the source of your project + or the configuration in your :file:`setup.py` file, you will need to rebuild + these files again before you can distribute the changes to PyPI. + +.. note:: Before releasing on main PyPI repo, you might prefer + training with the `PyPI test site `_ which + is cleaned on a semi regular basis. 
See :ref:`using-test-pypi` on + how to setup your configuration in order to use it. + +.. warning:: In other resources you may encounter references to using + ``python setup.py register`` and ``python setup.py upload``. These methods + of registering and uploading a package are **strongly discouraged** as it may + use a plaintext HTTP or unverified HTTPS connection on some Python versions, + allowing your username and password to be intercepted during transmission. + +.. tip:: The reStructuredText parser used on PyPI is **not** Sphinx! + Furthermore, to ensure safety of all users, certain kinds of URLs and + directives are forbidden or stripped out (e.g., the ``.. raw::`` + directive). **Before** trying to upload your distribution, you should check + to see if your brief / long descriptions provided in :file:`setup.py` are + valid. You can do this by running :std:doc:`twine check ` on + your package files: + + .. code-block:: bash + + twine check dist/* + +Create an account +----------------- + +First, you need a :term:`PyPI ` user account. You +can create an account +`using the form on the PyPI website `_. + +Now you'll create a PyPI `API token`_ so you will be able to securely upload +your project. + +Go to https://pypi.org/manage/account/#api-tokens and create a new +`API token`_; don't limit its scope to a particular project, since you +are creating a new project. + +**Don't close the page until you have copied and saved the token — you +won't see that token again.** + +.. Note:: To avoid having to copy and paste the token every time you + upload, you can create a :file:`$HOME/.pypirc` file: + + .. code-block:: text + + [pypi] + username = __token__ + password = + + **Be aware that this stores your token in plaintext.** + + For more details, see the :ref:`specification ` for :file:`.pypirc`. + +.. _register-your-project: +.. 
_API token: https://pypi.org/help/#apitoken + +Upload your distributions +------------------------- + +Once you have an account you can upload your distributions to +:term:`PyPI ` using :ref:`twine`. + +The process for uploading a release is the same regardless of whether +or not the project already exists on PyPI - if it doesn't exist yet, +it will be automatically created when the first release is uploaded. + +For the second and subsequent releases, PyPI only requires that the +version number of the new release differ from any previous releases. + +.. code-block:: bash + + twine upload dist/* + +You can see if your package has successfully uploaded by navigating to the URL +``https://pypi.org/project/`` where ``sampleproject`` is +the name of your project that you uploaded. It may take a minute or two for +your project to appear on the site. + +---- + +.. [1] Depending on your platform, this may require root or Administrator + access. :ref:`pip` is currently considering changing this by `making user + installs the default behavior + `_. diff --git a/source/guides/dropping-older-python-versions.rst b/source/guides/dropping-older-python-versions.rst new file mode 100644 index 000000000..267d7b923 --- /dev/null +++ b/source/guides/dropping-older-python-versions.rst @@ -0,0 +1,138 @@ +.. _`Dropping support for older Python versions`: + +========================================== +Dropping support for older Python versions +========================================== + +The ability to drop support for older Python versions is enabled by the standard :ref:`core-metadata` 1.2 specification via the :ref:`"Requires-Python" ` attribute. + +Metadata 1.2+ installers, such as Pip, will adhere to this specification by matching the current Python runtime and comparing it with the required version +in the package metadata. If they do not match, it will attempt to install the last package distribution that supported that Python runtime. 
+ +This mechanism can be used to drop support for older Python versions, by amending the ``Requires-Python`` attribute in the package metadata. + +Requirements +------------ + +This workflow requires that the user installing the package uses Pip [#]_, or another installer that supports the Metadata 1.2 specification. + +Dealing with the universal wheels +--------------------------------- + +Traditionally, :ref:`setuptools` projects providing Python code that is semantically +compatible with both Python 2 and Python 3, produce :term:`wheels +` that have a ``py2.py3`` tag in their names. When dropping +support for Python 2, it is important not to forget to change this tag +to just ``py3``. It is often configured within :file:`setup.cfg` under +the ``[bdist_wheel]`` section by setting ``universal = 1``. + +If you use this method, either remove this option or section, or +explicitly set ``universal`` to ``0``: + +.. code-block:: ini + + # setup.cfg + + [bdist_wheel] + universal = 0 # Make the generated wheels have "py3" tag + +.. hint:: + + Regarding :ref:`deprecated ` direct ``setup.py`` invocations, + passing the ``--universal`` flag on the command line could override this setting. + +Defining the Python version required +------------------------------------ + +1. Install twine +~~~~~~~~~~~~~~~~ + +Ensure that you have twine available at its latest version. +Steps: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade twine + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade twine + +2. Specify the version ranges for supported Python distributions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Set the version ranges declaring which Python distributions are supported +within your project's :file:`pyproject.toml`. The :ref:`requires-python` configuration field +corresponds to the :ref:`Requires-Python ` core metadata field: + +.. code-block:: toml + + [build-system] + ... 
+ + [project] + requires-python = ">= 3.8" # At least Python 3.8 + +You can specify version ranges and exclusion rules (complying with the :ref:`version-specifiers` specification), +such as at least Python 3.9. Or, at least Python 3.7 and beyond, skipping the 3.7.0 and 3.7.1 point releases: + +.. code-block:: toml + + requires-python = ">= 3.9" + requires-python = ">= 3.7, != 3.7.0, != 3.7.1" + + +If using the :ref:`setuptools` build backend, consult the `dependency-management`_ documentation for more options. + +.. caution:: + Avoid adding upper bounds to the version ranges, e. g. ``">= 3.8, < 3.10"``. Doing so can cause different errors + and version conflicts. See the `discourse-discussion`_ for more information. + +3. Validating the Metadata before publishing +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Within a Python source package (the zip or the tar-gz file you download) is a text file called PKG-INFO. + +This file is generated by the :term:`build backend ` when it generates the source package. +The file contains a set of keys and values, the list of keys is part of the PyPA standard metadata format. + +You can see the contents of the generated file like this: + +.. code-block:: bash + + tar xfO dist/my-package-1.0.0.tar.gz my-package-1.0.0/PKG-INFO + +Validate that the following is in place, before publishing the package: + +- If you have upgraded correctly, the ``Metadata-Version`` value should be 1.2 or higher. +- The ``Requires-Python`` field is set and matches your specification in the configuration file. + +4. Publishing the package +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Proceed as suggested in :ref:`Uploading your Project to PyPI`. + +Dropping a Python version +------------------------- + +In principle, at least metadata support for Python versions should be kept as long as possible, because +once that has been dropped, people still depending on a version will be forced to downgrade. 
+If however supporting a specific version becomes a blocker for a new feature or other issues occur, the metadata +``Requires-Python`` should be amended. Of course this also depends on whether the project needs to be stable and +well-covered for a wider range of users. + +Each version compatibility change should have its own release. + +.. tip:: + + When dropping a Python version, it might also be rewarding to upgrade the project's code syntax generally, apart from updating the versions used in visible places (like the testing environment). Tools like pyupgrade_ or `ruff `_ can automate some of this work. + +.. _discourse-discussion: https://discuss.python.org/t/requires-python-upper-limits/12663 +.. _pyupgrade: https://pypi.org/project/pyupgrade/ +.. _dependency-management: https://setuptools.pypa.io/en/latest/userguide/dependency_management.html#python-requirement + +.. [#] Support for the Metadata 1.2 specification has been added in Pip 9.0. diff --git a/source/guides/github-actions-ci-cd-sample/publish-to-test-pypi.yml b/source/guides/github-actions-ci-cd-sample/publish-to-test-pypi.yml new file mode 100644 index 000000000..8813a0392 --- /dev/null +++ b/source/guides/github-actions-ci-cd-sample/publish-to-test-pypi.yml @@ -0,0 +1,76 @@ +name: Publish Python 🐍 distribution 📦 to PyPI and TestPyPI + +on: push + +jobs: + build: + name: Build distribution 📦 + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install pypa/build + run: >- + python3 -m + pip install + build + --user + - name: Build a binary wheel and a source tarball + run: python3 -m build + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: python-package-distributions + path: dist/ + + publish-to-pypi: + name: >- + Publish Python 🐍 distribution 📦 to PyPI + if: startsWith(github.ref, 'refs/tags/') # only publish to 
PyPI on tag pushes
+    needs:
+    - build
+    runs-on: ubuntu-latest
+    environment:
+      name: pypi
+      url: https://pypi.org/p/<package-name>  # Replace <package-name> with your PyPI project name
+    permissions:
+      id-token: write  # IMPORTANT: mandatory for trusted publishing
+
+    steps:
+    - name: Download all the dists
+      uses: actions/download-artifact@v4
+      with:
+        name: python-package-distributions
+        path: dist/
+    - name: Publish distribution 📦 to PyPI
+      uses: pypa/gh-action-pypi-publish@release/v1
+
+  publish-to-testpypi:
+    name: Publish Python 🐍 distribution 📦 to TestPyPI
+    needs:
+    - build
+    runs-on: ubuntu-latest
+
+    environment:
+      name: testpypi
+      url: https://test.pypi.org/p/<package-name>
+
+    permissions:
+      id-token: write  # IMPORTANT: mandatory for trusted publishing
+
+    steps:
+    - name: Download all the dists
+      uses: actions/download-artifact@v4
+      with:
+        name: python-package-distributions
+        path: dist/
+    - name: Publish distribution 📦 to TestPyPI
+      uses: pypa/gh-action-pypi-publish@release/v1
+      with:
+        repository-url: https://test.pypi.org/legacy/
diff --git a/source/guides/hosting-your-own-index.rst b/source/guides/hosting-your-own-index.rst
new file mode 100644
index 000000000..ebcd228ee
--- /dev/null
+++ b/source/guides/hosting-your-own-index.rst
@@ -0,0 +1,137 @@
+.. _`Hosting your Own Simple Repository`:
+
+==================================
+Hosting your own simple repository
+==================================
+
+
+If you wish to host your own simple repository [1]_, you can either use a
+software package like :doc:`devpi ` or you can simply create the proper
+directory structure and use any web server that can serve static files and
+generate an autoindex.
+
+In either case, since you'll be hosting a repository that is likely not in
+your user's default repositories, you should instruct them in your project's
+description to configure their installer appropriately. For example with pip:
+
+.. tab:: Unix/macOS
+
+    .. 
code-block:: bash + + python3 -m pip install --extra-index-url https://python.example.com/ foobar + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --extra-index-url https://python.example.com/ foobar + +In addition, it is **highly** recommended that you serve your repository with +valid HTTPS. At this time, the security of your user's installations depends on +all repositories using a valid HTTPS setup. + + +"Manual" repository +=================== + +The directory layout is fairly simple, within a root directory you need to +create a directory for each project. This directory should be the :ref:`normalized name ` of the project. Within each of these directories +simply place each of the downloadable files. If you have the projects "Foo" +(with the versions 1.0 and 2.0) and "bar" (with the version 0.1) You should +end up with a structure that looks like:: + + . + ├── bar + │   └── bar-0.1.tar.gz + └── foo + ├── Foo-1.0.tar.gz + └── Foo-2.0.tar.gz + +Once you have this layout, simply configure your webserver to serve the root +directory with autoindex enabled. For an example using the built in Web server +in `Twisted`_, you would simply run ``twistd -n web --path .`` and then +instruct users to add the URL to their installer's configuration. + + +Existing projects +================= + +.. 
list-table:: + :header-rows: 1 + + * - Project + - Package upload + - PyPI fall-through [2]_ + - Additional notes + + * - :ref:`devpi` + - ✔ + - ✔ + - multiple indexes with inheritance, with syncing, replication, fail-over; + mirroring + + * - :ref:`simpleindex` + - + - ✔ + - + + * - :ref:`pypiserver` + - ✔ + - + - + + * - :ref:`pypiprivate` + - + - + - + + * - :ref:`pypicloud` + - + - + - unmaintained; also cached proxying; authentication, authorisation + + * - :ref:`pywharf` + - + - + - unmaintained; serve files in GitHub + + * - :ref:`pulppython` + - ✔ + - + - also mirroring, proxying; plugin for Pulp + + * - :ref:`pip2pi` + - + - + - also mirroring; manual synchronisation + + * - :ref:`dumb-pypi` + - + - + - not a server, but a static file site generator + + * - :ref:`httpserver` + - + - + - standard-library + + * - `Apache `_ + - + - ✔ + - using + `mod_rewrite + `_ + and + `mod_cache_disk + `_, + you can cache requests to package indexes through an Apache server + +---- + +.. [1] For complete documentation of the simple repository protocol, see + :ref:`simple repository API `. + +.. [2] Can be configured to fall back to PyPI (or another package index) + if a requested package is missing. + +.. _Twisted: https://twistedmatrix.com/ diff --git a/source/guides/index-mirrors-and-caches.rst b/source/guides/index-mirrors-and-caches.rst new file mode 100644 index 000000000..f3b7bd243 --- /dev/null +++ b/source/guides/index-mirrors-and-caches.rst @@ -0,0 +1,117 @@ +.. _`PyPI mirrors and caches`: + +================================ +Package index mirrors and caches +================================ + +:Page Status: Incomplete +:Last Reviewed: 2023-11-08 + +Mirroring or caching of PyPI (and other +:term:`package indexes `) can be used to speed up local +package installation, +allow offline work, handle corporate firewalls or just plain Internet flakiness. + +There are multiple classes of options in this area: + +1. local/hosted caching of package indexes. + +2. 
local/hosted mirroring of a package index. A mirror is a (whole or + partial) copy of a package index, which can be used in place of the + original index. + +3. private package index with fall-through to public package indexes (for + example, to mitigate dependency confusion attacks), also known as a + proxy. + + +Caching with pip +---------------- + +pip provides a number of facilities for speeding up installation by using local +cached copies of :term:`packages `: + +1. :ref:`Fast & local installs ` + by downloading all the requirements for a project and then pointing pip at + those downloaded files instead of going to PyPI. +2. A variation on the above which pre-builds the installation files for + the requirements using :ref:`python3 -m pip wheel `: + + .. code-block:: bash + + python3 -m pip wheel --wheel-dir=/tmp/wheelhouse SomeProject + python3 -m pip install --no-index --find-links=/tmp/wheelhouse SomeProject + + +Existing projects +----------------- + +.. list-table:: + :header-rows: 1 + + * - Project + - Cache + - Mirror + - Proxy + - Additional notes + + * - :ref:`devpi` + - ✔ + - ✔ + - + - multiple indexes with inheritance; syncing, replication, fail-over; + package upload + + * - :ref:`bandersnatch` + - ✔ + - ✔ + - + - + + * - :ref:`simpleindex` + - + - + - ✔ + - custom plugin enables caching; re-routing to other package indexes + + * - :ref:`pypicloud` + - ✔ + - + - ✔ + - unmaintained; authentication, authorisation + + * - :ref:`pulppython` + - + - ✔ + - ✔ + - plugin for Pulp; multiple proxied indexes; package upload + + * - :ref:`proxpi` + - ✔ + - + - ✔ + - multiple proxied indexes + + * - :ref:`nginx_pypi_cache` + - ✔ + - + - ✔ + - multiple proxied indexes + + * - :ref:`flaskpypiproxy` + - ✔ + - + - ✔ + - unmaintained + + * - `Apache `_ + - ✔ + - + - ✔ + - using + `mod_rewrite + `_ + and + `mod_cache_disk + `_, + you can cache requests to package indexes through an Apache server diff --git a/source/guides/index.rst b/source/guides/index.rst new file 
mode 100644 index 000000000..b87d0b1a8 --- /dev/null +++ b/source/guides/index.rst @@ -0,0 +1,15 @@ +Guides +###### + +**Guides** are focused on accomplishing a specific task and assume that you are +already familiar with the basics of Python packaging. If you're looking for an +introduction to packaging, see :doc:`/tutorials/index`. + +.. toctree:: + :titlesonly: + + section-install + section-build-and-publish + section-hosting + tool-recommendations + analyzing-pypi-package-downloads diff --git a/source/science.rst b/source/guides/installing-scientific-packages.rst similarity index 50% rename from source/science.rst rename to source/guides/installing-scientific-packages.rst index 0e31fbaa0..a1aeae567 100644 --- a/source/science.rst +++ b/source/guides/installing-scientific-packages.rst @@ -1,44 +1,31 @@ .. _`NumPy and the Science Stack`: ============================== -Installing Scientific Packages +Installing scientific packages ============================== -:Page Status: Incomplete -:Last Reviewed: 2014-07-24 - -.. contents:: Contents - :local: - Scientific software tends to have more complex dependencies than most, and it will often have multiple build options to take advantage of different kinds of hardware, or to interoperate with different pieces of external software. -In particular, `NumPy `__, which provides the basis +In particular, `NumPy `__, which provides the basis for most of the software in the `scientific Python stack -`__ can be configured +`_ can be configured to interoperate with different FORTRAN libraries, and can take advantage -of different levels of vectorised instructions available in modern CPUs. 
- -Unfortunately, as of December 2013, given NumPy's current build and -distribution model, the standard tools currently aren't quite up to the -task of distributing pre-built NumPy binaries, as most users aren't going -to know which version they need, and the ``wheel`` format currently doesn't -allow the installer to make that decision on the user's behalf at install -time. +of different levels of vectorized instructions available in modern CPUs. -It is expected that this situation will eventually be resolved either by -future iterations of the standard tools providing full support for the -intricacies of NumPy's current build and distribution process, or by the -NumPy developers choosing one build variant as the "lowest acceptable -common denominator" and publishing that as a wheel file on PyPI. +Starting with version 1.10.4 of NumPy and version 1.0.0 of SciPy, pre-built +32-bit and 64-bit binaries in the ``wheel`` format are available for all major +operating systems (Windows, macOS, and Linux) on PyPI. Note, however, that on +Windows, NumPy binaries are linked against the `ATLAS +`__ BLAS/LAPACK library, restricted to SSE2 +instructions, so they may not provide optimal linear algebra performance. -In the meantime, however, there are a number of alternative options for -obtaining scientific Python libraries (or any other Python libraries that -require a compilation environment to install from source and don't provide -pre-built wheel files on PyPI). +There are a number of alternative options for obtaining scientific Python +libraries (or any other Python libraries that require a compilation environment +to install from source and don't provide pre-built wheel files on PyPI). Building from source @@ -74,39 +61,36 @@ up a suitable environment to build extensions locally. The extensions provided in these installers are typically compatible with the CPython Windows installers published on python.org. 
-For projects which don't provide their own Windows installers (and even -some which do), Christoph Gohlke at the University of California provides -a `collection of Windows installers -`__. Many Python users on -Windows have reported a positive experience with these prebuilt versions. - As with Linux system packages, the Windows installers will only install into a system Python installation - they do not support installation in virtual environments. Allowing access to distributions installed into the system Python when using virtual environments is a common approach to working around this limitation. -The `wheel` project also provides a `wheel convert` subcommand that can -convert a Windows `bdist_wininst` installer to a wheel. +The :term:`Wheel` project also provides a :command:`wheel convert` subcommand that can +convert a Windows :command:`bdist_wininst` installer to a wheel. -Mac OS X installers and package managers ----------------------------------------- +.. preserve old links to this heading +.. _mac-os-x-installers-and-package-managers: + +macOS installers and package managers +------------------------------------- Similar to the situation on Windows, many projects (including NumPy) publish -Mac OS X installers that are compatible with the Mac OS X CPython binaries +macOS installers that are compatible with the macOS CPython binaries published on python.org. -Mac OS X users also have access to Linux distribution style package managers -such as ``MacPorts``. The SciPy site has more details on using MacPorts to -install the `scientific Python stack -`__ +macOS users also have access to Linux distribution style package managers +such as ``Homebrew``. The SciPy site has more details on using Homebrew to +`install SciPy on macOS `_. SciPy distributions ------------------- The SciPy site lists `several distributions -`__ that provide the full SciPy stack to +`_ +that provide the full SciPy stack to end users in an easy to use and update format. 
Some of these distributions may not be compatible with the standard ``pip`` @@ -114,11 +98,11 @@ and ``virtualenv`` based toolchain. Spack ------ -`Spack `_ is a flexible package manager +`Spack `_ is a flexible package manager designed to support multiple versions, configurations, platforms, and compilers. It was built to support the needs of large supercomputing centers and scientific application teams, who must often build software many different ways. -Spack is not limited to Python; it can install packages for ``C``, ``C++``, +Spack is not limited to Python; it can install packages for ``C``, ``C++``, ``Fortran``, ``R``, and other languages. It is non-destructive; installing a new version of one package does not break existing installations, so many configurations can coexist on the same system. @@ -134,21 +118,22 @@ be loaded and unloaded from the user's environment. The conda cross-platform package manager ---------------------------------------- -`Anaconda `__ is a Python -distribution published by Continuum Analytics. It is a stable collection of -Open Source packages for big data and scientific use. About 100 are -installed with Anaconda 2.2, and a total of 279 can be installed and -updated from the Anaconda repository. - -``conda`` an open source (BSD licensed) package management system and -environment management system included in Anaconda that allows users to -install multiple versions of binary software packages and their dependencies, -and easily switch between them. It is a cross-platform tool working on Windows, -OSX, and Linux. Conda can be used to package up and distribute all kinds of -packages, it is not limited to just Python packages. It has full support -for native virtual environments. Conda makes environments first-class citizens, -making it easy to create independent environments even for C libraries. It is -written in Python, but is Python-agnostic. 
Conda manages python itself as a -package, so that `conda update python` is possible, in contrast to pip, which only -manages Python packages. Conda is available in Anaconda and Miniconda -(an easy-to-install download with just Python and conda). +``conda`` is an open source (BSD licensed) package management system and +environment management system that allows users to install +multiple versions of binary software packages and their dependencies, and +easily switch between them. It is a cross-platform tool working on Windows, +macOS, and Linux. Conda can be used to package up and distribute all kinds of +packages, it is not limited to just Python packages. It has full support for +native virtual environments. Conda makes environments first-class citizens, +making it easy to create independent environments even for C libraries. It is +written in Python, but is Python-agnostic. Conda manages Python itself as a +package, so that :command:`conda update python` is possible, in contrast to +pip, which only manages Python packages. + +`Anaconda <https://www.anaconda.com/>`_ is a Python distribution published by Anaconda, Inc. It is a stable collection of Open Source packages for big data and scientific use, and a collection of Graphical Interface utilities for managing conda environments. + +In addition to the full distribution provided by Anaconda, the conda package manager itself is available in `miniconda <https://docs.anaconda.com/miniconda/>`_, `miniforge <https://github.com/conda-forge/miniforge>`_, and `pixi <https://pixi.sh/>`_. + + +Conda packages are available on multiple channels on Anaconda.org, including the +default channel supported by Anaconda, Inc., the community-supported conda-forge channel, which provides a wide variety of pre-built packages, and some domain-specific package collections.
diff --git a/source/guides/installing-stand-alone-command-line-tools.rst b/source/guides/installing-stand-alone-command-line-tools.rst new file mode 100644 index 000000000..c078fd1e4 --- /dev/null +++ b/source/guides/installing-stand-alone-command-line-tools.rst @@ -0,0 +1,136 @@ +.. _installing-stand-alone-command-line-tools: + +Installing stand alone command line tools +========================================= + +Many packages provide command line applications. Examples of such packages are +`mypy `_, +`flake8 `_, +`black `_, and +:ref:`pipenv`. + +Usually you want to be able to access these applications from anywhere on your +system, but installing packages and their dependencies to the same global +environment can cause version conflicts and break dependencies the operating +system has on Python packages. + +:ref:`pipx` solves this by creating a virtual environment for each package, +while also ensuring that its applications are accessible through a directory +that is on your ``$PATH``. This allows each package to be upgraded or +uninstalled without causing conflicts with other packages, and allows you to +safely run the applications from anywhere. + +.. note:: pipx only works with Python 3.6+. + +pipx is installed with pip: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --user pipx + python3 -m pipx ensurepath + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --user pipx + py -m pipx ensurepath + +.. note:: + + ``ensurepath`` ensures that the application directory is on your ``$PATH``. + You may need to restart your terminal for this update to take effect. + +Now you can install packages with ``pipx install`` and run the package's +applications(s) from anywhere. + +.. code-block:: console + + $ pipx install PACKAGE + $ PACKAGE_APPLICATION [ARGS] + +For example: + +.. 
code-block:: console + + $ pipx install cowsay + installed package cowsay 6.1, installed using Python 3.12.2 + These apps are now globally available + - cowsay + done! ✨ 🌟 ✨ + $ cowsay -t moo + ___ + < moo > + === + \ + \ + ^__^ + (oo)\_______ + (__)\ )\/ + || || + ||----w | + + +To see a list of packages installed with pipx and which applications are +available, use ``pipx list``: + +.. code-block:: console + + $ pipx list + venvs are in /Users/user/Library/Application Support/pipx/venvs + apps are exposed on your $PATH at /Users/user/.local/bin + manual pages are exposed at /Users/user/.local/share/man + package black 24.2.0, installed using Python 3.12.2 + - black + - blackd + package cowsay 6.1, installed using Python 3.12.2 + - cowsay + package mypy 1.9.0, installed using Python 3.12.2 + - dmypy + - mypy + - mypyc + - stubgen + - stubtest + package nox 2024.3.2, installed using Python 3.12.2 + - nox + - tox-to-nox + +To upgrade or uninstall a package: + +.. code-block:: bash + + pipx upgrade PACKAGE + pipx uninstall PACKAGE + +pipx can be upgraded or uninstalled with pip: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade pipx + python3 -m pip uninstall pipx + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade pipx + py -m pip uninstall pipx + +pipx also allows you to install and run the latest version of an application +in a temporary, ephemeral environment. For example: + +.. code-block:: bash + + pipx run cowsay -t moooo + +To see the full list of commands pipx offers, run: + +.. code-block:: bash + + pipx --help + +You can learn more about pipx at https://pipx.pypa.io/. 
diff --git a/source/install_requirements_linux.rst b/source/guides/installing-using-linux-tools.rst similarity index 56% rename from source/install_requirements_linux.rst rename to source/guides/installing-using-linux-tools.rst index c92b8d4b7..56647f3e9 100644 --- a/source/install_requirements_linux.rst +++ b/source/guides/installing-using-linux-tools.rst @@ -1,4 +1,3 @@ - .. _`Installing pip/setuptools/wheel with Linux Package Managers`: =========================================================== @@ -6,8 +5,7 @@ Installing pip/setuptools/wheel with Linux Package Managers =========================================================== :Page Status: Incomplete -:Last Reviewed: 2015-09-17 - +:Last Reviewed: 2021-07-26 This section covers how to install :ref:`pip`, :ref:`setuptools`, and :ref:`wheel` using Linux package managers. @@ -32,33 +30,16 @@ versions. When this is known, we will make note of it below. Fedora ~~~~~~ -* Fedora 21: - - * Python 2:: - - sudo yum upgrade python-setuptools - sudo yum install python-pip python-wheel - - * Python 3: ``sudo yum install python3 python3-wheel`` - -* Fedora 22: - - * Python 2:: - - sudo dnf upgrade python-setuptools - sudo dnf install python-pip python-wheel - - * Python 3: ``sudo dnf install python3 python3-wheel`` +.. code-block:: bash + sudo dnf install python3-pip python3-wheel -To get newer versions of pip, setuptools, and wheel for Python 2, you can enable -the `PyPA Copr Repo `_ using -the `Copr Repo instructions -`__, and then run:: - - sudo yum|dnf upgrade python-setuptools - sudo yum|dnf install python-pip python-wheel +To learn more about Python in Fedora, please visit the `official Fedora docs`_, +`Python Classroom`_ or `Fedora Loves Python`_. +.. _official Fedora docs: https://developer.fedoraproject.org/tech/languages/python/python-installation.html +.. _Python Classroom: https://labs.fedoraproject.org/en/python-classroom/ +.. 
_Fedora Loves Python: https://fedoralovespython.org CentOS/RHEL ~~~~~~~~~~~ @@ -70,51 +51,55 @@ To install pip and wheel for the system Python, there are two options: 1. Enable the `EPEL repository `_ using `these instructions - `__. On - EPEL 6 and EPEL7, you can install pip like so:: - - sudo yum install python-pip + `__. + On EPEL 7, you can install pip and wheel like so: - On EPEL 7 (but not EPEL 6), you can install wheel like so:: + .. code-block:: bash - sudo yum install python-wheel + sudo dnf install python3-pip python3-wheel Since EPEL only offers extra, non-conflicting packages, EPEL does not offer setuptools, since it's in the core repository. 2. Enable the `PyPA Copr Repo - `_ using `these instructions - `__ [1]_. You can install - pip and wheel like so:: + `_ using `these instructions + `__ [1]_. You can install + pip and wheel like so: + + .. code-block:: bash - sudo yum install python-pip python-wheel + sudo dnf install python3-pip python3-wheel - To additionally upgrade setuptools, run:: + To additionally upgrade setuptools, run: - sudo yum upgrade python-setuptools + .. code-block:: bash + + sudo dnf upgrade python3-setuptools To install pip, wheel, and setuptools, in a parallel, non-system environment (using yum) then there are two options: -1. Use the "Sofware Collections" feature to enable a parallel collection that +1. Use the "Software Collections" feature to enable a parallel collection that includes pip, setuptools, and wheel. * For Redhat, see here: - http://developers.redhat.com/products/softwarecollections/overview/ - * For CentOS, see here: https://www.softwarecollections.org/en/ + https://developers.redhat.com/products/softwarecollections/overview + * For CentOS, see here: https://github.com/sclorg Be aware that collections may not contain the most recent versions. -2. Enable the `IUS repository `_ and +2. 
Enable the `IUS repository `_ and install one of the `parallel-installable - `_ + `_ Pythons, along with pip, setuptools, and wheel, which are kept fairly up to date. - For example, for Python 3.4 on CentOS7/RHEL7:: + For example, for Python 3.4 on CentOS7/RHEL7: + + .. code-block:: bash sudo yum install python34u python34u-wheel @@ -122,25 +107,22 @@ To install pip, wheel, and setuptools, in a parallel, non-system environment openSUSE ~~~~~~~~ -* Python 2:: +.. code-block:: bash - sudo zypper install python-pip python-setuptools python-wheel + sudo zypper install python3-pip python3-setuptools python3-wheel -* Python 3:: - - sudo zypper install python3-pip python3-setuptools python3-wheel +.. _debian-ubuntu: +Debian/Ubuntu and derivatives +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Debian/Ubuntu -~~~~~~~~~~~~~ +Firstly, update and refresh repository lists by running this command: -:: - - sudo apt-get install python-pip - -Replace "python" with "python3" for Python 3. +.. code-block:: bash + sudo apt update + sudo apt install python3-venv python3-pip .. warning:: @@ -152,13 +134,9 @@ Replace "python" with "python3" for Python 3. Arch Linux ~~~~~~~~~~ -* Python 2:: - - sudo pacman -S python2-pip - -* Python 3:: +.. code-block:: bash - sudo pacman -S python-pip + sudo pacman -S python-pip ---- diff --git a/source/guides/installing-using-pip-and-virtual-environments.rst b/source/guides/installing-using-pip-and-virtual-environments.rst new file mode 100644 index 000000000..22d1840cc --- /dev/null +++ b/source/guides/installing-using-pip-and-virtual-environments.rst @@ -0,0 +1,512 @@ +Install packages in a virtual environment using pip and venv +============================================================ + +This guide discusses how to create and activate a virtual environment using +the standard library's virtual environment tool :ref:`venv` and install packages. 
+The guide covers how to: + +* Create and activate a virtual environment +* Prepare pip +* Install packages into a virtual environment using the ``pip`` command +* Use and create a requirements file + + +.. note:: This guide applies to supported versions of Python, currently 3.8 + and higher. + + +.. note:: This guide uses the term **package** to refer to a + :term:`Distribution Package`, which commonly is installed from an external + host. This differs from the term :term:`Import Package` which refers to + import modules in your Python source code. + + +.. important:: + This guide has the prerequisite that you are using an official Python version obtained from + . If you are using your operating + system's package manager to install Python, please ensure that Python is + installed before proceeding with these steps. + + +Create and Use Virtual Environments +----------------------------------- + +Create a new virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:ref:`venv` (for Python 3) allows you to manage separate package installations for +different projects. It creates a "virtual" isolated Python installation. When +you switch projects, you can create a new virtual environment which is isolated +from other virtual environments. You benefit from the virtual environment +since packages can be installed confidently and will not interfere with +another project's environment. + +.. tip:: + It is recommended to use a virtual environment when working with third + party packages. + +To create a virtual environment, go to your project's directory and run the +following command. This will create a new virtual environment in a local folder +named ``.venv``: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m venv .venv + +.. tab:: Windows + + .. code-block:: bat + + py -m venv .venv + +The second argument is the location to create the virtual environment. Generally, you +can just create this in your project and call it ``.venv``. 
+ +``venv`` will create a virtual Python installation in the ``.venv`` folder. + +.. Note:: You should exclude your virtual environment directory from your version + control system using ``.gitignore`` or similar. + + +Activate a virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Before you can start installing or using packages in your virtual environment you'll +need to ``activate`` it. Activating a virtual environment will put the +virtual environment-specific ``python`` and ``pip`` executables into your +shell's ``PATH``. + +.. tab:: Unix/macOS + + .. code-block:: bash + + source .venv/bin/activate + +.. tab:: Windows + + .. code-block:: bat + + .venv\Scripts\activate + +To confirm the virtual environment is activated, check the location of your +Python interpreter: + +.. tab:: Unix/macOS + + .. code-block:: bash + + which python + +.. tab:: Windows + + .. code-block:: bat + + where python + +While the virtual environment is active, the above command will output a +filepath that includes the ``.venv`` directory, by ending with the following: + +.. tab:: Unix/macOS + + .. code-block:: bash + + .venv/bin/python + +.. tab:: Windows + + .. code-block:: bat + + .venv\Scripts\python + + +While a virtual environment is activated, pip will install packages into that +specific environment. This enables you to import and use packages in your +Python application. + + +Deactivate a virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you want to switch projects or leave your virtual environment, +``deactivate`` the environment: + +.. code-block:: bash + + deactivate + +.. note:: + Closing your shell will deactivate the virtual environment. If + you open a new shell window and want to use the virtual environment, + reactivate it. + +Reactivate a virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you want to reactivate an existing virtual environment, follow the same +instructions about activating a virtual environment. 
There's no need to create +a new virtual environment. + + +Prepare pip +----------- + +:ref:`pip` is the reference Python package manager. +It's used to install and update packages into a virtual environment. + + +.. tab:: Unix/macOS + + The Python installers for macOS include pip. On Linux, you may have to install + an additional package such as ``python3-pip``. You can make sure that pip is + up-to-date by running: + + .. code-block:: bash + + python3 -m pip install --upgrade pip + python3 -m pip --version + + Afterwards, you should have the latest version of pip installed in your + user site: + + .. code-block:: text + + pip 23.3.1 from .../.venv/lib/python3.9/site-packages (python 3.9) + +.. tab:: Windows + + The Python installers for Windows include pip. You can make sure that pip is + up-to-date by running: + + .. code-block:: bat + + py -m pip install --upgrade pip + py -m pip --version + + Afterwards, you should have the latest version of pip: + + .. code-block:: text + + pip 23.3.1 from .venv\lib\site-packages (Python 3.9.4) + + +Install packages using pip +-------------------------- + +When your virtual environment is activated, you can install packages. Use the +``pip install`` command to install packages. + +Install a package +~~~~~~~~~~~~~~~~~ + +For example, let's install the +`Requests`_ library from the :term:`Python Package Index (PyPI)`: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install requests + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install requests + +pip should download requests and all of its dependencies and install them: + +.. 
code-block:: text + + Collecting requests + Using cached requests-2.18.4-py2.py3-none-any.whl + Collecting chardet<3.1.0,>=3.0.2 (from requests) + Using cached chardet-3.0.4-py2.py3-none-any.whl + Collecting urllib3<1.23,>=1.21.1 (from requests) + Using cached urllib3-1.22-py2.py3-none-any.whl + Collecting certifi>=2017.4.17 (from requests) + Using cached certifi-2017.7.27.1-py2.py3-none-any.whl + Collecting idna<2.7,>=2.5 (from requests) + Using cached idna-2.6-py2.py3-none-any.whl + Installing collected packages: chardet, urllib3, certifi, idna, requests + Successfully installed certifi-2017.7.27.1 chardet-3.0.4 idna-2.6 requests-2.18.4 urllib3-1.22 + +.. _Requests: https://pypi.org/project/requests/ + + +Install a specific package version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +pip allows you to specify which version of a package to install using +:term:`version specifiers `. For example, to install +a specific version of ``requests``: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install 'requests==2.18.4' + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "requests==2.18.4" + +To install the latest ``2.x`` release of requests: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install 'requests>=2.0.0,<3.0.0' + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "requests>=2.0.0,<3.0.0" + +To install pre-release versions of packages, use the ``--pre`` flag: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --pre requests + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --pre requests + + +Install extras +~~~~~~~~~~~~~~ + +Some packages have optional `extras`_. You can tell pip to install these by +specifying the extra in brackets: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install 'requests[security]' + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "requests[security]" + +.. 
_extras: + https://setuptools.readthedocs.io/en/latest/userguide/dependency_management.html#optional-dependencies + + +Install a package from source +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +pip can install a package directly from its source code. For example, to install +the source code in the ``google-auth`` directory: + +.. tab:: Unix/macOS + + .. code-block:: bash + + cd google-auth + python3 -m pip install . + +.. tab:: Windows + + .. code-block:: bat + + cd google-auth + py -m pip install . + +Additionally, pip can install packages from source in +:doc:`development mode `, +meaning that changes to the source directory will immediately affect the +installed package without needing to re-install: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --editable . + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --editable . + + +Install from version control systems +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +pip can install packages directly from their version control system. For +example, you can install directly from a git repository: + +.. code-block:: bash + + google-auth @ git+https://github.com/GoogleCloudPlatform/google-auth-library-python.git + +For more information on supported version control systems and syntax, see pip's +documentation on :ref:`VCS Support `. + + +Install from local archives +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you have a local copy of a :term:`Distribution Package`'s archive (a zip, +wheel, or tar file) you can install it directly with pip: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install requests-2.18.4.tar.gz + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install requests-2.18.4.tar.gz + +If you have a directory containing archives of multiple packages, you can tell +pip to look for packages there and not to use the +:term:`Python Package Index (PyPI)` at all: + +.. tab:: Unix/macOS + + .. 
code-block:: bash + + python3 -m pip install --no-index --find-links=/local/dir/ requests + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --no-index --find-links=/local/dir/ requests + +This is useful if you are installing packages on a system with limited +connectivity or if you want to strictly control the origin of distribution +packages. + + +Install from other package indexes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you want to download packages from a different index than the +:term:`Python Package Index (PyPI)`, you can use the ``--index-url`` flag: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --index-url http://index.example.com/simple/ SomeProject + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --index-url http://index.example.com/simple/ SomeProject + +If you want to allow packages from both the :term:`Python Package Index (PyPI)` +and a separate index, you can use the ``--extra-index-url`` flag instead: + + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --extra-index-url http://index.example.com/simple/ SomeProject + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --extra-index-url http://index.example.com/simple/ SomeProject + +Upgrading packages +------------------ + +pip can upgrade packages in-place using the ``--upgrade`` flag. For example, to +install the latest version of ``requests`` and all of its dependencies: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade requests + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade requests + +Using a requirements file +------------------------- + +Instead of installing packages individually, pip allows you to declare all +dependencies in a :ref:`Requirements File `. For +example you could create a :file:`requirements.txt` file containing: + +.. 
code-block:: text + + requests==2.18.4 + google-auth==1.1.0 + +And tell pip to install all of the packages in this file using the ``-r`` flag: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install -r requirements.txt + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install -r requirements.txt + +Freezing dependencies +--------------------- + +Pip can export a list of all installed packages and their versions using the +``freeze`` command: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip freeze + +.. tab:: Windows + + .. code-block:: bat + + py -m pip freeze + +Which will output a list of package specifiers such as: + +.. code-block:: text + + cachetools==2.0.1 + certifi==2017.7.27.1 + chardet==3.0.4 + google-auth==1.1.1 + idna==2.6 + pyasn1==0.3.6 + pyasn1-modules==0.1.4 + requests==2.18.4 + rsa==3.4.2 + six==1.11.0 + urllib3==1.22 + +The ``pip freeze`` command is useful for creating :ref:`pip:Requirements Files` +that can re-create the exact versions of all packages installed in an environment. diff --git a/source/guides/installing-using-virtualenv.rst b/source/guides/installing-using-virtualenv.rst new file mode 100644 index 000000000..a584b89d5 --- /dev/null +++ b/source/guides/installing-using-virtualenv.rst @@ -0,0 +1,15 @@ +Installing packages using virtualenv +==================================== + +This guide discusses how to install packages using :ref:`pip` and +:ref:`virtualenv`, a tool to create isolated Python environments. + +.. important:: + This "how to" guide on installing packages and using :ref:`virtualenv` is + under development. Please refer to the :ref:`virtualenv` documentation for + details on installation and usage. + + +.. note:: This doc uses the term **package** to refer to a + :term:`Distribution Package` which is different from an :term:`Import + Package` that which is used to import modules in your Python source code. 
diff --git a/source/guides/licensing-examples-and-user-scenarios.rst b/source/guides/licensing-examples-and-user-scenarios.rst new file mode 100644 index 000000000..2c25ddfb0 --- /dev/null +++ b/source/guides/licensing-examples-and-user-scenarios.rst @@ -0,0 +1,358 @@ +.. _licensing-examples-and-user-scenarios: + + +===================================== +Licensing examples and user scenarios +===================================== + + +:pep:`639` has specified the way to declare a project's license and paths to +license files and other legally required information. +This document aims to provide clear guidance on how to migrate from the legacy +to the standardized way of declaring licenses. +Make sure your preferred build backend supports :pep:`639` before +trying to apply the newer guidelines. + + +Licensing Examples +================== + +.. _licensing-example-basic: + +Basic example +------------- + +The Setuptools project itself, as of `version 75.6.0 `__, +does not use the ``License`` field in its own project source metadata. +Further, it no longer explicitly specifies ``license_file``/``license_files`` +as it did previously, since Setuptools relies on its own automatic +inclusion of license-related files matching common patterns, +such as the :file:`LICENSE` file it uses. + +It includes the following license-related metadata in its +:file:`pyproject.toml`: + +.. code-block:: toml + + [project] + classifiers = [ + "License :: OSI Approved :: MIT License" + ] + +The simplest migration to PEP 639 would consist of using this instead: + +.. code-block:: toml + + [project] + license = "MIT" + +Or, if the project used :file:`setup.cfg`, in its ``[metadata]`` table: + +.. code-block:: ini + + [metadata] + license = MIT + +The output Core Metadata for the distribution packages would then be: + +..
code-block:: email + + License-Expression: MIT + License-File: LICENSE + +The :file:`LICENSE` file would be stored at :file:`/setuptools-{VERSION}/LICENSE` +in the sdist and :file:`/setuptools-{VERSION}.dist-info/licenses/LICENSE` +in the wheel, and unpacked from there into the site directory (e.g. +:file:`site-packages/`) on installation; :file:`/` is the root of the respective archive +and ``{VERSION}`` the version of the Setuptools release in the Core Metadata. + + +.. _licensing-example-advanced: + +Advanced example +---------------- + +Suppose Setuptools were to include the licenses of the third-party projects +that are vendored in the :file:`setuptools/_vendor/` and :file:`pkg_resources/_vendor/` +directories; specifically: + +.. code-block:: text + + packaging==21.2 + pyparsing==2.2.1 + ordered-set==3.1.1 + more_itertools==8.8.0 + +The license expressions for these projects are: + +.. code-block:: text + + packaging: Apache-2.0 OR BSD-2-Clause + pyparsing: MIT + ordered-set: MIT + more_itertools: MIT + +A comprehensive license expression covering both Setuptools +proper and its vendored dependencies would contain these metadata, +combining all the license expressions into one. Such an expression might be: + +.. code-block:: text + + MIT AND (Apache-2.0 OR BSD-2-Clause) + +In addition, per the requirements of the licenses, the relevant license files +must be included in the package. Suppose the :file:`LICENSE` file contains the text +of the MIT license and the copyrights used by Setuptools, ``pyparsing``, +``more_itertools`` and ``ordered-set``; and the :file:`LICENSE*` files in the +:file:`setuptools/_vendor/packaging/` directory contain the Apache 2.0 and +2-clause BSD license text, and the Packaging copyright statement and +`license choice notice `__. + +Specifically, we assume the license files are located at the following +paths in the project source tree (relative to the project root and +:file:`pyproject.toml`): + +.. 
code-block:: text + + LICENSE + setuptools/_vendor/packaging/LICENSE + setuptools/_vendor/packaging/LICENSE.APACHE + setuptools/_vendor/packaging/LICENSE.BSD + +Putting it all together, our :file:`pyproject.toml` would be: + +.. code-block:: toml + + [project] + license = "MIT AND (Apache-2.0 OR BSD-2-Clause)" + license-files = [ + "LICENSE*", + "setuptools/_vendor/LICENSE*", + ] + +Or alternatively, the license files can be specified explicitly (paths will be +interpreted as glob patterns): + +.. code-block:: toml + + [project] + license = "MIT AND (Apache-2.0 OR BSD-2-Clause)" + license-files = [ + "LICENSE", + "setuptools/_vendor/LICENSE", + "setuptools/_vendor/LICENSE.APACHE", + "setuptools/_vendor/LICENSE.BSD", + ] + +If our project used :file:`setup.cfg`, we could define this in its ``[metadata]`` table: + +.. code-block:: ini + + [metadata] + license = MIT AND (Apache-2.0 OR BSD-2-Clause) + license_files = + LICENSE + setuptools/_vendor/packaging/LICENSE + setuptools/_vendor/packaging/LICENSE.APACHE + setuptools/_vendor/packaging/LICENSE.BSD + +With either approach, the output Core Metadata in the distribution +would be: + +.. code-block:: email + + License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause) + License-File: LICENSE + License-File: setuptools/_vendor/packaging/LICENSE + License-File: setuptools/_vendor/packaging/LICENSE.APACHE + License-File: setuptools/_vendor/packaging/LICENSE.BSD + +In the resulting sdist, with :file:`/` as the root of the archive and ``{VERSION}`` +the version of the Setuptools release specified in the Core Metadata, +the license files would be located at the paths: + +..
code-block:: text + + /setuptools-{VERSION}/LICENSE + /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE + /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE.APACHE + /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE.BSD + +In the built wheel, with :file:`/` being the root of the archive and +``{VERSION}`` as the previous, the license files would be stored at: + +.. code-block:: text + + /setuptools-{VERSION}.dist-info/licenses/LICENSE + /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE + /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.APACHE + /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.BSD + +Finally, in the installed project, with :file:`site-packages/` being the site dir +and ``{VERSION}`` as the previous, the license files would be installed to: + +.. code-block:: text + + site-packages/setuptools-{VERSION}.dist-info/licenses/LICENSE + site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE + site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.APACHE + site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.BSD + + +Expression examples +''''''''''''''''''' + +Some additional examples of valid ``License-Expression`` values: + +.. code-block:: email + + License-Expression: MIT + License-Expression: BSD-3-Clause + License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause) + License-Expression: MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause) + License-Expression: GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause + License-Expression: LicenseRef-Public-Domain OR CC0-1.0 OR Unlicense + License-Expression: LicenseRef-Proprietary + License-Expression: LicenseRef-Custom-License + + +User Scenarios +============== + +The following covers the range of common use cases from a user perspective, +providing guidance for each. 
Do note that the following +should **not** be considered legal advice, and readers should consult a +licensed legal practitioner in their jurisdiction if they are unsure about +the specifics for their situation. + + +I have a private package that won't be distributed +-------------------------------------------------- + +If your package isn't shared publicly, i.e. outside your company, +organization or household, it *usually* isn't strictly necessary to include +a formal license, so you wouldn't necessarily have to do anything extra here. + +However, it is still a good idea to include ``LicenseRef-Proprietary`` +as a license expression in your package configuration, and/or a +copyright statement and any legal notices in a :file:`LICENSE.txt` file +in the root of your project directory, which will be automatically +included by packaging tools. + + +I just want to share my own work without legal restrictions +----------------------------------------------------------- + +While you aren't required to include a license, if you don't, no one has +`any permission to download, use or improve your work `__, +so that's probably the *opposite* of what you actually want. +The `MIT license `__ is a great choice instead, as it's simple, +widely used and allows anyone to do whatever they want with your work +(other than sue you, which you probably also don't want). + +To apply it, just paste `the text `__ into a file named +:file:`LICENSE.txt` at the root of your repo, and add the year and your name to +the copyright line. Then, just add ``license = "MIT"`` under +``[project]`` in your :file:`pyproject.toml` if your packaging tool supports it, +or in its config file/section. You're done! 
+ + +I want to distribute my project under a specific license +-------------------------------------------------------- + +To use a particular license, simply paste its text into a :file:`LICENSE.txt` +file at the root of your repo, if you don't have it in a file starting with +:file:`LICENSE` or :file:`COPYING` already, and add +``license = "LICENSE-ID"`` under ``[project]`` in your +:file:`pyproject.toml` if your packaging tool supports it, or else in its +config file. You can find the ``LICENSE-ID`` +and copyable license text on sites like +`ChooseALicense `__ or `SPDX `__. + +Many popular code hosts, project templates and packaging tools can add the +license file for you, and may support the expression as well in the future. + + +I maintain an existing package that's already licensed +------------------------------------------------------ + +If you already have license files and metadata in your project, you +should only need to make a couple of tweaks to take advantage of the new +functionality. + +In your project config file, enter your license expression under +``license`` (``[project]`` table in :file:`pyproject.toml`), +or the equivalent for your packaging tool, +and make sure to remove any legacy ``license`` table subkeys or +``License ::`` classifiers. Your existing ``license`` value may already +be valid as one (e.g. ``MIT``, ``Apache-2.0 OR BSD-2-Clause``, etc); +otherwise, check the `SPDX license list `__ for the identifier +that matches the license used in your project. + +Make sure to list your license files under ``license-files`` +under ``[project]`` in :file:`pyproject.toml` +or else in your tool's configuration file. + +See the :ref:`licensing-example-basic` for a simple but complete real-world demo +of how this works in practice. +See also the best-effort guidance on how to translate license classifiers +into license expression provided by the :pep:`639` authors: +`Mapping License Classifiers to SPDX Identifiers `__. 
+Packaging tools may support automatically converting legacy licensing +metadata; check your tool's documentation for more information. + + +My package includes other code under different licenses +------------------------------------------------------- + +If your project includes code from others covered by different licenses, +such as vendored dependencies or files copied from other open source +software, you can construct a license expression +to describe the licenses involved and the relationship +between them. + +In short, ``License-1 AND License-2`` means that *both* licenses apply +to your project, or parts of it (for example, you included a file +under another license), and ``License-1 OR License-2`` means that +*either* of the licenses can be used, at the user's option (for example, +you want to allow users a choice of multiple licenses). You can use +parentheses (``()``) for grouping to form expressions that cover even the most +complex situations. + +In your project config file, enter your license expression under +``license`` (``[project]`` table of :file:`pyproject.toml`), +or the equivalent for your packaging tool, +and make sure to remove any legacy ``license`` table subkeys +or ``License ::`` classifiers. + +Also, make sure you add the full license text of all the licenses as files +somewhere in your project repository. List the +relative path or glob patterns to each of them under ``license-files`` +under ``[project]`` in :file:`pyproject.toml` +(if your tool supports it), or else in your tool's configuration file. + +As an example, if your project was licensed MIT but incorporated +a vendored dependency (say, ``packaging``) that was licensed under +either Apache 2.0 or the 2-clause BSD, your license expression would +be ``MIT AND (Apache-2.0 OR BSD-2-Clause)``.
You might have a +:file:`LICENSE.txt` in your repo root, and a :file:`LICENSE-APACHE.txt` and +:file:`LICENSE-BSD.txt` in the :file:`_vendor/` subdirectory, so to include +all of them, you'd specify ``["LICENSE.txt", "_vendor/packaging/LICENSE*"]`` +as glob patterns, or +``["LICENSE.txt", "_vendor/LICENSE-APACHE.txt", "_vendor/LICENSE-BSD.txt"]`` +as literal file paths. + +See a fully worked out :ref:`licensing-example-advanced` for an end-to-end +application of this to a real-world complex project, with many technical +details, and consult a `tutorial `__ for more help and examples +using SPDX identifiers and expressions. + + +.. _chooseamitlicense: https://choosealicense.com/licenses/mit/ +.. _choosealicenselist: https://choosealicense.com/licenses/ +.. _dontchoosealicense: https://choosealicense.com/no-permission/ +.. _mappingclassifierstospdx: https://peps.python.org/pep-0639/appendix-mapping-classifiers/ +.. _packaginglicense: https://github.com/pypa/packaging/blob/21.2/LICENSE +.. _setuptools7560: https://github.com/pypa/setuptools/blob/v75.6.0/pyproject.toml +.. _spdxlist: https://spdx.org/licenses/ +.. _spdxtutorial: https://github.com/david-a-wheeler/spdx-tutorial diff --git a/source/guides/making-a-pypi-friendly-readme.rst b/source/guides/making-a-pypi-friendly-readme.rst new file mode 100644 index 000000000..4a3a20670 --- /dev/null +++ b/source/guides/making-a-pypi-friendly-readme.rst @@ -0,0 +1,132 @@ +Making a PyPI-friendly README +============================= + +README files can help your users understand your project and can be used to set your project's description on PyPI. +This guide helps you create a README in a PyPI-friendly format and include your README in your package so it appears on PyPI. + + +Creating a README file +---------------------- + +README files for Python projects are often named ``README``, ``README.txt``, ``README.rst``, or ``README.md``. 
+ +For your README to display properly on PyPI, choose a markup language supported by PyPI. +Formats supported by `PyPI's README renderer `_ are: + +* plain text +* `reStructuredText `_ (without Sphinx extensions) +* Markdown (`GitHub Flavored Markdown `_ by default, + or `CommonMark `_) + +It's customary to save your README file in the root of your project, in the same directory as your :file:`setup.py` file. + + +Including your README in your package's metadata +------------------------------------------------ + +To include your README's contents as your package description, +set your project's ``Description`` and ``Description-Content-Type`` metadata, +typically in your project's :file:`setup.py` file. + +.. seealso:: + + * :ref:`description-optional` + * :ref:`description-content-type-optional` + +For example, to set these values in a package's :file:`setup.py` file, +use ``setup()``'s ``long_description`` and ``long_description_content_type``. + +Set the value of ``long_description`` to the contents (not the path) of the README file itself. +Set the ``long_description_content_type`` to an accepted ``Content-Type``-style value for your README file's markup, +such as ``text/plain``, ``text/x-rst`` (for reStructuredText), or ``text/markdown``. + +.. note:: + + If you're using GitHub-flavored Markdown to write a project's description, ensure you upgrade + the following tools: + + .. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --user --upgrade setuptools wheel twine + + .. tab:: Windows + + .. code-block:: bat + + py -m pip install --user --upgrade setuptools wheel twine + + The minimum required versions of the respective tools are: + + - ``setuptools >= 38.6.0`` + - ``wheel >= 0.31.0`` + - ``twine >= 1.11.0`` + + It's recommended that you use ``twine`` to upload the project's distribution packages: + + .. 
code-block:: bash + + twine upload dist/* + +For example, see this :file:`setup.py` file, +which reads the contents of :file:`README.md` as ``long_description`` +and identifies the markup as GitHub-flavored Markdown: + +.. code-block:: python + + from setuptools import setup + + # read the contents of your README file + from pathlib import Path + this_directory = Path(__file__).parent + long_description = (this_directory / "README.md").read_text() + + setup( + name='an_example_package', + # other arguments omitted + long_description=long_description, + long_description_content_type='text/markdown' + ) + + +Validating reStructuredText markup +---------------------------------- + +If your README is written in reStructuredText, any invalid markup will prevent +it from rendering, causing PyPI to instead just show the README's raw source. + +Note that Sphinx extensions used in docstrings, such as +:doc:`directives ` and :doc:`roles ` +(e.g., "``:py:func:`getattr```" or "``:ref:`my-reference-label```"), are not allowed here and will result in error +messages like "``Error: Unknown interpreted text role "py:func".``". + +You can check your README for markup errors before uploading as follows: + +1. Install the latest version of `twine `_; + version 1.12.0 or higher is required: + + .. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade twine + + .. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade twine + +2. Build the sdist and wheel for your project as described under + :ref:`Packaging Your Project`. + +3. Run ``twine check`` on the sdist and wheel: + + .. code-block:: bash + + twine check dist/* + + This command will report any problems rendering your README. If your markup + renders fine, the command will output ``Checking distribution FILENAME: + Passed``. 
diff --git a/source/guides/migrating-to-pypi-org.rst b/source/guides/migrating-to-pypi-org.rst new file mode 100644 index 000000000..2b565e8ee --- /dev/null +++ b/source/guides/migrating-to-pypi-org.rst @@ -0,0 +1,142 @@ +:orphan: + +.. _`Migrating to PyPI.org`: + +Migrating to PyPI.org +===================== + +:Page Status: Obsolete + +:term:`pypi.org` is the new, rewritten version of PyPI that has replaced the +legacy PyPI code base. It is the default version of PyPI that people are +expected to use. These are the tools and processes that people will need to +interact with ``PyPI.org``. + +Publishing releases +------------------- + +``pypi.org`` is the default upload platform as of September 2016. + +Uploads through ``pypi.python.org`` were *switched off* on **July 3, 2017**. +As of April 13th, 2018, ``pypi.org`` is the URL for PyPI. + +The recommended way to migrate to PyPI.org for uploading is to ensure that you +are using a new enough version of your upload tool. + +The default upload settings switched to ``pypi.org`` in the following versions: + +* ``twine`` 1.8.0 +* ``setuptools`` 27.0.0 +* Python 2.7.13 (``distutils`` update) +* Python 3.4.6 (``distutils`` update) +* Python 3.5.3 (``distutils`` update) +* Python 3.6.0 (``distutils`` update) + +In addition to ensuring you're on a new enough version of the tool for the +tool's default to have switched, you must also make sure that you have not +configured the tool to override its default upload URL. Typically this is +configured in a file located at :file:`$HOME/.pypirc`. If you see a file like: + +.. code:: + + [distutils] + index-servers = + pypi + + [pypi] + repository = https://pypi.python.org/pypi + username = + password = + + +Then simply delete the line starting with ``repository`` and you will use +your upload tool's default URL. 
+ +If for some reason you're unable to upgrade the version of your tool +to a version that defaults to using PyPI.org, then you may edit +:file:`$HOME/.pypirc` and include the ``repository:`` line, but use the +value ``https://upload.pypi.org/legacy/`` instead: + +.. code:: + + [distutils] + index-servers = + pypi + + [pypi] + repository = https://upload.pypi.org/legacy/ + username = + password = + +(``legacy`` in this URL refers to the fact that this is the new server +implementation's emulation of the legacy server implementation's upload API.) + +For more details, see the :ref:`specification ` for :file:`.pypirc`. + +Registering package names & metadata +------------------------------------ + +Explicit pre-registration of package names with the ``setup.py register`` +command prior to the first upload is no longer required, and is not +currently supported by the legacy upload API emulation on PyPI.org. + +As a result, attempting explicit registration after switching to using +PyPI.org for uploads will give the following error message:: + + Server response (410): This API is no longer supported, instead simply upload the file. + +The solution is to skip the registration step, and proceed directly to +uploading artifacts. + + +Using TestPyPI +-------------- + +Legacy TestPyPI (testpypi.python.org) is no longer available; use +`test.pypi.org `_ instead. If you use TestPyPI, +you must update your :file:`$HOME/.pypirc` to handle TestPyPI's new +location, by replacing ``https://testpypi.python.org/pypi`` with +``https://test.pypi.org/legacy/``, for example: + +.. code:: + + [distutils] + index-servers= + pypi + testpypi + + [testpypi] + repository = https://test.pypi.org/legacy/ + username = + password = + +For more details, see the :ref:`specification ` for :file:`.pypirc`. 
+ + +Registering new user accounts +----------------------------- + +In order to help mitigate spam attacks against PyPI, new user registration +through ``pypi.python.org`` was *switched off* on **February 20, 2018**. +New user registrations at ``pypi.org`` are open. + + +Browsing packages +----------------- + +While ``pypi.python.org`` may still be used in links from other PyPA +documentation, etc., the default interface for browsing packages is +``pypi.org``. The domain pypi.python.org now redirects to pypi.org, +and may be disabled sometime in the future. + + +Downloading packages +-------------------- + +``pypi.org`` is the default host for downloading packages. + +Managing published packages and releases +---------------------------------------- + +``pypi.org`` provides a fully functional interface for logged in users to +manage their published packages and releases. diff --git a/source/guides/modernize-setup-py-project.rst b/source/guides/modernize-setup-py-project.rst new file mode 100644 index 000000000..5b6ab3c26 --- /dev/null +++ b/source/guides/modernize-setup-py-project.rst @@ -0,0 +1,248 @@ +.. _modernize-setup-py-project: + + +============================================== +How to modernize a ``setup.py`` based project? +============================================== + + +Should ``pyproject.toml`` be added? +=================================== + +A :term:`pyproject.toml` file is strongly recommended. +The presence of a :file:`pyproject.toml` file itself does not bring much. [#]_ +What is actually strongly recommended is the ``[build-system]`` table in :file:`pyproject.toml`. + +.. [#] Note that it has influence on the build isolation feature of pip, + see below. + + +Should ``setup.py`` be deleted? +=============================== + +No, :file:`setup.py` can exist in a modern :ref:`setuptools` based project. +The :term:`setup.py` file is a valid configuration file for setuptools +that happens to be written in Python.
+However, the following commands are deprecated and **MUST NOT** be run anymore, +and their recommended replacement commands should be used instead: + ++---------------------------------+----------------------------------------+ +| Deprecated | Recommendation | ++=================================+========================================+ +| ``python setup.py install`` | ``python -m pip install .`` | ++---------------------------------+----------------------------------------+ +| ``python setup.py develop`` | ``python -m pip install --editable .`` | ++---------------------------------+----------------------------------------+ +| ``python setup.py sdist`` | ``python -m build`` | ++---------------------------------+ | +| ``python setup.py bdist_wheel`` | | ++---------------------------------+----------------------------------------+ + + +For more details: + +* :ref:`setup-py-deprecated` + + +Where to start? +=============== + +The :term:`project` must contain a :file:`pyproject.toml` file at the root of its source tree +that contains a ``[build-system]`` table like so: + +.. code:: toml + + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + + +This is the standardized method of letting :term:`build frontends ` know +that :ref:`setuptools` is the :term:`build backend ` for this project. + +Note that the presence of a :file:`pyproject.toml` file (even if empty) +triggers :ref:`pip` to change its default behavior to use *build isolation*. + +For more details: + +* :ref:`distributing-packages` +* :ref:`pyproject-build-system-table` +* :doc:`pip:reference/build-system/pyproject-toml` + + +How to handle additional build-time dependencies? 
+================================================= + +On top of setuptools itself, +if :file:`setup.py` depends on other third-party libraries (outside of Python's standard library), +those must be listed in the ``requires`` list of the ``[build-system]`` table, +so that the build frontend knows to install them +when building the :term:`distributions `. + +For example, a :file:`setup.py` file such as this: + +.. code:: python + + import setuptools + import some_build_toolkit # comes from the `some-build-toolkit` library + + def get_version(): + version = some_build_toolkit.compute_version() + return version + + setuptools.setup( + name="my-project", + version=get_version(), + ) + + +requires a :file:`pyproject.toml` file like this (:file:`setup.py` stays unchanged): + +.. code:: toml + + [build-system] + requires = [ + "setuptools", + "some-build-toolkit", + ] + build-backend = "setuptools.build_meta" + + +For more details: + +* :ref:`pyproject-build-system-table` + + +What is the build isolation feature? +==================================== + +Build frontends typically create an ephemeral virtual environment +where they install only the build dependencies (and their dependencies) +that are listed under ``build-system.requires`` +and trigger the build in that environment. + +For some projects this isolation is unwanted and it can be deactivated as follows: + +* ``python -m build --no-isolation`` +* ``python -m pip install --no-build-isolation`` + +For more details: + +* :doc:`pip:reference/build-system/pyproject-toml` + + +How to handle packaging metadata? +================================= + +All static metadata can optionally be moved to a ``[project]`` table in :file:`pyproject.toml`. + +For example, a :file:`setup.py` file such as this: + +.. code:: python + + import setuptools + + setuptools.setup( + name="my-project", + version="1.2.3", + ) + + +can be entirely replaced by a :file:`pyproject.toml` file like this: + +.. 
code:: toml + + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + + [project] + name = "my-project" + version = "1.2.3" + + +Read :ref:`pyproject-project-table` for the full specification +of the content allowed in the ``[project]`` table. + + +How to handle dynamic metadata? +=============================== + +If some packaging metadata fields are not static +they need to be listed as ``dynamic`` in this ``[project]`` table. + +For example, a :file:`setup.py` file such as this: + +.. code:: python + + import setuptools + import some_build_toolkit + + def get_version(): + version = some_build_toolkit.compute_version() + return version + + setuptools.setup( + name="my-project", + version=get_version(), + ) + + +can be modernized as follows: + +.. code:: toml + + [build-system] + requires = [ + "setuptools", + "some-build-toolkit", + ] + build-backend = "setuptools.build_meta" + + [project] + name = "my-project" + dynamic = ["version"] + + +.. code:: python + + import setuptools + import some_build_toolkit + + def get_version(): + version = some_build_toolkit.compute_version() + return version + + setuptools.setup( + version=get_version(), + ) + + +For more details: + +* :ref:`declaring-project-metadata-dynamic` + + +What if something that can not be changed expects a ``setup.py`` file? +====================================================================== + +For example, a process exists that can not be changed easily +and it needs to execute a command such as ``python setup.py --name``. + +It is perfectly fine to leave a :file:`setup.py` file in the project source tree +even after all its content has been moved to :file:`pyproject.toml`. +This file can be as minimalistic as this: + +.. code:: python + + import setuptools + + setuptools.setup() + + +Where to read more about this? 
+============================== + +* :ref:`pyproject-toml-spec` +* :doc:`pip:reference/build-system/pyproject-toml` +* :doc:`setuptools:build_meta` diff --git a/source/multi_version_install.rst b/source/guides/multi-version-installs.rst similarity index 91% rename from source/multi_version_install.rst rename to source/guides/multi-version-installs.rst index fb94a667e..a09bc900a 100644 --- a/source/multi_version_install.rst +++ b/source/guides/multi-version-installs.rst @@ -1,11 +1,11 @@ +:orphan: -.. _`Multi-version Installs`: +.. _`Multi-version installs`: -Multi-version Installs +Multi-version installs ====================== -:Page Status: Complete -:Last Reviewed: 2014-12-27 +:Page Status: Obsolete easy_install allows simultaneous installation of different versions of the same @@ -35,7 +35,7 @@ This can be worked around by setting all dependencies in ``__main__.__requires__`` before importing ``pkg_resources`` for the first time, but that approach does mean that standard command line invocations of the affected tools can't be used - it's necessary to write a custom -wrapper script or use ``python -c ''`` to invoke the application's +wrapper script or use ``python3 -c ''`` to invoke the application's main entry point directly. Refer to the `pkg_resources documentation diff --git a/source/guides/packaging-binary-extensions.rst b/source/guides/packaging-binary-extensions.rst new file mode 100644 index 000000000..de8a9d2d6 --- /dev/null +++ b/source/guides/packaging-binary-extensions.rst @@ -0,0 +1,417 @@ +.. _`Binary Extensions`: + +=========================== +Packaging binary extensions +=========================== + +:Page Status: Incomplete +:Last Reviewed: 2013-12-08 + +One of the features of the CPython reference interpreter is that, in +addition to allowing the execution of Python code, it also exposes a rich +C API for use by other software. 
One of the most common uses of this C API +is to create importable C extensions that allow things which aren't +always easy to achieve in pure Python code. + + +An overview of binary extensions +================================ + +Use cases +--------- + +The typical use cases for binary extensions break down into just three +conventional categories: + +* **accelerator modules**: these modules are completely self-contained, and + are created solely to run faster than the equivalent pure Python code + runs in CPython. Ideally, accelerator modules will always have a pure + Python equivalent to use as a fallback if the accelerated version isn't + available on a given system. The CPython standard library makes extensive + use of accelerator modules. + *Example*: When importing ``datetime``, Python falls back to the + `datetime.py `_ + module if the C implementation ( + `_datetimemodule.c `_) + is not available. +* **wrapper modules**: these modules are created to expose existing C interfaces + to Python code. They may either expose the underlying C interface directly, + or else expose a more "Pythonic" API that makes use of Python language + features to make the API easier to use. The CPython standard library makes + extensive use of wrapper modules. + *Example*: `functools.py `_ + is a Python module wrapper for + `_functoolsmodule.c `_. +* **low-level system access**: these modules are created to access lower level + features of the CPython runtime, the operating system, or the underlying + hardware. Through platform specific code, extension modules may achieve + things that aren't possible in pure Python code. A number of CPython + standard library modules are written in C in order to access interpreter + internals that aren't exposed at the language level. + *Example*: ``sys``, which comes from + `sysmodule.c `_. 
+
+  One particularly notable feature of C extensions is that, when they don't
+  need to call back into the interpreter runtime, they can release CPython's
+  global interpreter lock around long-running operations (regardless of
+  whether those operations are CPU or IO bound).
+
+Not all extension modules will fit neatly into the above categories. The
+extension modules included with NumPy, for example, span all three use cases
+- they move inner loops to C for speed reasons, wrap external libraries
+written in C, FORTRAN and other languages, and use low level system
+interfaces for both CPython and the underlying operating system to support
+concurrent execution of vectorised operations and to tightly control the
+exact memory layout of created objects.
+
+
+Disadvantages
+-------------
+
+The main disadvantage of using binary extensions is the fact that it makes
+subsequent distribution of the software more difficult. One of the
+advantages of using Python is that it is largely cross platform, and the
+languages used to write extension modules (typically C or C++, but really
+any language that can bind to the CPython C API) typically require that
+custom binaries be created for different platforms.
+
+This means that binary extensions:
+
+* require that end users be able to either build them from source, or else
+  that someone publish pre-built binaries for common platforms
+
+* may not be compatible with different builds of the CPython reference
+  interpreter
+
+* often will not work correctly with alternative interpreters such as PyPy,
+  IronPython or Jython
+
+* if handcoded, make maintenance more difficult by requiring that
+  maintainers be familiar not only with Python, but also with the language
+  used to create the binary extension, as well as with the details of the
+  CPython C API. 
+ +* if a pure Python fallback implementation is provided, make maintenance + more difficult by requiring that changes be implemented in two places, + and introducing additional complexity in the test suite to ensure both + versions are always executed. + +Another disadvantage of relying on binary extensions is that alternative +import mechanisms (such as the ability to import modules directly from +zipfiles) often won't work for extension modules (as the dynamic loading +mechanisms on most platforms can only load libraries from disk). + + +Alternatives to handcoded accelerator modules +--------------------------------------------- + +When extension modules are just being used to make code run faster (after +profiling has identified the code where the speed increase is worth +additional maintenance effort), a number of other alternatives should +also be considered: + +* look for existing optimised alternatives. The CPython standard library + includes a number of optimised data structures and algorithms (especially + in the builtins and the ``collections`` and ``itertools`` modules). The + Python Package Index also offers additional alternatives. Sometimes, the + appropriate choice of standard library or third party module can avoid the + need to create your own accelerator module. + +* for long running applications, the JIT compiled `PyPy interpreter + `__ may offer a suitable alternative to the standard + CPython runtime. The main barrier to adopting PyPy is typically reliance + on other binary extension modules - while PyPy does emulate the CPython + C API, modules that rely on that cause problems for the PyPy JIT, and the + emulation layer can often expose latent defects in extension modules that + CPython currently tolerates (frequently around reference counting errors - + an object having one live reference instead of two often won't break + anything, but no references instead of one is a major problem). 
+ +* `Cython `__ is a mature static compiler that can + compile most Python code to C extension modules. The initial compilation + provides some speed increases (by bypassing the CPython interpreter layer), + and Cython's optional static typing features can offer additional + opportunities for speed increases. Using Cython still carries the + `disadvantages`_ associated with using binary extensions, + but has the benefit of having a reduced barrier to entry for Python + programmers (relative to other languages like C or C++). + +* `Numba `__ is a newer tool, created by members + of the scientific Python community, that aims to leverage LLVM to allow + selective compilation of pieces of a Python application to native + machine code at runtime. It requires that LLVM be available on the + system where the code is running, but can provide significant speed + increases, especially for operations that are amenable to vectorisation. + + +Alternatives to handcoded wrapper modules +----------------------------------------- + +The C ABI (Application Binary Interface) is a common standard for sharing +functionality between multiple applications. One of the strengths of the +CPython C API (Application Programming Interface) is allowing Python users +to tap into that functionality. However, wrapping modules by hand is quite +tedious, so a number of other alternative approaches should be considered. + +The approaches described below don't simplify the distribution case at all, +but they *can* significantly reduce the maintenance burden of keeping +wrapper modules up to date. + +* In addition to being useful for the creation of accelerator modules, + `Cython `__ is also widely used for creating wrapper + modules for C or C++ APIs. It involves wrapping the interfaces by + hand, which gives a wide range of freedom in designing and optimising + the wrapper code, but may not be a good choice for wrapping very + large APIs quickly. 
See the + `list of third-party tools `_ + for automatic wrapping with Cython. It also supports performance-oriented + Python implementations that provide a CPython-like C-API, such as PyPy + and Pyston. + +* :doc:`pybind11 ` is a pure C++11 library + that provides a clean C++ interface to the CPython (and PyPy) C API. It + does not require a pre-processing step; it is written entirely in + templated C++. Helpers are included for Setuptools or CMake builds. It + was based on `Boost.Python `__, + but doesn't require the Boost libraries or BJam. + +* :doc:`cffi ` is a project created by some of the PyPy + developers to make it straightforward for developers that already know + both Python and C to expose their C modules to Python applications. It + also makes it relatively straightforward to wrap a C module based on its + header files, even if you don't know C yourself. + + One of the key advantages of ``cffi`` is that it is compatible with the + PyPy JIT, allowing CFFI wrapper modules to participate fully in PyPy's + tracing JIT optimisations. + +* `SWIG `__ is a wrapper interface generator that + allows a variety of programming languages, including Python, to interface + with C and C++ code. + +* The standard library's ``ctypes`` module, while useful for getting access + to C level interfaces when header information isn't available, suffers + from the fact that it operates solely at the C ABI level, and thus has + no automatic consistency checking between the interface actually being + exported by the library and the one declared in the Python code. By + contrast, the above alternatives are all able to operate at the C *API* + level, using C header files to ensure consistency between the interface + exported by the library being wrapped and the one expected by the Python + wrapper module. While ``cffi`` *can* operate directly at the C ABI level, + it suffers from the same interface inconsistency problems as ``ctypes`` + when it is used that way. 
+ + +Alternatives for low level system access +---------------------------------------- + +For applications that need low level system access (regardless of the +reason), a binary extension module often *is* the best way to go about it. +This is particularly true for low level access to the CPython runtime +itself, since some operations (like releasing the Global Interpreter Lock) +are simply invalid when the interpreter is running code, even if a module +like ``ctypes`` or ``cffi`` is used to obtain access to the relevant C +API interfaces. + +For cases where the extension module is manipulating the underlying +operating system or hardware (rather than the CPython runtime), it may +sometimes be better to just write an ordinary C library (or a library in +another systems programming language like C++ or Rust that can export a C +compatible ABI), and then use one of the wrapping techniques described +above to make the interface available as an importable Python module. + + +Implementing binary extensions +============================== + +The CPython :doc:`Extending and Embedding ` +guide includes an introduction to writing a +:doc:`custom extension module in C `. + +FIXME: Elaborate that all this is one of the reasons why you probably +*don't* want to handcode your extension modules :) + + +Extension module lifecycle +-------------------------- + +FIXME: This section needs to be fleshed out. + + +Implications of shared static state and subinterpreters +------------------------------------------------------- + +FIXME: This section needs to be fleshed out. + + +Implications of the GIL +----------------------- + +FIXME: This section needs to be fleshed out. + + +Memory allocation APIs +---------------------- + +FIXME: This section needs to be fleshed out. + + +.. _cpython-stable-abi: + +ABI Compatibility +----------------- + +The CPython C API does not guarantee ABI stability between minor releases +(3.2, 3.3, 3.4, etc.). 
This means that, typically, if you build an
+extension module against one version of Python, it is only guaranteed to
+work with the same minor version of Python and not with any other minor
+versions.
+
+Python 3.2 introduced the Limited API, which is a well-defined subset of
+Python's C API. The symbols needed for the Limited API form the
+"Stable ABI" which is guaranteed to be compatible across all Python 3.x
+versions. Wheels containing extensions built against the stable ABI use
+the ``abi3`` ABI tag, to reflect that they're compatible with all Python
+3.x versions.
+
+CPython's :doc:`C API stability` page provides
+detailed information about the API / ABI stability guarantees, how to use
+the Limited API and the exact contents of the "Limited API".
+
+
+Building binary extensions
+==========================
+
+FIXME: Cover the build-backends available for building extensions.
+
+Building extensions for multiple platforms
+------------------------------------------
+
+If you plan to distribute your extension, you should provide
+:term:`wheels ` for all the platforms you intend to support. These
+are usually built on continuous integration (CI) systems. There are tools
+to help you build highly redistributable binaries from CI; these include
+:ref:`cibuildwheel` and :ref:`multibuild`.
+
+For most extensions, you will need to build wheels for all the platforms
+you intend to support. This means that the number of wheels you need to
+build is the product of::
+
+    count(Python minor versions) * count(OS) * count(architectures)
+
+Using CPython's :ref:`Stable ABI ` can help significantly
+reduce the number of wheels you need to provide, since a single wheel on a
+platform can be used with all Python minor versions; eliminating one dimension
+of the matrix. It also removes the need to generate new wheels for each new
+minor version of Python. 
+
+Binary extensions for Windows
+-----------------------------
+
+Before it is possible to build a binary extension, it is necessary to ensure
+that you have a suitable compiler available. On Windows, Visual C is used to
+build the official CPython interpreter, and should be used to build compatible
+binary extensions. To set up a build environment for binary extensions, install
+`Visual Studio Community Edition `__
+- any recent version is fine.
+
+One caveat: if you use Visual Studio 2019 or later, your extension will depend
+on an "extra" file, ``VCRUNTIME140_1.dll``, in addition to the
+``VCRUNTIME140.dll`` that all previous versions back to 2015 depend on. This
+will add an extra requirement to using your extension on versions of CPython
+that do not include this extra file. To avoid this, you can add the
+compile-time argument ``/d2FH4-``. Recent versions of Python may include this
+file.
+
+Building for Python prior to 3.5 is discouraged, because older versions of
+Visual Studio are no longer available from Microsoft. If you do need to build
+for older versions, you can set ``DISTUTILS_USE_SDK=1`` and ``MSSdk=1`` to
+force the currently activated version of MSVC to be found, and you should
+exercise care when designing your extension not to malloc/free memory across
+different libraries, avoid relying on changed data structures, and so on. Tools
+for generating extension modules usually avoid these things for you.
+
+
+
+Binary extensions for Linux
+---------------------------
+
+Linux binaries must use a sufficiently old glibc to be compatible with older
+distributions. The `manylinux `_ Docker
+images provide a build environment with a glibc old enough to support most
+current Linux distributions on common architectures.
+
+Binary extensions for macOS
+---------------------------
+
+Binary compatibility on macOS is determined by the target minimum deployment
+system, e.g. 
*10.9*, which is often specified with the +``MACOSX_DEPLOYMENT_TARGET`` environmental variable when building binaries on +macOS. When building with setuptools / distutils, the deployment target is +specified with the flag ``--plat-name``, e.g. ``macosx-10.9-x86_64``. For +common deployment targets for macOS Python distributions, see the `MacPython +Spinning Wheels wiki +`_. + +Publishing binary extensions +============================ + +Publishing binary extensions through PyPI uses the same upload mechanisms as +publishing pure Python packages. You build a wheel file for your extension +using the build-backend and upload it to PyPI using +:doc:`twine `. + +Avoid binary-only releases +-------------------------- + +It is strongly recommended that you publish your binary extensions as +well as the source code that was used to build them. This allows users to +build the extension from source if they need to. Notably, this is required +for certain Linux distributions that build from source within their +own build systems for the distro package repositories. + +Weak linking +------------ + +FIXME: This section needs to be fleshed out. + +Additional resources +==================== + +Cross-platform development and distribution of extension modules is a complex topic, +so this guide focuses primarily on providing pointers to various tools that automate +dealing with the underlying technical challenges. The additional resources in this +section are instead intended for developers looking to understand more about the +underlying binary interfaces that those systems rely on at runtime. + +Cross-platform wheel generation with scikit-build +------------------------------------------------- + +The `scikit-build `_ package +helps abstract cross-platform build operations and provides additional capabilities +when creating binary extension packages. 
Additional documentation is also available on +the `C runtime, compiler, and build system generator +`_ for Python +binary extension modules. + +Introduction to C/C++ extension modules +--------------------------------------- + +For a more in depth explanation of how extension modules are used by CPython on +a Debian system, see the following articles: + +* `What are (c)python extension modules? `_ +* `Releasing the gil `_ +* `Writing cpython extension modules using C++ `_ + +Additional considerations for binary wheels +------------------------------------------- + +The `pypackaging-native `_ website has +additional coverage of packaging Python packages with native code. It aims to +provide an overview of the most important packaging issues for such projects, +with in-depth explanations and references. + +Examples of topics covered are non-Python compiled dependencies ("native +dependencies"), the importance of the ABI (Application Binary Interface) of +native code, dependency on SIMD code and cross compilation. diff --git a/source/guides/packaging-namespace-packages.rst b/source/guides/packaging-namespace-packages.rst new file mode 100644 index 000000000..3d929d527 --- /dev/null +++ b/source/guides/packaging-namespace-packages.rst @@ -0,0 +1,291 @@ +.. _packaging-namespace-packages: + +============================ +Packaging namespace packages +============================ + +Namespace packages allow you to split the sub-packages and modules within a +single :term:`package ` across multiple, separate +:term:`distribution packages ` (referred to as +**distributions** in this document to avoid ambiguity). For example, if you +have the following package structure: + +.. code-block:: text + + mynamespace/ + __init__.py + subpackage_a/ + __init__.py + ... + subpackage_b/ + __init__.py + ... 
+ module_b.py + pyproject.toml + +And you use this package in your code like so:: + + from mynamespace import subpackage_a + from mynamespace import subpackage_b + +Then you can break these sub-packages into two separate distributions: + +.. code-block:: text + + mynamespace-subpackage-a/ + pyproject.toml + src/ + mynamespace/ + subpackage_a/ + __init__.py + + mynamespace-subpackage-b/ + pyproject.toml + src/ + mynamespace/ + subpackage_b/ + __init__.py + module_b.py + +Each sub-package can now be separately installed, used, and versioned. + +Namespace packages can be useful for a large collection of loosely-related +packages (such as a large corpus of client libraries for multiple products from +a single company). However, namespace packages come with several caveats and +are not appropriate in all cases. A simple alternative is to use a prefix on +all of your distributions such as ``import mynamespace_subpackage_a`` (you +could even use ``import mynamespace_subpackage_a as subpackage_a`` to keep the +import object short). + + +Creating a namespace package +============================ + +There are currently two different approaches to creating namespace packages, +from which the latter is discouraged: + +#. Use `native namespace packages`_. This type of namespace package is defined + in :pep:`420` and is available in Python 3.3 and later. This is recommended if + packages in your namespace only ever need to support Python 3 and + installation via ``pip``. +#. Use `legacy namespace packages`_. This comprises `pkgutil-style namespace packages`_ + and `pkg_resources-style namespace packages`_. + +Native namespace packages +------------------------- + +Python 3.3 added **implicit** namespace packages from :pep:`420`. All that is +required to create a native namespace package is that you just omit +:file:`__init__.py` from the namespace package directory. An example file +structure (following :ref:`src-layout `): + +.. 
code-block:: text + + mynamespace-subpackage-a/ + pyproject.toml # AND/OR setup.py, setup.cfg + src/ + mynamespace/ # namespace package + # No __init__.py here. + subpackage_a/ + # Regular import packages have an __init__.py. + __init__.py + module.py + +It is extremely important that every distribution that uses the namespace +package omits the :file:`__init__.py` or uses a pkgutil-style +:file:`__init__.py`. If any distribution does not, it will cause the namespace +logic to fail and the other sub-packages will not be importable. + +The ``src-layout`` directory structure allows automatic discovery of packages +by most :term:`build backends `. See :ref:`src-layout-vs-flat-layout` +for more information. If however you want to manage exclusions or inclusions of packages +yourself, this is possible to be configured in the top-level :file:`pyproject.toml`: + +.. code-block:: toml + + [build-system] + ... + + [tool.setuptools.packages.find] + where = ["src/"] + include = ["mynamespace.subpackage_a"] + + [project] + name = "mynamespace-subpackage-a" + ... + +The same can be accomplished with a :file:`setup.cfg`: + +.. code-block:: ini + + [options] + package_dir = + =src + packages = find_namespace: + + [options.packages.find] + where = src + +Or :file:`setup.py`: + +.. code-block:: python + + from setuptools import setup, find_namespace_packages + + setup( + name='mynamespace-subpackage-a', + ... + packages=find_namespace_packages(where='src/', include=['mynamespace.subpackage_a']), + package_dir={'': 'src'}, + ) + +:ref:`setuptools` will search the directory structure for implicit namespace +packages by default. + +A complete working example of two native namespace packages can be found in +the `native namespace package example project`_. + +.. _native namespace package example project: + https://github.com/pypa/sample-namespace-packages/tree/master/native + +.. 
note:: Because native and pkgutil-style namespace packages are largely + compatible, you can use native namespace packages in the distributions that + only support Python 3 and pkgutil-style namespace packages in the + distributions that need to support Python 2 and 3. + + +Legacy namespace packages +------------------------- + +These two methods, that were used to create namespace packages prior to :pep:`420`, +are now considered to be obsolete and should not be used unless you need compatibility +with packages already using this method. Also, :doc:`pkg_resources ` +has been deprecated. + +To migrate an existing package, all packages sharing the namespace must be migrated simultaneously. + +.. warning:: While native namespace packages and pkgutil-style namespace + packages are largely compatible, pkg_resources-style namespace packages + are not compatible with the other methods. It's inadvisable to use + different methods in different distributions that provide packages to the + same namespace. + +pkgutil-style namespace packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Python 2.3 introduced the :doc:`pkgutil ` module and the +:py:func:`python:pkgutil.extend_path` function. This can be used to declare namespace +packages that need to be compatible with both Python 2.3+ and Python 3. This +is the recommended approach for the highest level of compatibility. + +To create a pkgutil-style namespace package, you need to provide an +:file:`__init__.py` file for the namespace package: + +.. code-block:: text + + mynamespace-subpackage-a/ + src/ + pyproject.toml # AND/OR setup.cfg, setup.py + mynamespace/ + __init__.py # Namespace package __init__.py + subpackage_a/ + __init__.py # Regular package __init__.py + module.py + +The :file:`__init__.py` file for the namespace package needs to contain +the following: + +.. 
code-block:: python + + __path__ = __import__('pkgutil').extend_path(__path__, __name__) + +**Every** distribution that uses the namespace package must include such +an :file:`__init__.py`. If any distribution does not, it will cause the +namespace logic to fail and the other sub-packages will not be importable. Any +additional code in :file:`__init__.py` will be inaccessible. + +A complete working example of two pkgutil-style namespace packages can be found +in the `pkgutil namespace example project`_. + +.. _extend_path: + https://docs.python.org/3/library/pkgutil.html#pkgutil.extend_path +.. _pkgutil namespace example project: + https://github.com/pypa/sample-namespace-packages/tree/master/pkgutil + + +pkg_resources-style namespace packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:doc:`Setuptools ` provides the `pkg_resources.declare_namespace`_ function and +the ``namespace_packages`` argument to :func:`~setuptools.setup`. Together +these can be used to declare namespace packages. While this approach is no +longer recommended, it is widely present in most existing namespace packages. +If you are creating a new distribution within an existing namespace package that +uses this method then it's recommended to continue using this as the different +methods are not cross-compatible and it's not advisable to try to migrate an +existing package. + +To create a pkg_resources-style namespace package, you need to provide an +:file:`__init__.py` file for the namespace package: + +.. code-block:: text + + mynamespace-subpackage-a/ + src/ + pyproject.toml # AND/OR setup.cfg, setup.py + mynamespace/ + __init__.py # Namespace package __init__.py + subpackage_a/ + __init__.py # Regular package __init__.py + module.py + +The :file:`__init__.py` file for the namespace package needs to contain +the following: + +.. 
code-block:: python
+
+    __import__('pkg_resources').declare_namespace(__name__)
+
+**Every** distribution that uses the namespace package must include such an
+:file:`__init__.py`. If any distribution does not, it will cause the
+namespace logic to fail and the other sub-packages will not be importable. Any
+additional code in :file:`__init__.py` will be inaccessible.
+
+.. note:: Some older recommendations advise the following in the namespace
+   package :file:`__init__.py`:
+
+   .. code-block:: python
+
+       try:
+           __import__('pkg_resources').declare_namespace(__name__)
+       except ImportError:
+           __path__ = __import__('pkgutil').extend_path(__path__, __name__)
+
+   The idea behind this was that in the rare case that setuptools isn't
+   available packages would fall-back to the pkgutil-style packages. This
+   isn't advisable because pkgutil and pkg_resources-style namespace packages
+   are not cross-compatible. If the presence of setuptools is a concern
+   then the package should just explicitly depend on setuptools via
+   ``install_requires``.
+
+Finally, every distribution must provide the ``namespace_packages`` argument
+to :func:`~setuptools.setup` in :file:`setup.py`. For example:
+
+.. code-block:: python
+
+    from setuptools import find_packages, setup
+
+    setup(
+        name='mynamespace-subpackage-a',
+        ...
+        packages=find_packages(),
+        namespace_packages=['mynamespace']
+    )
+
+A complete working example of two pkg_resources-style namespace packages can be found
+in the `pkg_resources namespace example project`_.
+
+.. _pkg_resources.declare_namespace:
+    https://setuptools.readthedocs.io/en/latest/pkg_resources.html#namespace-package-support
+.. 
_pkg_resources namespace example project: + https://github.com/pypa/sample-namespace-packages/tree/master/pkg_resources diff --git a/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst b/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst new file mode 100644 index 000000000..1ee562cf7 --- /dev/null +++ b/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst @@ -0,0 +1,233 @@ +.. _trusted-publishing: + +============================================================================= +Publishing package distribution releases using GitHub Actions CI/CD workflows +============================================================================= + +`GitHub Actions CI/CD`_ allows you to run a series of commands +whenever an event occurs on the GitHub platform. One +popular choice is having a workflow that's triggered by a +``push`` event. +This guide shows you how to publish a Python distribution +whenever a tagged commit is pushed. +It will use the `pypa/gh-action-pypi-publish GitHub Action`_ for +publishing. It also uses GitHub's `upload-artifact`_ and `download-artifact`_ actions +for temporarily storing and downloading the source packages. + +.. attention:: + + This guide *assumes* that you already have a project that you know how to + build distributions for and *it lives on GitHub*. This guide also avoids + details of building platform specific projects. If you have binary + components, check out :ref:`cibuildwheel`'s GitHub Action examples. + +Configuring Trusted Publishing +============================== + +This guide relies on PyPI's `Trusted Publishing`_ implementation to connect +to `GitHub Actions CI/CD`_. This is recommended for security reasons, since +the generated tokens are created for each of your projects +individually and expire automatically. Otherwise, you'll need to generate an +`API token`_ for both PyPI and TestPyPI. 
In case of publishing to third-party
+indexes like :doc:`devpi `, you may need to provide a
+username/password combination.
+
+Since this guide will demonstrate uploading to both
+PyPI and TestPyPI, we'll need two trusted publishers configured.
+The following steps will lead you through creating the "pending" publishers
+for your new :term:`PyPI project `.
+However it is also possible to add `Trusted Publishing`_ to any
+pre-existing project, if you are its owner.
+
+.. attention::
+
+   If you followed earlier versions of this guide, you
+   have created the secrets ``PYPI_API_TOKEN`` and ``TEST_PYPI_API_TOKEN``
+   for direct PyPI and TestPyPI access. These are obsolete now and
+   you should remove them from your GitHub repository and revoke
+   them in your PyPI and TestPyPI account settings in case you are replacing your old setup with the new one.
+
+
+Let's begin! 🚀
+
+1. Go to https://pypi.org/manage/account/publishing/.
+2. Fill in the name you wish to publish your new
+   :term:`PyPI project ` under
+   (the ``name`` value in your ``setup.cfg`` or ``pyproject.toml``),
+   the GitHub repository owner's name (org or user),
+   and repository name, and the name of the release workflow file under
+   the ``.github/`` folder, see :ref:`workflow-definition`.
+   Finally, add the name of the GitHub Environment
+   (``pypi``) we're going to set up under your repository.
+   Register the trusted publisher.
+3. Now, go to https://test.pypi.org/manage/account/publishing/ and repeat
+   the second step, but this time, enter ``testpypi`` as the name of the
+   GitHub Environment.
+4. Your "pending" publishers are now ready for their first use and will
+   create your projects automatically once you use them
+   for the first time.
+
+   .. note::
+
+      If you don't have a TestPyPI account, you'll need to
+      create it. It's not the same as a regular PyPI account.
+
+
+   .. attention::
+
+      For security reasons, you must require `manual approval `_
+      on each run for the ``pypi`` environment.
+
+
+.. 
_workflow-definition: + +Creating a workflow definition +============================== + +GitHub CI/CD workflows are declared in YAML files stored in the +``.github/workflows/`` directory of your repository. + +Let's create a ``.github/workflows/publish-to-test-pypi.yml`` +file. + +Start it with a meaningful name and define the event that +should make GitHub run this workflow: + +.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + :end-before: jobs: + +Checking out the project and building distributions +=================================================== + +We will have to define two jobs to publish to PyPI +and TestPyPI respectively, and an additional job to +build the distribution packages. + +First, we'll define the job for building the dist packages of +your project and storing them for later use: + +.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + :start-at: jobs: + :end-before: Install pypa/build + +This will download your repository into the CI runner and then +install and activate the newest available Python 3 release. + +And now we can build the dists from source and store them. +In this example, we'll use the ``build`` package. +So add this to the steps list: + +.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + :start-at: Install pypa/build + :end-before: publish-to-pypi + +Defining a workflow job environment +=================================== + +Now, let's add initial setup for our job that will publish to PyPI. +It's a process that will execute commands that we'll define later. +In this guide, we'll use the latest stable Ubuntu LTS version +provided by GitHub Actions. This also defines a GitHub Environment +for the job to run in its context and a URL to be displayed in GitHub's +UI nicely. 
Additionally, it allows acquiring an OpenID Connect token +that the ``pypi-publish`` action needs to implement secretless +Trusted Publishing to PyPI. + +.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + :start-after: path: dist/ + :end-before: steps: + +This will also ensure that the PyPI publishing workflow is only triggered +if the current commit is tagged. + +Publishing the distribution to PyPI +=================================== + +Finally, add the following steps at the end: + +.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + :start-after: id-token: write + :end-before: publish-to-testpypi: + +This step uses the `pypa/gh-action-pypi-publish`_ GitHub +Action: after the stored distribution package has been +downloaded by the `download-artifact`_ action, it uploads +the contents of the ``dist/`` folder into PyPI unconditionally. + +.. tip:: + + Starting with version + `v1.11.0 `_, + `pypa/gh-action-pypi-publish`_ generates and uploads :pep:`740`-compatible + attestations for each distribution by default. No additional manual + signing steps are required. + + +Separate workflow for publishing to TestPyPI +============================================ + +Now, repeat these steps and create another job for +publishing to the TestPyPI package index under the ``jobs`` +section: + +.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + :start-at: publish-to-testpypi + +.. tip:: + + Requiring manual approvals in the ``testpypi`` GitHub Environment is typically unnecessary as it's designed to run on each commit to the main branch and is often used to indicate a healthy release publishing pipeline. + + +The whole CI/CD workflow +======================== + +This paragraph showcases the whole workflow after following the above guide. + +.. collapse:: Click here to display the entire GitHub Actions CI/CD workflow definition + + .. 
literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + :language: yaml + +That's all, folks! +================== + +Now, whenever you push a tagged commit to your Git repository remote +on GitHub, this workflow will publish it to PyPI. +And it'll publish any push to TestPyPI, which is useful for +providing test builds to your alpha users as well as making +sure that your release pipeline remains healthy! + +.. attention:: + + If your repository has frequent commit activity and every push is uploaded + to TestPyPI as described, the project might exceed the + `PyPI project size limit `_. + The limit could be increased, but a better solution may be to + use a PyPI-compatible server like :ref:`pypiserver` in the CI for testing purposes. + +.. note:: + + It is recommended to keep the integrated GitHub Actions at their latest + versions, updating them frequently. + + +.. _API token: https://pypi.org/help/#apitoken
.. _GitHub Actions CI/CD: https://github.com/features/actions
.. _join the waitlist: https://github.com/features/actions/signup
.. _pypa/gh-action-pypi-publish:
   https://github.com/pypa/gh-action-pypi-publish
.. _`pypa/gh-action-pypi-publish GitHub Action`:
   https://github.com/marketplace/actions/pypi-publish
.. _`download-artifact`:
   https://github.com/actions/download-artifact
.. _`upload-artifact`:
   https://github.com/actions/upload-artifact
.. _Secrets:
   https://docs.github.com/en/actions/reference/encrypted-secrets
.. _Trusted Publishing: https://docs.pypi.org/trusted-publishers/ diff --git a/source/guides/section-build-and-publish.rst b/source/guides/section-build-and-publish.rst new file mode 100644 index 000000000..52f827553 --- /dev/null +++ b/source/guides/section-build-and-publish.rst @@ -0,0 +1,19 @@ +======================= +Building and Publishing +======================= + +.. 
toctree:: + :titlesonly: + + writing-pyproject-toml + distributing-packages-using-setuptools + dropping-older-python-versions + packaging-binary-extensions + packaging-namespace-packages + creating-command-line-tools + creating-and-discovering-plugins + using-testpypi + making-a-pypi-friendly-readme + publishing-package-distribution-releases-using-github-actions-ci-cd-workflows + modernize-setup-py-project + licensing-examples-and-user-scenarios diff --git a/source/guides/section-hosting.rst b/source/guides/section-hosting.rst new file mode 100644 index 000000000..f8f708c18 --- /dev/null +++ b/source/guides/section-hosting.rst @@ -0,0 +1,9 @@ +======= +Hosting +======= + +.. toctree:: + :titlesonly: + + index-mirrors-and-caches + hosting-your-own-index diff --git a/source/guides/section-install.rst b/source/guides/section-install.rst new file mode 100644 index 000000000..515092b4d --- /dev/null +++ b/source/guides/section-install.rst @@ -0,0 +1,12 @@ +============ +Installation +============ + +.. toctree:: + :titlesonly: + + installing-using-pip-and-virtual-environments + installing-using-virtualenv + installing-stand-alone-command-line-tools + installing-using-linux-tools + installing-scientific-packages diff --git a/source/guides/single-sourcing-package-version.rst b/source/guides/single-sourcing-package-version.rst new file mode 100644 index 000000000..7ed3d87da --- /dev/null +++ b/source/guides/single-sourcing-package-version.rst @@ -0,0 +1,8 @@ +:orphan: + +.. meta:: + :http-equiv=refresh: 0; url=../../discussions/single-source-version/ + +Redirecting stale single-source package version link... + +If the page doesn't automatically refresh, see :ref:`single-source-version`. 
diff --git a/source/multiple_python_versions.rst b/source/guides/supporting-multiple-python-versions.rst similarity index 82% rename from source/multiple_python_versions.rst rename to source/guides/supporting-multiple-python-versions.rst index 85730cfd6..8c128ed91 100644 --- a/source/multiple_python_versions.rst +++ b/source/guides/supporting-multiple-python-versions.rst @@ -1,15 +1,14 @@ +:orphan: + .. _`Supporting multiple Python versions`: =================================== Supporting multiple Python versions =================================== -:Page Status: Incomplete +:Page Status: Obsolete :Last Reviewed: 2014-12-24 -.. contents:: Contents - :local: - :: @@ -45,12 +44,12 @@ correctly on all the target Python versions (and OSs!) could be very time-consuming. Fortunately, several tools are available for dealing with this, and these will briefly be discussed here. -Automated Testing and Continuous Integration +Automated testing and continuous integration -------------------------------------------- Several hosted services for automated testing are available. These services will typically monitor your source code repository (e.g. at -`Github `_ or `Bitbucket `_) +`GitHub `_ or `Bitbucket `_) and run your project's test suite every time a new commit is made. These services also offer facilities to run your project's test suite on @@ -58,15 +57,15 @@ These services also offer facilities to run your project's test suite on will work, without the developer having to perform such tests themselves. Wikipedia has an extensive `comparison -`_ +`_ of many continuous-integration systems. There are two hosted services which when used in conjunction provide automated testing across Linux, Mac and Windows: - - `Travis CI `_ provides both a Linux and a Mac OSX + - `Travis CI `_ provides both a Linux and a macOS environment. The Linux environment is Ubuntu 12.04 LTS Server Edition 64 bit - while the OSX is 10.9.2 at the time of writing. 
- - `Appveyor `_ provides a Windows environment + while the macOS is 10.9.2 at the time of writing. + - `Appveyor `_ provides a Windows environment (Windows Server 2012). :: @@ -74,11 +73,11 @@ Windows: TODO Either link to or provide example .yml files for these two services. - TODO How do we keep the Travis Linux and OSX versions up-to-date in this + TODO How do we keep the Travis Linux and macOS versions up-to-date in this document? Both `Travis CI`_ and Appveyor_ require a `YAML -`_-formatted file as specification for the instructions +`_-formatted file as specification for the instructions for testing. If any tests fail, the output log for that specific configuration can be inspected. @@ -88,12 +87,12 @@ with a single-source strategy, there are a number of options. Tools for single-source Python packages ---------------------------------------- -`six `_ is a tool developed by Benjamin Peterson +`six `_ is a tool developed by Benjamin Peterson for wrapping over the differences between Python 2 and Python 3. The six_ package has enjoyed widespread use and may be regarded as a reliable way to -write a single-source python module that can be use in both Python 2 and 3. +write a single-source Python module that can be use in both Python 2 and 3. The six_ module can be used from as early as Python 2.5. A tool called -`modernize `_, developed by Armin +`modernize `_, developed by Armin Ronacher, can be used to automatically apply the code modifications provided by six_. @@ -120,13 +119,13 @@ package: with python-future_, the ``futurize`` script can be called with the provides for forward-compatibility to Python 3. Any remaining compatibility problems would require manual changes. -What's in Which Python? +What's in which Python? ----------------------- Ned Batchelder provides a list of changes in each Python release for -`Python 2 `__ -and separately -for `Python 3 `__. +`Python 2 `__, +`Python 3.0-3.3 `__ and +`Python 3.4-3.6 `__. 
These lists may be used to check whether any changes between Python versions may affect your package. diff --git a/source/appveyor.rst b/source/guides/supporting-windows-using-appveyor.rst similarity index 75% rename from source/appveyor.rst rename to source/guides/supporting-windows-using-appveyor.rst index f749390bc..0044d8c5e 100644 --- a/source/appveyor.rst +++ b/source/guides/supporting-windows-using-appveyor.rst @@ -1,8 +1,10 @@ +:orphan: + ================================= Supporting Windows using Appveyor ================================= -:Page Status: Incomplete +:Page Status: Obsolete :Last Reviewed: 2015-12-03 This section covers how to use the free `Appveyor`_ continuous integration @@ -10,9 +12,6 @@ service to provide Windows support for your project. This includes testing the code on Windows, and building Windows-targeted binaries for projects that use C extensions. -.. contents:: Contents - :local: - Background ========== @@ -23,26 +22,26 @@ non-trivial, and may require buying software licenses. The Appveyor service is a continuous integration service, much like the better-known `Travis`_ service that is commonly used for testing by projects -hosted on `Github`_. However, unlike Travis, the build workers on Appveyor are +hosted on `GitHub`_. However, unlike Travis, the build workers on Appveyor are Windows hosts and have the necessary compilers installed to build Python extensions. Windows users typically do not have access to a C compiler, and therefore are reliant on projects that use C extensions distributing binary wheels on PyPI in -order for the distribution to be installable via ``pip install ``. By +order for the distribution to be installable via ``python -m pip install ``. By using Appveyor as a build service (even if not using it for testing) it is possible for projects without a dedicated Windows environment to provide Windows-targeted binaries. 
-Setting Up +Setting up ========== In order to use Appveyor to build Windows wheels for your project, you must have an account on the service. Instructions on setting up an account are given -in `the Appveyor documentation `__. The free tier +in `the Appveyor documentation `__. The free tier of account is perfectly adequate for open source projects. -Appveyor provides integration with `Github`_ and `Bitbucket`_, so as long as +Appveyor provides integration with `GitHub`_ and `Bitbucket`_, so as long as your project is hosted on one of those two services, setting up Appveyor integration is straightforward. @@ -54,9 +53,9 @@ Adding Appveyor support to your project ======================================= In order to define how Appveyor should build your project, you need to add an -``appveyor.yml`` file to your project. The full details of what can be included -in the file are covered in the Appveyor documentation. This guide will provide -the details necessary to set up wheel builds. +:file:`appveyor.yml` file to your project. The full details of what can be +included in the file are covered in the Appveyor documentation. This guide will +provide the details necessary to set up wheel builds. Appveyor includes by default all of the compiler toolchains needed to build extensions for Python. For Python 2.7, 3.5+ and 32-bit versions of 3.3 and 3.4, @@ -68,13 +67,13 @@ Visual Studio used includes 64-bit compilers with no additional setup). appveyor.yml ------------ -.. literalinclude:: code/appveyor.yml +.. literalinclude:: appveyor-sample/appveyor.yml :language: yaml :linenos: -This file can be downloaded from `here `__. +This file can be downloaded from `here `__. -The ``appveyor.yml`` file must be located in the root directory of your +The :file:`appveyor.yml` file must be located in the root directory of your project. It is in ``YAML`` format, and consists of a number of sections. 
The ``environment`` section is the key to defining the Python versions for @@ -101,10 +100,10 @@ The ``test_script`` section is where you will run your project's tests. The supplied file runs your test suite using ``setup.py test``. If you are only interested in building wheels, and not in running your tests on Windows, you can replace this section with a dummy command such as ``echo Skipped Tests``. -You may wish to use another test tool, such as ``nose`` or ``py.test``. Or you -may wish to use a test driver like ``tox`` - however if you are using ``tox`` -there are some additional configuration changes you will need to consider, -which are described below. +You may wish to use another test tool, such as ``nose`` or :file:`py.test`. Or +you may wish to use a test driver like ``tox`` - however if you are using +``tox`` there are some additional configuration changes you will need to +consider, which are described below. The ``after_test`` runs once your tests have completed, and so is where the wheels should be built. Assuming your project uses the recommended tools @@ -118,12 +117,12 @@ tests to fail on Windows, you can skip them as described above. Support script -------------- -The ``appveyor.yml`` file relies on a single support script, which sets up the -environment to use the SDK compiler for 64-bit builds on Python 3.3 and 3.4. -For projects which do not need a compiler, or which don't support 3.3 or 3.4 on -64-bit Windows, only the ``appveyor.yml`` file is needed. +The :file:`appveyor.yml` file relies on a single support script, which sets up +the environment to use the SDK compiler for 64-bit builds on Python 3.3 and +3.4. For projects which do not need a compiler, or which don't support 3.3 or +3.4 on 64-bit Windows, only the :file:`appveyor.yml` file is needed. -`build.cmd `__ +`build.cmd `__ is a Windows batch script that runs a single command in an environment with the appropriate compiler for the selected Python version. 
All you need to do is to set the single environment variable ``DISTUTILS_USE_SDK`` to a value of ``1`` @@ -143,13 +142,13 @@ of links, one of which is "Artifacts". That page will include a list of links to the wheels for that Python version / architecture. You can download those wheels and upload them to PyPI as part of your release process. -Additional Notes +Additional notes ================ Testing with tox ---------------- -Many projects use the `Tox`_ tool to run their tests. It ensures that tests +Many projects use the :doc:`Tox ` tool to run their tests. It ensures that tests are run in an isolated environment using the exact files that will be distributed by the project. @@ -172,10 +171,11 @@ other CI systems). - ``INCLUDE`` - ``LIB`` - The ``passenv`` option can be set in your ``tox.ini``, or if you prefer to avoid - adding Windows-specific settings to your general project files, it can be set by - setting the ``TOX_TESTENV_PASSENV`` environment variable. The supplied ``build.cmd`` - script does this by default whenever ``DISTUTILS_USE_SDK`` is set. + The ``passenv`` option can be set in your :file:`tox.ini`, or if you prefer + to avoid adding Windows-specific settings to your general project files, it + can be set by setting the ``TOX_TESTENV_PASSENV`` environment variable. The + supplied :file:`build.cmd` script does this by default whenever + ``DISTUTILS_USE_SDK`` is set. 2. When used interactively, ``tox`` allows you to run your tests against multiple environments (often, this means multiple Python versions). This feature is not as @@ -194,21 +194,22 @@ other CI systems). will be run with the configured interpreter. In order to support running under the ``py`` environment, it is possible that - projects with complex ``tox`` configurations might need to modify their ``tox.ini`` - file. Doing so is, however, outside the scope of this document. + projects with complex ``tox`` configurations might need to modify their + :file:`tox.ini` file. 
Doing so is, however, outside the scope of this + document. Automatically uploading wheels ------------------------------ It is possible to request Appveyor to automatically upload wheels. There is a -``deployment`` step available in ``appveyor.yml`` that can be used to (for +``deployment`` step available in :file:`appveyor.yml` that can be used to (for example) copy the built artifacts to a FTP site, or an Amazon S3 instance. Documentation on how to do this is included in the Appveyor guides. Alternatively, it would be possible to add a ``twine upload`` step to the -build. The supplied ``appveyor.yml`` does not do this, as it is not clear that -uploading new wheels after every commit is desirable (although some projects -may wish to do this). +build. The supplied :file:`appveyor.yml` does not do this, as it is not clear +that uploading new wheels after every commit is desirable (although some +projects may wish to do this). External dependencies --------------------- @@ -216,11 +217,12 @@ External dependencies The supplied scripts will successfully build any distribution that does not rely on 3rd party external libraries for the build. -It is possible to add steps to the ``appveyor.yml`` configuration (typically -in the "install" section) to download and/or build external libraries needed by -the distribution. And if needed, it is possible to add extra configuration for -the build to supply the location of these libraries to the compiler. However, -this level of configuration is beyond the scope of this document. +It is possible to add steps to the :file:`appveyor.yml` configuration +(typically in the "install" section) to download and/or build external +libraries needed by the distribution. And if needed, it is possible to add +extra configuration for the build to supply the location of these libraries to +the compiler. However, this level of configuration is beyond the scope of this +document. 
Support scripts @@ -228,14 +230,13 @@ Support scripts For reference, the SDK setup support script is listed here: -``code/build.cmd`` +``appveyor-sample/build.cmd`` -.. literalinclude:: code/build.cmd +.. literalinclude:: appveyor-sample/build.cmd :language: bat :linenos: -.. _Appveyor: http://www.appveyor.com/ +.. _Appveyor: https://www.appveyor.com/ .. _Travis: https://travis-ci.org/ -.. _Github: https://github.org/ +.. _GitHub: https://github.com .. _Bitbucket: https://bitbucket.org/ -.. _Tox: http://tox.testrun.org diff --git a/source/guides/tool-recommendations.rst b/source/guides/tool-recommendations.rst new file mode 100644 index 000000000..1ba36ed61 --- /dev/null +++ b/source/guides/tool-recommendations.rst @@ -0,0 +1,185 @@ +.. _`Tool Recommendations`: + +==================== +Tool recommendations +==================== + +The Python packaging landscape consists of many different tools. For many tasks, +the :term:`Python Packaging Authority ` +(PyPA, the working group which encompasses many packaging tools and +maintains this guide) purposefully does not make a blanket recommendation; for +example, the reason there are many build backends is that the landscape was +opened up in order to enable the development of new backends serving certain users' +needs better than the previously unique backend, setuptools. This guide does +point to some tools that are widely recognized, and also makes some +recommendations of tools that you should *not* use because they are deprecated +or insecure. + + +Virtual environments +==================== + +The standard tools to create and use virtual environments manually are +:ref:`virtualenv` (PyPA project) and :doc:`venv ` (part of +the Python standard library, though missing some features of virtualenv). + + +Installing packages +=================== + +:ref:`Pip` is the standard tool to install packages from :term:`PyPI `. You may want to read pip's recommendations for +:doc:`secure installs `. 
Pip is available by default +in most Python installations through the standard library package +:doc:`ensurepip `. + +Alternatively, consider :ref:`pipx` for the specific use case of installing Python +applications that are distributed through PyPI and run from the command line. +Pipx is a wrapper around pip and venv that installs each +application into a dedicated virtual environment. This avoids conflicts between +the dependencies of different applications, and also with system-wide applications +making use of the same Python interpreter (especially on Linux). + +For scientific software specifically, consider :ref:`Conda` or :ref:`Spack`. + +.. todo:: Write a "pip vs. Conda" comparison, here or in a new discussion. + +Do **not** use ``easy_install`` (part of :ref:`setuptools`), which is deprecated +in favor of pip (see :ref:`pip vs easy_install` for details). Likewise, do +**not** use ``python setup.py install`` or ``python setup.py develop``, which +are also deprecated (see :ref:`setup-py-deprecated` for background and +:ref:`modernize-setup-py-project` for migration advice). + + +Lock files +========== + +:ref:`pip-tools` and :ref:`Pipenv` are two recognized tools to create lock +files, which contain the exact versions of all packages installed into an +environment, for reproducibility purposes. + + +Build backends +============== + +.. important:: + + Please, remember: this document does not seek to steer the reader towards + a particular tool, only to enumerate common tools. Different use cases often + need specialized workflows. + +Popular :term:`build backends ` for pure-Python packages include, +in alphabetical order: + +- :doc:`Flit-core ` -- developed with but separate from :ref:`Flit`. + A minimal and opinionated build backend. It does not support plugins. + +- Hatchling_ -- developed with but separate from :ref:`Hatch`. Supports plugins. + +- PDM-backend_ -- developed with but separate from :ref:`PDM`. Supports plugins. 
+ +- Poetry-core_ -- developed with but separate from :ref:`Poetry`. Supports + plugins. + + Unlike other backends on this list, Poetry-core does not support the standard + :ref:`[project] table ` (it uses a different format, + in the ``[tool.poetry]`` table). + +- :ref:`setuptools`, which used to be the only build backend. Supports plugins. + + .. caution:: + + If you use setuptools, please be aware that some features that predate + standardisation efforts are now deprecated and only *temporarily kept* + for compatibility. + + In particular, do **not** use direct ``python setup.py`` invocations. On the + other hand, configuring setuptools with a :file:`setup.py` file is still fully + supported, although it is recommended to use the modern :ref:`[project] table + in pyproject.toml ` (or :file:`setup.cfg`) whenever possible and keep + :file:`setup.py` only if programmatic configuration is needed. See + :ref:`setup-py-deprecated`. + + Other examples of deprecated features you should **not** use include the + ``setup_requires`` argument to ``setup()`` (use the :ref:`[build-system] table + ` in :file:`pyproject.toml` instead), and + the ``easy_install`` command (cf. :ref:`pip vs easy_install`). + +Do **not** use :ref:`distutils`, which is deprecated, and has been removed from +the standard library in Python 3.12, although it still remains available from +setuptools. + +For packages with :term:`extension modules `, it is best to use +a build system with dedicated support for the language the extension is written in, +for example: + +- :ref:`setuptools` -- natively supports C and C++ (with third-party plugins for Go and Rust), +- :ref:`meson-python` -- C, C++, Fortran, Rust, and other languages supported by Meson, +- :ref:`scikit-build-core` -- C, C++, Fortran, and other languages supported by CMake, +- :ref:`maturin` -- Rust, via Cargo. 
+ + +Building distributions +====================== + +The standard tool to build :term:`source distributions ` and :term:`wheels ` for uploading to PyPI is :ref:`build`. It +will invoke whichever build backend you :ref:`declared +` in :file:`pyproject.toml`. + +Do **not** use ``python setup.py sdist`` and ``python setup.py bdist_wheel`` for +this task. All direct invocations of :file:`setup.py` are :ref:`deprecated +`. + +If you have :term:`extension modules ` and want to distribute +wheels for multiple platforms, use :ref:`cibuildwheel` as part of your CI setup +to build distributable wheels. + + +Uploading to PyPI +================= + +For projects hosted on or published via supported CI/CD platforms, it is +recommended to use the :ref:`Trusted Publishing `, which +allows the package to be securely uploaded to PyPI from a CI/CD workflow +without a manually configured API token. + +As of November 2024, PyPI supports the following platforms as Trusted Publishing +providers: + +* GitHub Actions (on ``https://github.com``) +* GitLab CI/CD (on ``https://gitlab.com``) +* ActiveState +* Google Cloud + +The other available method is to upload the package manually using :ref:`twine`. + +.. danger:: + + **Never** use ``python setup.py upload`` for this task. In addition to being + :ref:`deprecated `, it is insecure. + + +Workflow tools +============== + +These tools are environment managers that automatically manage virtual +environments for a project. They also act as "task runners", allowing you to +define and invoke tasks such as running tests, compiling documentation, +regenerating some files, etc. Some of them provide shortcuts for building +distributions and uploading to PyPI, and some support lock files for +applications. They often call the tools mentioned above under the hood. In +alphabetical order: + +- :ref:`Flit`, +- :ref:`Hatch`, +- :doc:`nox `, +- :ref:`PDM`, +- :ref:`Pipenv`, +- :ref:`Poetry`, +- :doc:`tox `. + + +.. 
_hatchling: https://pypi.org/project/hatchling/ +.. _pdm-backend: https://backend.pdm-project.org +.. _poetry-core: https://pypi.org/project/poetry-core/ diff --git a/source/guides/using-manifest-in.rst b/source/guides/using-manifest-in.rst new file mode 100644 index 000000000..24bf154a1 --- /dev/null +++ b/source/guides/using-manifest-in.rst @@ -0,0 +1,9 @@ +:orphan: + +============================================================ +Including files in source distributions with ``MANIFEST.in`` +============================================================ + +The information on this page has moved to +:doc:`setuptools:userguide/miscellaneous` in the setuptools +documentation. diff --git a/source/guides/using-testpypi.rst b/source/guides/using-testpypi.rst new file mode 100644 index 000000000..1dbc82130 --- /dev/null +++ b/source/guides/using-testpypi.rst @@ -0,0 +1,84 @@ +.. _using-test-pypi: + +============== +Using TestPyPI +============== + +``TestPyPI`` is a separate instance of the :term:`Python Package Index (PyPI)` +that allows you to try out the distribution tools and process without worrying +about affecting the real index. TestPyPI is hosted at +`test.pypi.org `_ + +Registering your account +------------------------ + +Because TestPyPI has a separate database from the live PyPI, you'll need a +separate user account specifically for TestPyPI. Go to +https://test.pypi.org/account/register/ to register your account. + +.. note:: The database for TestPyPI may be periodically pruned, so it is not + unusual for user accounts to be deleted. + + +Using TestPyPI with Twine +------------------------- + +You can upload your distributions to TestPyPI using :ref:`twine` by specifying +the ``--repository`` flag: + +.. code-block:: bash + + twine upload --repository testpypi dist/* + +You can see if your package has successfully uploaded by navigating to the URL +``https://test.pypi.org/project/`` where ``sampleproject`` is +the name of your project that you uploaded. 
It may take a minute or two for +your project to appear on the site. + +Using TestPyPI with pip +----------------------- + +You can tell :ref:`pip` to download packages from TestPyPI instead of PyPI by +specifying the ``--index-url`` flag: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --index-url https://test.pypi.org/simple/ your-package + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --index-url https://test.pypi.org/simple/ your-package + +If you want to allow pip to also download packages from PyPI, you can +specify ``--extra-index-url`` to point to PyPI. This is useful when the package +you're testing has dependencies: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ your-package + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ your-package + +Setting up TestPyPI in :file:`.pypirc` +-------------------------------------- + +If you want to avoid being prompted for your username and password every time, +you can configure TestPyPI in your :file:`$HOME/.pypirc`: + +.. code:: ini + + [testpypi] + username = __token__ + password = + +For more details, see the :ref:`specification ` for :file:`.pypirc`. diff --git a/source/guides/writing-pyproject-toml.rst b/source/guides/writing-pyproject-toml.rst new file mode 100644 index 000000000..318fe0d51 --- /dev/null +++ b/source/guides/writing-pyproject-toml.rst @@ -0,0 +1,593 @@ +.. _writing-pyproject-toml: + +=============================== +Writing your ``pyproject.toml`` +=============================== + +``pyproject.toml`` is a configuration file used by packaging tools, as +well as other tools such as linters, type checkers, etc. There are +three possible TOML tables in this file. + +- The ``[build-system]`` table is **strongly recommended**. 
It allows + you to declare which :term:`build backend` you use and which other + dependencies are needed to build your project. + +- The ``[project]`` table is the format that most build backends use to specify + your project's basic metadata, such as the dependencies, your name, etc. + +- The ``[tool]`` table has tool-specific subtables, e.g., ``[tool.hatch]``, + ``[tool.black]``, ``[tool.mypy]``. We only touch upon this table here because + its contents are defined by each tool. Consult the particular tool's + documentation to know what it can contain. + +.. note:: + + The ``[build-system]`` table should always be present, + regardless of which build backend you use (``[build-system]`` *defines* the + build tool you use). + + On the other hand, the ``[project]`` table is understood by *most* build + backends, but some build backends use a different format. + + A notable exception is Poetry_, which before version 2.0 (released January + 5, 2025) did not use the ``[project]`` table, it used the ``[tool.poetry]`` + table instead. With version 2.0, it supports both. + Also, the setuptools_ build backend supports both the ``[project]`` table, + and the older format in ``setup.cfg`` or ``setup.py``. + + For new projects, use the ``[project]`` table, and keep ``setup.py`` only if + some programmatic configuration is needed (such as building C extensions), + but the ``setup.cfg`` and ``setup.py`` formats are still valid. See + :ref:`setup-py-deprecated`. + + +.. _pyproject-guide-build-system-table: + +Declaring the build backend +=========================== + +The ``[build-system]`` table contains a ``build-backend`` key, which specifies +the build backend to be used. It also contains a ``requires`` key, which is a +list of dependencies needed to build the project -- this is typically just the +build backend package, but it may also contain additional dependencies. You can +also constrain the versions, e.g., ``requires = ["setuptools >= 61.0"]``. 
+ +Usually, you'll just copy what your build backend's documentation +suggests (after :ref:`choosing your build backend `). +Here are the values for some common build backends: + +.. include:: ../shared/build-backend-tabs.rst + + +Static vs. dynamic metadata +=========================== + +The rest of this guide is devoted to the ``[project]`` table. + +Most of the time, you will directly write the value of a ``[project]`` +field. For example: ``requires-python = ">= 3.8"``, or ``version = +"1.0"``. + +However, in some cases, it is useful to let your build backend compute +the metadata for you. For example: many build backends can read the +version from a ``__version__`` attribute in your code, a Git tag, or +similar. In such cases, you should mark the field as dynamic using, e.g., + +.. code-block:: toml + + [project] + dynamic = ["version"] + + +When a field is dynamic, it is the build backend's responsibility to +fill it. Consult your build backend's documentation to learn how it +does it. + + +Basic information +================= + +.. _`setup() name`: + +``name`` +-------- + +Put the name of your project on PyPI. This field is required and is the +only field that cannot be marked as dynamic. + +.. code-block:: toml + + [project] + name = "spam-eggs" + +The project name must consist of ASCII letters, digits, underscores "``_``", +hyphens "``-``" and periods "``.``". It must not start or end with an +underscore, hyphen or period. + +Comparison of project names is case insensitive and treats arbitrarily long runs +of underscores, hyphens, and/or periods as equal. For example, if you register +a project named ``cool-stuff``, users will be able to download it or declare a +dependency on it using any of the following spellings: ``Cool-Stuff``, +``cool.stuff``, ``COOL_STUFF``, ``CoOl__-.-__sTuFF``. + + +``version`` +----------- + +Put the version of your project. + +.. 
code-block:: toml + + [project] + version = "2020.0.0" + +Some more complicated version specifiers like ``2020.0.0a1`` (for an alpha +release) are possible; see the :ref:`specification ` +for full details. + +This field is required, although it is often marked as dynamic using + +.. code-block:: toml + + [project] + dynamic = ["version"] + +This allows use cases such as filling the version from a ``__version__`` +attribute or a Git tag. Consult the :ref:`single-source-version` +discussion for more details. + + +Dependencies and requirements +============================= + +``dependencies``/``optional-dependencies`` +------------------------------------------ + +If your project has dependencies, list them like this: + +.. code-block:: toml + + [project] + dependencies = [ + "httpx", + "gidgethub[httpx]>4.0.0", + "django>2.1; os_name != 'nt'", + "django>2.0; os_name == 'nt'", + ] + +See :ref:`Dependency specifiers ` for the full +syntax you can use to constrain versions. + +You may want to make some of your dependencies optional, if they are +only needed for a specific feature of your package. In that case, put +them in ``optional-dependencies``. + +.. code-block:: toml + + [project.optional-dependencies] + gui = ["PyQt5"] + cli = [ + "rich", + "click", + ] + +Each of the keys defines a "packaging extra". In the example above, one +could use, e.g., ``pip install your-project-name[gui]`` to install your +project with GUI support, adding the PyQt5 dependency. + + +.. _requires-python: +.. _python_requires: + +``requires-python`` +------------------- + +This lets you declare the minimum version of Python that you support +[#requires-python-upper-bounds]_. + +.. code-block:: toml + + [project] + requires-python = ">= 3.8" + + +.. _`console_scripts`: + +Creating executable scripts +=========================== + +To install a command as part of your package, declare it in the +``[project.scripts]`` table. + +.. 
code-block:: toml + + [project.scripts] + spam-cli = "spam:main_cli" + +In this example, after installing your project, a ``spam-cli`` command +will be available. Executing this command will do the equivalent of +``import sys; from spam import main_cli; sys.exit(main_cli())``. + +On Windows, scripts packaged this way need a terminal, so if you launch +them from within a graphical application, they will make a terminal pop +up. To prevent this from happening, use the ``[project.gui-scripts]`` +table instead of ``[project.scripts]``. + +.. code-block:: toml + + [project.gui-scripts] + spam-gui = "spam:main_gui" + +In that case, launching your script from the command line will give back +control immediately, leaving the script to run in the background. + +The difference between ``[project.scripts]`` and +``[project.gui-scripts]`` is only relevant on Windows. + + + +About your project +================== + +``authors``/``maintainers`` +--------------------------- + +Both of these fields contain lists of people identified by a name and/or +an email address. + +.. code-block:: toml + + [project] + authors = [ + {name = "Pradyun Gedam", email = "pradyun@example.com"}, + {name = "Tzu-Ping Chung", email = "tzu-ping@example.com"}, + {name = "Another person"}, + {email = "different.person@example.com"}, + ] + maintainers = [ + {name = "Brett Cannon", email = "brett@example.com"} + ] + + +.. _description: + +``description`` +--------------- + +This should be a one-line description of your project, to show as the "headline" +of your project page on PyPI (`example `_), and other places such as +lists of search results (`example `_). + +.. code-block:: toml + + [project] + description = "Lovely Spam! Wonderful Spam!" + + +``readme`` +---------- + +This is a longer description of your project, to display on your project +page on PyPI. Typically, your project will have a ``README.md`` or +``README.rst`` file and you just put its file name here. + +.. 
code-block:: toml + + [project] + readme = "README.md" + +The README's format is auto-detected from the extension: + +- ``README.md`` → `GitHub-flavored Markdown `_, +- ``README.rst`` → `reStructuredText `_ (without Sphinx extensions). + +You can also specify the format explicitly, like this: + +.. code-block:: toml + + [project] + readme = {file = "README.txt", content-type = "text/markdown"} + # or + readme = {file = "README.txt", content-type = "text/x-rst"} + + +.. _license-and-license-files: + +``license`` and ``license-files`` +--------------------------------- + +As per :pep:`639`, licenses should be declared with two fields: + +- ``license`` is an :term:`SPDX license expression ` consisting + of one or more :term:`license identifiers `. +- ``license-files`` is a list of license file glob patterns. + +A previous PEP had specified ``license`` to be a table with a ``file`` or a +``text`` key; this format is now deprecated. Most :term:`build backends` now support the new format as shown in the following table. + +.. list-table:: build backend versions that introduced :pep:`639` support + :header-rows: 1 + + * - hatchling + - setuptools + - flit-core [#flit-core-pep639]_ + - pdm-backend + - poetry-core + * - 1.27.0 + - 77.0.3 + - 3.12 + - 2.4.0 + - `not yet `_ + + +.. _license: + +``license`` +''''''''''' + +The new format for ``license`` is a valid :term:`SPDX license expression ` +consisting of one or more :term:`license identifiers `. +The full license list is available at the +`SPDX license list page `_. The supported list version is +3.17 or any later compatible one. + +.. code-block:: toml + + [project] + license = "GPL-3.0-or-later" + # or + license = "MIT AND (Apache-2.0 OR BSD-2-Clause)" + +.. note:: If you get a build error that ``license`` should be a dict/table, + your build backend doesn't yet support the new format. See the + `above section `_ for more context. + The now deprecated format is `described in PEP 621 `__.
+ +As a general rule, it is a good idea to use a standard, well-known +license, both to avoid confusion and because some organizations avoid software +whose license is unapproved. + +If your project is licensed with a license that doesn't have an existing SPDX +identifier, you can create a custom one in the format ``LicenseRef-[idstring]``. +The custom identifiers must follow the SPDX specification, +`clause 10.1 `_ of version 2.2 or any later compatible one. + +.. code-block:: toml + + [project] + license = "LicenseRef-My-Custom-License" + + +.. _license-files: + +``license-files`` +''''''''''''''''' + +This is a list of license files and files containing other legal +information you want to distribute with your package. + +.. code-block:: toml + + [project] + license-files = ["LICEN[CS]E*", "vendored/licenses/*.txt", "AUTHORS.md"] + +The glob patterns must follow the specification: + +- Alphanumeric characters, underscores (``_``), hyphens (``-``) and dots (``.``) + will be matched verbatim. +- Special characters: ``*``, ``?``, ``**`` and character ranges: ``[]`` are supported. +- Path delimiters must be the forward slash character (``/``). +- Patterns are relative to the directory containing :file:`pyproject.toml`, and + thus may not start with a slash character. +- Parent directory indicators (``..``) must not be used. +- Each glob must match at least one file. + +Literal paths are valid globs. +Any characters or character sequences not covered by this specification are +invalid. + + +``keywords`` +------------ + +This will help PyPI's search box to suggest your project when people +search for these keywords. + +.. code-block:: toml + + [project] + keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"] + + +.. _classifiers: + +``classifiers`` +--------------- + +A list of PyPI classifiers that apply to your project. Check the +`full list of possibilities `_. + +.. code-block:: toml + + classifiers = [ + # How mature is this project?
Common values are + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + "Development Status :: 4 - Beta", + + # Indicate who your project is intended for + "Intended Audience :: Developers", + "Topic :: Software Development :: Build Tools", + + # Specify the Python versions you support here. + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + ] + +Although the list of classifiers is often used to declare what Python versions a +project supports, this information is only used for searching and browsing +projects on PyPI, not for installing projects. To actually restrict what Python +versions a project can be installed on, use the :ref:`requires-python` argument. + +To prevent a package from being uploaded to PyPI, use the special ``Private :: +Do Not Upload`` classifier. PyPI will always reject packages with classifiers +beginning with ``Private ::``. + +.. _writing-pyproject-toml-urls: + +``urls`` +-------- + +A list of URLs associated with your project, displayed on the left +sidebar of your PyPI project page. + +.. note:: + + See :ref:`well-known-labels` for a listing + of labels that PyPI and other packaging tools are specifically aware of, + and `PyPI's project metadata docs `_ + for PyPI-specific URL processing. + +.. code-block:: toml + + [project.urls] + Homepage = "https://example.com/" + Documentation = "https://readthedocs.org/" + Repository = "https://github.com/me/spam.git" + Issues = "https://github.com/me/spam/issues" + Changelog = "https://github.com/me/spam/blob/master/CHANGELOG.md" + +Note that if the label contains spaces, it needs to be quoted, e.g., +``Website = "https://example.com/"`` but +``"Official Website" = "https://example.com/"``. 
+ +Users are advised to use :ref:`well-known-labels` for their project URLs +where appropriate, since consumers of metadata (like package indices) can +specialize their presentation. + +For example in the following metadata, neither ``MyHomepage`` nor +``"Download Link"`` is a well-known label, so they will be rendered verbatim: + +.. code-block:: toml + + [project.urls] + MyHomepage = "https://example.com/" + "Download Link" = "https://example.com/abc.tar.gz" + + +Whereas in this metadata ``HomePage`` and ``DOWNLOAD`` both have +well-known equivalents (``homepage`` and ``download``), and can be presented +with those semantics in mind (the project's home page and its external +download location, respectively). + +.. code-block:: toml + + [project.urls] + HomePage = "https://example.com/" + DOWNLOAD = "https://example.com/abc.tar.gz" + +Advanced plugins +================ + +Some packages can be extended through plugins. Examples include Pytest_ +and Pygments_. To create such a plugin, you need to declare it in a subtable +of ``[project.entry-points]`` like this: + +.. code-block:: toml + + [project.entry-points."spam.magical"] + tomatoes = "spam:main_tomatoes" + +See the :ref:`Plugin guide ` for more information. + + + +A full example +============== + +.. code-block:: toml + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + + [project] + name = "spam-eggs" + version = "2020.0.0" + dependencies = [ + "httpx", + "gidgethub[httpx]>4.0.0", + "django>2.1; os_name != 'nt'", + "django>2.0; os_name == 'nt'", + ] + requires-python = ">=3.8" + authors = [ + {name = "Pradyun Gedam", email = "pradyun@example.com"}, + {name = "Tzu-Ping Chung", email = "tzu-ping@example.com"}, + {name = "Another person"}, + {email = "different.person@example.com"}, + ] + maintainers = [ + {name = "Brett Cannon", email = "brett@example.com"} + ] + description = "Lovely Spam! Wonderful Spam!" 
+ readme = "README.rst" + license = "MIT" + license-files = ["LICEN[CS]E.*"] + keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"] + classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python" + ] + + [project.optional-dependencies] + gui = ["PyQt5"] + cli = [ + "rich", + "click", + ] + + [project.urls] + Homepage = "https://example.com/" + Documentation = "https://readthedocs.org/" + Repository = "https://github.com/me/spam.git" + "Bug Tracker" = "https://github.com/me/spam/issues" + Changelog = "https://github.com/me/spam/blob/master/CHANGELOG.md" + + [project.scripts] + spam-cli = "spam:main_cli" + + [project.gui-scripts] + spam-gui = "spam:main_gui" + + [project.entry-points."spam.magical"] + tomatoes = "spam:main_tomatoes" + + +------------------ + +.. [#requires-python-upper-bounds] Think twice before applying an upper bound + like ``requires-python = "<= 3.10"`` here. `This blog post `_ + contains some information regarding possible problems. + +.. [#flit-core-pep639] flit-core `does not yet `_ support WITH in SPDX license expressions. + +.. _flit-issue-735: https://github.com/pypa/flit/issues/735 +.. _gfm: https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax +.. _setuptools: https://setuptools.pypa.io +.. _poetry: https://python-poetry.org +.. _pypi-pip: https://pypi.org/project/pip +.. _pypi-search-pip: https://pypi.org/search?q=pip +.. _classifier-list: https://pypi.org/classifiers +.. _requires-python-blog-post: https://iscinumpy.dev/post/bound-version-constraints/#pinning-the-python-version-is-special +.. _poetry-pep639-issue: https://github.com/python-poetry/poetry/issues/9670 +.. _pytest: https://pytest.org +.. _pygments: https://pygments.org +.. _rest: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html +.. 
_spdxcustomids: https://spdx.github.io/spdx-spec/v2.2.2/other-licensing-information-detected/ +.. _spdxlicenselist: https://spdx.org/licenses/ diff --git a/source/index.rst b/source/index.rst index 0ee3be28b..aa522c8c5 100644 --- a/source/index.rst +++ b/source/index.rst @@ -2,28 +2,92 @@ Python Packaging User Guide =========================== -:Last Reviewed: 2015-09-08 - -The "Python Packaging User Guide" (PyPUG) aims to be the authoritative resource -on how to package, publish and install Python distributions using current tools. - -To follow the development of Python packaging, see the `Python -Packaging Authority `_. - -This guide is maintained on `github -`_. +.. meta:: + :description: The Python Packaging User Guide (PyPUG) is a collection of tutorials and guides for packaging Python software. + :keywords: python, packaging, guide, tutorial .. toctree:: - :maxdepth: 1 + :maxdepth: 2 + :hidden: - current - installing - distributing - additional - specifications + overview + flow + tutorials/index + guides/index + discussions/index + specifications/index key_projects glossary support + contribute + news + +Welcome to the *Python Packaging User Guide*, a collection of tutorials and +references to help you distribute and install Python packages with modern +tools. + +This guide is maintained on `GitHub`_ by the :doc:`Python Packaging Authority `. We +happily accept :doc:`contributions and feedback `. 😊 + +.. _GitHub: https://github.com/pypa/packaging.python.org + + +Overview and Flow +================= + +.. note:: + + Building your understanding of Python packaging is a journey. Patience and + continuous improvement are key to success. The overview and flow sections + provide a starting point for understanding the Python packaging ecosystem. + +The :doc:`overview` explains Python packaging +and its use when preparing and distributing projects. 
+This section helps you build understanding about selecting the tools and +processes that are most suitable for your use case. +It includes what packaging is, the problems that it solves, and +key considerations. + +To get an overview of the workflow used to publish your code, see +:doc:`packaging flow <flow>`. + +Tutorials +========= + +Tutorials walk through the steps needed to complete a project for the first time. +Tutorials aim to help you succeed and provide a starting point for future +exploration. +The :doc:`tutorials/index` section includes: + +* A :doc:`tutorial on installing packages <tutorials/installing-packages>` +* A :doc:`tutorial on managing application dependencies <tutorials/managing-dependencies>` + in a version controlled project +* A :doc:`tutorial on packaging and distributing <tutorials/packaging-projects>` + your project + +Guides +====== + +Guides provide steps to perform a specific task. Guides are more focused on +users who are already familiar with Python packaging and are looking for +specific information. + +The :doc:`guides/index` section provides "how to" instructions in three major +areas: package installation; building and distributing packages; miscellaneous +topics. + +Explanations and Discussions +============================ + +The :doc:`discussions/index` section provides in-depth explanations and discussion +about topics, such as: + +* :doc:`discussions/deploying-python-applications` +* :doc:`discussions/pip-vs-easy-install` + +Reference +========= -.. _docs.python.org: http://docs.python.org +* The :doc:`specifications/index` section for packaging interoperability specifications. +* The list of :doc:`other projects <key_projects>` maintained by members of the Python Packaging Authority. +* The :doc:`glossary` for definitions of terms used in Python packaging.
diff --git a/source/installing.rst b/source/installing.rst deleted file mode 100644 index 88978d9aa..000000000 --- a/source/installing.rst +++ /dev/null @@ -1,387 +0,0 @@ -=================== -Installing Packages -=================== - -:Page Status: Complete -:Last Reviewed: 2016-06-24 - -This section covers the basics of how to install Python :term:`packages -`. - -It's important to note that the term "package" in this context is being used as -a synonym for a :term:`distribution ` (i.e. a bundle of -software to be installed), not to refer to the kind of :term:`package ` that you import in your Python source code (i.e. a container of -modules). It is common in the Python community to refer to a :term:`distribution -` using the term "package". Using the term "distribution" -is often not preferred, because it can easily be confused with a Linux -distribution, or another larger software distribution like Python itself. - - -.. contents:: Contents - :local: - - -.. _installing_requirements: - -Requirements for Installing Packages -==================================== - -This section describes the steps to follow before installing other Python -packages. - -Install pip, setuptools, and wheel ----------------------------------- - -* If you have Python 2 >=2.7.9 or Python 3 >=3.4 installed from `python.org - `_, you will already have :ref:`pip` and - :ref:`setuptools`, but will need to upgrade to the latest version: - - On Linux or OS X: - - :: - - pip install -U pip setuptools - - - On Windows: - - :: - - python -m pip install -U pip setuptools - -* If you're using a Python install on Linux that's managed by the system package - manager (e.g "yum", "apt-get" etc...), and you want to use the system package - manager to install or upgrade pip, then see :ref:`Installing - pip/setuptools/wheel with Linux Package Managers` - -* Otherwise: - - * Securely Download `get-pip.py - `_ [1]_ - - * Run ``python get-pip.py``. [2]_ This will install or upgrade pip. 
- Additionally, it will install :ref:`setuptools` and :ref:`wheel` if they're - not installed already. - - .. warning:: - - Be cautious if you're using a Python install that's managed by your - operating system or another package manager. get-pip.py does not - coordinate with those tools, and may leave your system in an - inconsistent state. - - -Optionally, Create a virtual environment ----------------------------------------- - -See :ref:`section below ` for details, -but here's the basic commands: - - Using :ref:`virtualenv`: - - :: - - pip install virtualenv - virtualenv - source /bin/activate - - Using `venv`_: [3]_ - - :: - - python3 -m venv - source /bin/activate - - -.. _`Creating and using Virtual Environments`: - -Creating Virtual Environments -============================= - -Python "Virtual Environments" allow Python :term:`packages ` to be installed in an isolated location for a particular application, -rather than being installed globally. - -Imagine you have an application that needs version 1 of LibFoo, but another -application requires version 2. How can you use both these applications? If you -install everything into /usr/lib/python2.7/site-packages (or whatever your -platform’s standard location is), it’s easy to end up in a situation where you -unintentionally upgrade an application that shouldn’t be upgraded. - -Or more generally, what if you want to install an application and leave it be? -If an application works, any change in its libraries or the versions of those -libraries can break the application. - -Also, what if you can’t install :term:`packages ` into the -global site-packages directory? For instance, on a shared host. - -In all these cases, virtual environments can help you. They have their own -installation directories and they don’t share libraries with other virtual -environments. 
- -Currently, there are two viable tools for creating Python virtual environments: - -* `venv`_ is available by default in Python 3.3 and later, and installs - :ref:`pip` and :ref:`setuptools` into created virtual environments in - Python 3.4 and later. -* :ref:`virtualenv` needs to be installed separately, but supports Python 2.6+ - and Python 3.3+, and :ref:`pip`, :ref:`setuptools` and :ref:`wheel` are - always installed into created virtual environments by default (regardless of - Python version). - -The basic usage is like so: - -Using :ref:`virtualenv`: - -:: - - virtualenv - source /bin/activate - - -Using `venv`_: - -:: - - python3 -m venv - source /bin/activate - - -For more information, see the `virtualenv `_ docs or -the `venv`_ docs. - - -Use pip for Installing -====================== - -:ref:`pip` is the recommended installer. Below, we'll cover the most common -usage scenarios. For more detail, see the `pip docs `_, -which includes a complete `Reference Guide -`_. - -There are a few cases where you might want to use `easy_install -`_ instead of pip. For -details, see the the :ref:`pip vs easy_install` breakdown in the :doc:`Advanced -Topics ` section. - - -Installing from PyPI -==================== - -The most common usage of :ref:`pip` is to install from the :term:`Python Package -Index ` using a :term:`requirement specifier -`. Generally speaking, a requirement specifier is -composed of a project name followed by an optional :term:`version specifier -`. :pep:`440` contains a :pep:`full -specification <440#version-specifiers>` -of the currently supported specifiers. Below are some examples. 
- -To install the latest version of "SomeProject": - -:: - - pip install 'SomeProject' - - -To install a specific version: - -:: - - pip install 'SomeProject==1.4' - - -To install greater than or equal to one version and less than another: - -:: - - pip install 'SomeProject>=1,<2' - - -To install a version that's :pep:`"compatible" <440#compatible-release>` -with a certain version: [4]_ - -:: - - pip install 'SomeProject~=1.4.2' - -In this case, this means to install any version "==1.4.*" version that's also -">=1.4.2". - - -Source Distributions vs Wheels -============================== - -:ref:`pip` can install from either :term:`Source Distributions (sdist) ` or :term:`Wheels `, but if both are present -on PyPI, pip will prefer a compatible :term:`wheel `. - -:term:`Wheels ` are a pre-built :term:`distribution ` format that provides faster installation compared to :term:`Source -Distributions (sdist) `, especially when a -project contains compiled extensions. - -If :ref:`pip` does not find a wheel to install, it will locally build a wheel -and cache it for future installs, instead of rebuilding the source distribution -in the future. - - -Upgrading packages -================== - -Upgrade an already installed `SomeProject` to the latest from PyPI. - -:: - - pip install --upgrade SomeProject - - - -Installing to the User Site -=========================== - -To install :term:`packages ` that are isolated to the -current user, use the ``--user`` flag: - -:: - - pip install --user SomeProject - - -For more information see the `User Installs -`_ section -from the pip docs. - - -Requirements files -================== - -Install a list of requirements specified in a :ref:`Requirements File -`. - -:: - - pip install -r requirements.txt - - -Installing from VCS -=================== - -Install a project from VCS in "editable" mode. For a full breakdown of the -syntax, see pip's section on :ref:`VCS Support `. 
- -:: - - pip install -e git+https://git.repo/some_pkg.git#egg=SomeProject # from git - pip install -e hg+https://hg.repo/some_pkg.git#egg=SomeProject # from mercurial - pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomeProject # from svn - pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomeProject # from a branch - - -Installing from other Indexes -============================= - -Install from an alternate index - -:: - - pip install --index-url http://my.package.repo/simple/ SomeProject - - -Search an additional index during install, in addition to :term:`PyPI ` - -:: - - pip install --extra-index-url http://my.package.repo/simple SomeProject - - - -Installing from a local src tree -================================ - - -Installing from local src in `Development Mode -`_, -i.e. in such a way that the project appears to be installed, but yet is -still editable from the src tree. - -:: - - pip install -e - - -You can also install normally from src - -:: - - pip install - - -Installing from local archives -============================== - -Install a particular source archive file. - -:: - - pip install ./downloads/SomeProject-1.0.4.tar.gz - - -Install from a local directory containing archives (and don't check :term:`PyPI -`) - -:: - - pip install --no-index --find-links=file:///local/dir/ SomeProject - pip install --no-index --find-links=/local/dir/ SomeProject - pip install --no-index --find-links=relative/dir/ SomeProject - - - -Installing Prereleases -====================== - -Find pre-release and development versions, in addition to stable versions. By -default, pip only finds stable versions. - -:: - - pip install --pre SomeProject - - -Installing Setuptools "Extras" -============================== - -Install `setuptools extras`_. - -:: - - $ pip install SomePackage[PDF] - $ pip install SomePackage[PDF]==3.0 - $ pip install -e .[PDF]==3.0 # editable project in current directory - - - ----- - -.. 
[1] "Secure" in this context means using a modern browser or a - tool like `curl` that verifies SSL certificates when downloading from - https URLs. - -.. [2] Depending on your platform, this may require root or Administrator - access. :ref:`pip` is currently considering changing this by `making user - installs the default behavior - `_. - -.. [3] Beginning with Python 3.4, ``venv`` (a stdlib alternative to - :ref:`virtualenv`) will create virtualenv environments with ``pip`` - pre-installed, thereby making it an equal alternative to - :ref:`virtualenv`. - -.. [4] The compatible release specifier was accepted in :pep:`440` - and support was released in :ref:`setuptools` v8.0 and - :ref:`pip` v6.0 - -.. _venv: https://docs.python.org/3/library/venv.html -.. _setuptools extras: http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras-optional-features-with-their-own-dependencies diff --git a/source/key_projects.rst b/source/key_projects.rst index 5488ecded..e4501fe0e 100644 --- a/source/key_projects.rst +++ b/source/key_projects.rst @@ -5,9 +5,6 @@ Project Summaries ================= -:Page Status: Complete -:Last Reviewed: 2016-06-24 - Summaries and links for the most relevant projects in the space of Python installation and packaging. @@ -21,100 +18,294 @@ PyPA Projects bandersnatch ============ -`Mailing list `__ [2]_ | -`Issues `__ | -`Bitbucket `__ | -`PyPI `__ +`Docs `__ | +`Issues `__ | +`GitHub `__ | +`PyPI `__ + +``bandersnatch`` is a PyPI mirroring client designed to efficiently +create a complete mirror of the contents of PyPI. Organizations thus +save bandwidth and latency on package downloads (especially in the +context of automated tests) and to prevent heavily loading PyPI's +Content Delivery Network (CDN). +Files can be served from a local directory or `AWS S3`_. + + +.. _build: + +build +===== + +:any:`Docs ` | +`Issues `__ | +`GitHub `__ | +`PyPI `__ + +``build`` is a :pep:`517` compatible Python package builder. 
It provides a CLI to +build packages, as well as a Python API. + + +.. _cibuildwheel: + +cibuildwheel +============ -bandersnatch is a PyPI mirroring client designed to efficiently create a -complete mirror of the contents of PyPI. +`Docs `__ | +`Issues `__ | +`GitHub `__ | +`PyPI `__ | +`Discussions `__ | +`Discord #cibuildwheel `__ +``cibuildwheel`` is a Python package that builds :term:`wheels ` for all common +platforms and Python versions on most CI systems. Also see :ref:`multibuild`. .. _distlib: distlib ======= -`Docs `__ | -`Mailing list `__ [2]_ | -`Issues `__ | -`Bitbucket `__ | -`PyPI `__ +:doc:`Docs ` | +`Issues `__ | +`GitHub `__ | +`PyPI `__ + +``distlib`` is a library which implements low-level functions that +relate to packaging and distribution of Python software. ``distlib`` +implements several relevant PEPs (Python Enhancement Proposal +standards) and is useful for developers of third-party packaging tools +to make and upload binary and source :term:`distributions +`, achieve interoperability, resolve +dependencies, manage package resources, and do other similar +functions. + +Unlike the stricter :ref:`packaging` project (below), which +specifically implements modern Python packaging interoperability +standards, ``distlib`` also attempts to provide reasonable fallback +behaviours when asked to handle legacy packages and metadata that +predate the modern interoperability standards and fall into the subset +of packages that are incompatible with those standards. + + +.. _distutils: + +distutils +========= + +The original Python packaging system, added to the standard library in +Python 2.0 and removed in 3.12. + +Due to the challenges of maintaining a packaging system +where feature updates are tightly coupled to language runtime updates, +direct usage of :ref:`distutils` has been actively discouraged, with +:ref:`Setuptools` being the preferred replacement. 
:ref:`Setuptools` +not only provides features that plain :ref:`distutils` doesn't offer +(such as dependency declarations and entry point declarations), it +also provides a consistent build interface and feature set across all +supported Python versions. + +Consequently, :ref:`distutils` was deprecated in Python 3.10 by :pep:`632` and +has been :doc:`removed ` from the standard library in +Python 3.12. Setuptools bundles the standalone copy of distutils, and it is +injected even on Python < 3.12 if you import setuptools first or use pip. + -Distlib is a library which implements low-level functions that relate to -packaging and distribution of Python software. It consists in part of the -functions from the `distutils2 `_ -project, which was intended to be released as ``packaging`` in the Python 3.3 -stdlib, but was removed shortly before Python 3.3 entered beta testing. +.. _flit: +flit +==== + +`Docs `__ | +`Issues `__ | +`PyPI `__ + +Flit provides a simple way to create and upload pure Python packages and +modules to PyPI. It focuses on `making the easy things easy `_ +for packaging. Flit can generate a configuration file to quickly set up a +simple project, build source distributions and wheels, and upload them to PyPI. + +Flit uses ``pyproject.toml`` to configure a project. Flit does not rely on tools +such as :ref:`setuptools` to build distributions, or :ref:`twine` to upload them +to PyPI. Flit requires Python 3, but you can use it to distribute modules for +Python 2, so long as they can be imported on Python 3. + +The flit package is lifted by `Matthias Bussonnier +`__ since October 2023 on the `tidelift platform +`__, and funds sent to the PSF and +earmarked for PyPA usage. + +.. _flit-rationale: https://flit.readthedocs.io/en/latest/rationale.html + +.. _hatch: + +hatch +===== + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +Hatch is a unified command-line tool meant to conveniently manage +dependencies and environment isolation for Python developers. 
Python +package developers use Hatch and its :term:`build backend ` Hatchling to +configure, version, specify dependencies for, and publish packages +to PyPI. Its plugin system allows for easily extending functionality. .. _packaging: packaging ========= -`Dev list `__ | +:doc:`Docs ` | `Issues `__ | -`Github `__ | -`PyPI `__ | -User irc:#pypa | -Dev irc:#pypa-dev +`GitHub `__ | +`PyPI `__ Core utilities for Python packaging used by :ref:`pip` and :ref:`setuptools`. +The core utilities in the packaging library handle version handling, +specifiers, markers, requirements, tags, and similar attributes and +tasks for Python packages. Most Python users rely on this library +without needing to explicitly call it; developers of the other Python +packaging, distribution, and installation tools listed here often use +its functionality to parse, discover, and otherwise handle dependency +attributes. + +This project specifically focuses on implementing the modern Python +packaging interoperability standards defined at +:ref:`packaging-specifications`, and will report errors for +sufficiently old legacy packages that are incompatible with those +standards. In contrast, the :ref:`distlib` project is a more +permissive library that attempts to provide a plausible reading of +ambiguous metadata in cases where :ref:`packaging` will instead report +on error. .. _pip: pip === -`Docs `__ | -`User list `__ [1]_ | -`Dev list `__ | +`Docs `__ | `Issues `__ | -`Github `__ | -`PyPI `__ | -User irc:#pypa | -Dev irc:#pypa-dev +`GitHub `__ | +`PyPI `__ + +The most popular tool for installing Python packages, and the one +included with modern versions of Python. + +It provides the essential core features for finding, downloading, and +installing packages from PyPI and other Python package indexes, and can be +incorporated into a wide range of development workflows via its +command-line interface (CLI). + +.. 
_Pipenv: + +Pipenv +====== + +:doc:`Docs ` | +`Source `__ | +`Issues `__ | +`PyPI `__ + +Pipenv is a project that aims to bring the best of all packaging worlds to the +Python world. It harnesses :ref:`Pipfile`, :ref:`pip`, and :ref:`virtualenv` +into one single toolchain. It can autoimport ``requirements.txt`` and also +check for CVEs in `Pipfile`_ using `safety `_. + +Pipenv aims to help users manage environments, dependencies, and +imported packages on the command line. It also works well on Windows +(which other tools often underserve), makes and checks file hashes, +to ensure compliance with hash-locked dependency specifiers, and eases +uninstallation of packages and dependencies. + +.. _Pipfile: + +Pipfile +======= + +`Source `__ + +:file:`Pipfile` and its sister :file:`Pipfile.lock` are a higher-level +application-centric alternative to :ref:`pip`'s lower-level +:file:`requirements.txt` file. + +.. _pipx: -A tool for installing Python packages. +pipx +==== + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +pipx is a tool to install and run Python command-line applications without +causing dependency conflicts with other packages installed on the system. Python Packaging User Guide =========================== -`Docs `__ | -`Mailing list `__ | -`Issues `__ | -`Github `__ | -User irc:#pypa | -Dev irc:#pypa-dev +:doc:`Docs ` | +`Issues `__ | +`GitHub `__ This guide! +.. _readme_renderer: + +readme_renderer +=============== + +`GitHub and docs `__ | +`PyPI `__ + +``readme_renderer`` is a library that package developers use to render +their user documentation (README) files into HTML from markup +languages such as Markdown or reStructuredText. Developers call it on +its own or via :ref:`twine`, as part of their release management +process, to check that their package descriptions will properly +display on PyPI. .. _setuptools: .. 
_easy_install: -setuptools +Setuptools ========== `Docs `__ | -`User list `__ [2]_ | -`Dev list `__ | `Issues `__ | `GitHub `__ | -`PyPI `__ | -User irc:#pypa | -Dev irc:#pypa-dev +`PyPI `__ +Setuptools (which includes ``easy_install``) is a collection of +enhancements to the Python distutils that allow you to more easily +build and distribute Python :term:`distributions `, especially ones that have dependencies on other packages. -setuptools (which includes ``easy_install``) is a collection of enhancements to -the Python distutils that allow you to more easily build and distribute Python -distributions, especially ones that have dependencies on other packages. +.. _trove-classifiers: -`distribute`_ was a fork of setuptools that was merged back into setuptools (in -v0.7), thereby making setuptools the primary choice for Python packaging. +trove-classifiers +================= + +`Issues `__ | `GitHub +`__ | `PyPI +`__ + +trove-classifiers is the canonical source for `classifiers on PyPI +`_, which project maintainers use to +:ref:`systematically describe their projects ` +so that users can better find projects that match their needs on the PyPI. + +The trove-classifiers package contains a list of valid classifiers and +deprecated classifiers (which are paired with the classifiers that replace +them). Use this package to validate classifiers used in packages intended for +uploading to PyPI. As this list of classifiers is published as code, you +can install and import it, giving you a more convenient workflow compared to +referring to the `list published on PyPI `_. The +`issue tracker `_ for the +project hosts discussions on proposed classifiers and requests for new +classifiers. .. _twine: @@ -122,14 +313,16 @@ v0.7), thereby making setuptools the primary choice for Python packaging. 
twine ===== -`Mailing list `__ [2]_ | +`Docs `__ | `Issues `__ | -`Github `__ | -`PyPI `__ - -Twine is a utility for interacting with PyPI, that offers a secure replacement for -``setup.py upload``. +`GitHub `__ | +`PyPI `__ +Twine is the primary tool developers use to upload packages to the +Python Package Index or other Python package indexes. It is a +command-line program that passes program files and metadata to a web +API. Developers use it because it's the official PyPI upload tool, +it's fast and secure, it's maintained, and it reliably works. .. _virtualenv: @@ -137,16 +330,18 @@ Twine is a utility for interacting with PyPI, that offers a secure replacement f virtualenv ========== -`Docs `__ | -`User list `__ | -`Dev list `__ | +`Docs `__ | `Issues `__ | -`Github `__ | -`PyPI `__ | -User irc:#pypa | -Dev irc:#pypa-dev +`GitHub `__ | +`PyPI `__ -A tool for creating isolated Python environments. +virtualenv is a tool for creating isolated Python :term:`Virtual Environments +`, like :ref:`venv`. Unlike :ref:`venv`, virtualenv can +create virtual environments for other versions of Python, which it locates +using the PATH environment variable. It also provides convenient features for +configuring, maintaining, duplicating, and troubleshooting virtual environments. +For more information, see the section on :ref:`Creating and using Virtual +Environments`. .. _warehouse: @@ -155,13 +350,12 @@ Warehouse ========= `Docs `__ | -`Mailing list `__ [2]_ | `Issues `__ | -`Github `__ | -Dev irc:#pypa-dev - +`GitHub `__ -The new unreleased PyPI application which can be previewed at https://warehouse.python.org/. +The current codebase powering the :term:`Python Package Index +(PyPI)`. It is hosted at `pypi.org `_. The default +source for :ref:`pip` downloads. .. _wheel: @@ -169,48 +363,35 @@ The new unreleased PyPI application which can be previewed at https://warehouse. 
wheel ===== -`Docs `__ | -`Mailing list `__ [2]_ | -`Issues `__ | -`Bitbucket `__ | -`PyPI `__ | -User irc:#pypa | -Dev irc:#pypa-dev - +`Docs `__ | +`Issues `__ | +`GitHub `__ | +`PyPI `__ Primarily, the wheel project offers the ``bdist_wheel`` :ref:`setuptools` extension for creating :term:`wheel distributions `. Additionally, it offers its own command line utility for creating and installing wheels. +See also `auditwheel `__, a tool +that package developers use to check and fix Python packages they are +making in the binary wheel format. It provides functionality to +discover dependencies, check metadata for compliance, and repair the +wheel and metadata to properly link and include external shared +libraries in a package. + Non-PyPA Projects ################# -.. _bento: - -bento -===== - -`Docs `__ | -`Mailing list `__ | -`Issues `__ | -`Github `__ | -`PyPI `__ - -Bento is a packaging tool solution for python software, targeted as an -alternative to distutils, setuptools, distribute, etc.... Bento's philosophy is -reproducibility, extensibility and simplicity (in that order). - .. _buildout: buildout ======== `Docs `__ | -`Mailing list `__ [2]_ | `Issues `__ | -`PyPI `__ | -irc:#buildout +`PyPI `__ | +`GitHub `__ Buildout is a Python-based build system for creating, assembling and deploying applications from multiple parts, some of which may be non-Python-based. It @@ -221,79 +402,422 @@ lets you create a buildout configuration and reproduce the same software later. conda ===== -`Docs `__ +:doc:`Docs ` + +Conda is a package, dependency, and environment management system for any language — Python, R, +Ruby, C/C++, Fortran, and more. It is written in Python and +widely used in the Python scientific computing community, due to its support for non-Python +compiled libraries and extensions. It is used as the basis of the `Anaconda +`__ Python distribution from Anaconda, Inc. 
It was originally +aimed at the scientific community, but can also be used on its own, or with the +:doc:`miniconda `, `miniforge `_ or +`pixi `_ systems. It is available for Windows, Mac and Linux systems. -conda is the package management tool for `Anaconda -`__ Python installations. -Anaconda Python is a distribution from `Continuum Analytics -`__ specifically aimed at the scientific -community, and in particular on Windows where the installation of binary -extensions is often difficult. +Conda is a completely separate tool from :ref:`pip`, virtualenv and wheel, but provides +many of their combined features, such as package management, virtual environment +management and deployment of binary extensions and other binary code. -Conda is a completely separate tool to pip, virtualenv and wheel, but provides -many of their combined features in terms of package management, virtual environment -management and deployment of binary extensions. +Conda does not install packages from PyPI -- it can only manage packages built specifically +for conda, which can be made available on a "conda channel", such as those hosted on +`anaconda.org `__, or a local (e.g. intranet) package server. +In addition to the "default" channels managed by `Anaconda, Inc. `__, there are a wide variety of packages from the community supported +`conda-forge project `__ -Conda does not install packages from PyPI and can install only from -the official Continuum repositories, or anaconda.org (a place for -user-contributed *conda* packages), or a local (e.g. intranet) package server. -However, note that pip can be installed into, and work side-by-side with conda -for managing distributions from PyPI. +Note that :ref:`pip` can be installed into, and work side-by-side with conda +for managing :term:`distributions ` from PyPI. 
It is also possible +to build conda packages from Python source packages using tools such as +`conda skeleton +`__: a tool to automatically make conda packages from Python packages available on PyPI. +.. _devpi: devpi ===== `Docs `__ | -`Mailing List `__ | -`Issues `__ | -`PyPI `__ +:gh:`Issues ` | +`PyPI `__ + +devpi features a powerful PyPI-compatible server and PyPI proxy cache +with a complementary command line tool to drive packaging, testing and +release activities with Python. devpi also provides a browsable and +searchable web interface. +devpi supports mirroring PyPI, multiple +:term:`package indexes ` with inheritance, syncing between +these indexes, index replication and fail-over, and package upload. + +.. _dumb-pypi: + +dumb-pypi +========= -devpi features a powerful PyPI-compatible server and PyPI proxy cache with -a complimentary command line tool to drive packaging, testing and release -activities with Python. +`GitHub `__ | +`PyPI `__ +dumb-pypi is a simple :term:`package index ` static file site +generator, which then must be hosted by a static file webserver to become the +package index. It supports serving the hash, core-metadata, and yank-status. + +.. _enscons: + +enscons +======= + +:gh:`Source ` | +:gh:`Issues ` | +`PyPI `__ + +Enscons is a Python packaging tool based on `SCons`_. It builds +:ref:`pip`-compatible source distributions and wheels without using +distutils or setuptools, including distributions with C +extensions. Enscons has a different architecture and philosophy than +:ref:`distutils`. Rather than adding build features to a Python +packaging system, enscons adds Python packaging to a general purpose +build system. Enscons helps you to build sdists that can be +automatically built by :ref:`pip`, and wheels that are independent of +enscons. + +.. _SCons: https://scons.org/ + +.. _flaskpypiproxy: + +Flask-Pypi-Proxy +================ + +`Docs `__ | +:gh:`GitHub ` | +`PyPI `__ + +.. 
warning:: Not maintained, project archived + +Flask-Pypi-Proxy is a :term:`package index ` as a cached +proxy for PyPI. .. _hashdist: Hashdist ======== -`Docs `__ | -`Github `__ +`Docs `__ | +`GitHub `__ + +Hashdist is a library for building non-root software +distributions. Hashdist is trying to be “the Debian of choice for +cases where Debian technology doesn’t work”. The best way for +Pythonistas to think about Hashdist may be a more powerful hybrid of +:ref:`virtualenv` and :ref:`buildout`. It is aimed at solving the +problem of installing scientific software, and making package +distribution stateless, cached, and branchable. It is used by some +researchers but has been lacking in maintenance since 2016. + +.. _maturin: + +Maturin +======= + +`Docs `__ | +`GitHub `__ + +Maturin is a build backend for Rust extension modules, also written in +Rust. It supports building wheels for python 3.7+ on Windows, Linux, macOS and +FreeBSD, can upload them to PyPI and has basic PyPy and GraalPy support. + + +.. _meson-python: + +meson-python +============ + +`Docs `__ | +`GitHub `__ + +``meson-python`` is a build backend that uses the Meson_ build system. It enables +Python package authors to use Meson_ as the build system for their package. It +supports a wide variety of languages, including C, and is able to fill the needs +of most complex build configurations. + +.. _Meson: https://github.com/mesonbuild/meson -Hashdist is a library for building non-root software distributions. Hashdist is -trying to be “the Debian of choice for cases where Debian technology doesn’t -work”. The best way for Pythonistas to think about Hashdist may be a more -powerful hybrid of virtualenv and buildout. +.. _multibuild: + +multibuild +========== + +`GitHub `__ + +Multibuild is a set of CI scripts for building and testing Python :term:`wheels ` for +Linux, macOS, and (less flexibly) Windows. Also see :ref:`cibuildwheel`. + +.. 
_nginx_pypi_cache: + +nginx_pypi_cache +================ + +:gh:`GitHub ` + +nginx_pypi_cache is a :term:`package index ` caching proxy +using `nginx `_. + +.. _pdm: + +pdm +=== + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +PDM is a modern Python package manager. It uses :term:`pyproject.toml` to store +project metadata as defined in :pep:`621`. .. _pex: pex === -`Docs `__ | -`Github `__ | -`PyPI `__ +`Docs `__ | +`GitHub `__ | +`PyPI `__ -pex is both a library and tool for generating ``.pex`` (Python EXecutable) +Pex is a tool for generating :file:`.pex` (Python EXecutable) files, standalone Python environments in the spirit of :ref:`virtualenv`. -``.pex`` files are just carefully constructed zip files with a -``#!/usr/bin/env python`` and special ``__main__.py``, and are designed to make -deployment of Python applications as simple as ``cp``. +PEX files are :doc:`zipapps ` that +make deployment of Python applications as simple as ``cp``. A single PEX +file can support multiple target platforms and can be created from standard +:ref:`pip`-resolvable requirements, a lockfile generated with ``pex3 lock ...`` +or even another PEX. PEX files can optionally have tools embedded that support +turning the PEX file into a standard venv, graphing dependencies and more. + +.. _pip-tools: + +pip-tools +========= + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +pip-tools is a suite of tools meant for Python system administrators +and release managers who particularly want to keep their builds +deterministic yet stay up to date with new versions of their +dependencies. Users can specify particular release of their +dependencies via hash, conveniently make a properly formatted list of +requirements from information in other parts of their program, update +all dependencies (a feature :ref:`pip` currently does not provide), and +create layers of constraints for the program to obey. + +.. 
_pip2pi: + +pip2pi +========= + +:gh:`GitHub ` | +`PyPI `__ + +pip2pi is a :term:`package index ` server where specific +packages are manually synchronised. + +.. _piwheels: + +piwheels +======== + +`Website `__ | +:doc:`Docs ` | +`GitHub `__ + +piwheels is a website, and software underpinning it, that fetches +source code distribution packages from PyPI and compiles them into +binary wheels that are optimized for installation onto Raspberry Pi +computers. Raspberry Pi OS pre-configures pip to use piwheels.org as +an additional index to PyPI. + +.. _poetry: + +poetry +====== + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +poetry is a command-line tool to handle dependency installation and +isolation as well as building and packaging of Python packages. It +uses ``pyproject.toml`` and, instead of depending on the resolver +functionality within :ref:`pip`, provides its own dependency resolver. +It attempts to speed users' experience of installation and dependency +resolution by locally caching metadata about dependencies. + +.. _proxpi: + +proxpi +====== + +:gh:`GitHub ` | +`PyPI `__ + +proxpi is a simple :term:`package index ` which proxies PyPI +and other indexes with caching. + +.. _pulppython: + +Pulp-python +=========== + +`Docs `__ | +:gh:`GitHub ` | +`PyPI `__ + +Pulp-python is the Python :term:`package index ` plugin for +`Pulp `_. Pulp-python supports mirrors backed by +local or `AWS S3`_, package upload, and proxying to multiple package +indexes. + +.. _pypicloud: + +PyPI Cloud +========== + +`Docs `__ | +:gh:`GitHub ` | +`PyPI `__ + +.. warning:: Not maintained, project archived + +PyPI Cloud is a :term:`package index ` server, backed by +`AWS S3`_ or another cloud storage service, or local files. PyPI Cloud +supports redirect/cached proxying for PyPI, as well as authentication and +authorisation. + +.. 
_pypiprivate:
+
+pypiprivate
+===========
+
+:gh:`GitHub ` |
+`PyPI `__
+
+pypiprivate serves a local (or `AWS S3`_-hosted) directory of packages as a
+:term:`package index `.
+
+.. _pypiserver:
+
+pypiserver
+==========
+
+`GitHub `__ |
+`PyPI `__
+
+pypiserver is a minimalist application that serves as a private Python
+:term:`package index ` (from a local directory) within
+organizations, implementing a simple API and
+browser interface. You can upload private packages using standard
+upload tools, and users can download and install them with :ref:`pip`,
+without publishing them publicly. Organizations who use pypiserver
+usually download packages both from pypiserver and from PyPI.
+
+.. _pyscaffold:
+
+PyScaffold
+==========
+
+`Docs `__ |
+`GitHub `__ |
+`PyPI `__
+
+PyScaffold is a project generator for bootstrapping Python packages,
+ready to be shared on PyPI and installable via :ref:`pip`.
+It relies on a set of sane default configurations for established tools
+(such as :ref:`setuptools`, pytest_ and Sphinx_) to provide a productive
+environment so developers can start coding right away.
+PyScaffold can also be used with existing projects to make packaging
+easier.
+
+.. _pywharf:
+
+pywharf
+=======
+
+:gh:`GitHub ` |
+`PyPI `__
+
+.. warning:: Not maintained, project archived
+
+pywharf is a :term:`package index ` server, serving files
+locally or from `GitHub `_.
+
+.. _scikit-build:
+
+scikit-build
+============
+
+`Docs `__ |
+`GitHub `__ |
+`PyPI `__
+
+Scikit-build is a :ref:`setuptools` wrapper for CPython that builds
+C/C++/Fortran/Cython extensions. It uses
+`cmake `__ (available on PyPI) to provide
+better support for additional compilers, build systems, cross compilation, and
+locating dependencies and their associated build requirements. To speed up and
+parallelize the build of large projects, the user can install `ninja
+`__ (also available on PyPI).
+
+.. 
_scikit-build-core: + +scikit-build-core +================= + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +Scikit-build-core is a build backend for CPython C/C++/Fortran/Cython +extensions. It enables users to write extensions with `cmake +`__ (available on PyPI) to provide better +support for additional compilers, build systems, cross compilation, and +locating dependencies and their associated build requirements. CMake/Ninja +are automatically downloaded from PyPI if not available on the system. + +.. _shiv: + +shiv +==== + +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +shiv is a command line utility for building fully self contained +Python zipapps as outlined in :pep:`441`, but with all their +dependencies included. Its primary goal is making distributing Python +applications and command line tools fast & easy. + +.. _simpleindex: + +simpleindex +=========== + +:gh:`GitHub ` | +`PyPI `__ + +simpleindex is a :term:`package index ` which routes URLs to +multiple package indexes (including PyPI), serves local (or cloud-hosted, +for example `AWS S3`_, with a custom plugin) directories of packages, and +supports custom plugins. .. _spack: Spack ===== -`Docs `__ | -`Github `__ | -`Paper `__ | +:doc:`Docs ` | +`GitHub `__ | +`Paper `__ | `Slides `__ A flexible package manager designed to support multiple versions, -configurations, platforms, and compilers. Spack is like homebrew, but +configurations, platforms, and compilers. Spack is like Homebrew, but packages are written in Python and parameterized to allow easy swapping of compilers, library versions, build options, etc. Arbitrarily many versions of packages can coexist on the same @@ -301,8 +825,22 @@ system. Spack was designed for rapidly building high performance scientific applications on clusters and supercomputers. Spack is not in PyPI (yet), but it requires no installation and can be -used immediately after cloning from github. +used immediately after cloning from GitHub. + +.. 
_zestreleaser: + +zest.releaser +============= +`Docs `__ | +`GitHub `__ | +`PyPI `__ + +``zest.releaser`` is a Python package release tool providing an +abstraction layer on top of :ref:`twine`. Python developers use +``zest.releaser`` to automate incrementing package version numbers, +updating changelogs, tagging releases in source control, and uploading +new packages to PyPI. Standard Library Projects ######################### @@ -313,30 +851,24 @@ ensurepip ========= `Docs `__ | -`Issues `__ +`Issues `__ A package in the Python Standard Library that provides support for bootstrapping :ref:`pip` into an existing Python installation or virtual environment. In most cases, end users won't use this module, but rather it will be used during the build of the Python distribution. +.. _httpserver: -.. _distutils: - -distutils -========= - -`Docs `__ | -`User list `__ [2]_ | -`Issues `__ | -User irc:#pypa | -Dev irc:#pypa-dev +http.server +=========== -A package in the Python Standard Library that has support for creating and -installing :term:`distributions `. :ref:`Setuptools` -provides enhancements to distutils, and is much more commonly used than just -using distutils by itself. +:doc:`Docs ` | +:gh:`Issues ` +A package and command-line interface which can host a directory as a +website, for example as a :term:`package index ` (see +:ref:`Hosting your Own Simple Repository`). .. _venv: @@ -344,7 +876,7 @@ venv ==== `Docs `__ | -`Issues `__ +`Issues `__ A package in the Python Standard Library (starting with Python 3.3) for creating :term:`Virtual Environments `. For more @@ -353,10 +885,6 @@ information, see the section on :ref:`Creating and using Virtual Environments`. ---- -.. [1] pip was created by the same developer as virtualenv, and early on adopted - the virtualenv mailing list, and it's stuck ever since. - -.. [2] Multiple projects reuse the distutils-sig mailing list as their user list. - - -.. _distribute: https://pypi.python.org/pypi/distribute +.. 
_Sphinx: https://www.sphinx-doc.org/en/master/ +.. _pytest: https://docs.pytest.org/en/stable/ +.. _`AWS S3`: https://aws.amazon.com/s3/ diff --git a/source/mirrors.rst b/source/mirrors.rst deleted file mode 100644 index d5650bc84..000000000 --- a/source/mirrors.rst +++ /dev/null @@ -1,66 +0,0 @@ -.. _`PyPI mirrors and caches`: - -======================= -PyPI mirrors and caches -======================= - -:Page Status: Incomplete -:Last Reviewed: 2014-12-24 - -.. contents:: Contents - :local: - - -Mirroring or caching of PyPI can be used to speed up local package installation, -allow offline work, handle corporate firewalls or just plain Internet flakiness. - -Three options are available in this area: - -1. pip provides local caching options, -2. devpi provides higher-level caching option, potentially shared amongst - many users or machines, and -3. bandersnatch provides a local complete mirror of all PyPI :term:`packages - `. - - -Caching with pip ----------------- - -pip provides a number of facilities for speeding up installation by using local -cached copies of :term:`packages `: - -1. `Fast & local installs - `_ by - downloading all the requirements for a project and then pointing pip at - those downloaded files instead of going to PyPI. -2. A variation on the above which pre-builds the installation files for - the requirements using `pip wheel - `_:: - - $ pip wheel --wheel-dir=/tmp/wheelhouse SomeProject - $ pip install --no-index --find-links=/tmp/wheelhouse SomeProject - - -Caching with devpi ------------------- - -devpi is a caching proxy server which you run on your laptop, or some other -machine you know will always be available to you. See the `devpi -documentation for getting started`__. - -__ http://doc.devpi.net/latest/quickstart-pypimirror.html - - -Complete mirror with bandersnatch ----------------------------------- - -bandersnatch will set up a complete local mirror of all PyPI :term:`packages -` (externally-hosted packages are not mirrored). 
See -the `bandersnatch documentation for getting that going`__. - -__ https://bitbucket.org/pypa/bandersnatch/overview - -A benefit of devpi is that it will create a mirror which includes -:term:`packages ` that are external to PyPI, unlike -bandersnatch which will only cache :term:`packages ` -hosted on PyPI. diff --git a/source/news.rst b/source/news.rst new file mode 100644 index 000000000..a8c70dc1b --- /dev/null +++ b/source/news.rst @@ -0,0 +1,234 @@ +News +==== + +.. note:: This document is not currently updated. Previously, the document + highlighted changes in Python packaging. + + +September 2019 +-------------- +- Added a guide about publishing dists via GitHub Actions. (:pr:`647`) + +August 2019 +----------- +- Updated to use :file:`python3 -m` when installing pipx. (:pr:`631`) + +July 2019 +--------- +- Marked all PEP numbers with the :pep: role. (:pr:`629`) +- Upgraded Sphinx version and removed pypa.io intersphinx. (:pr:`625`) +- Mentioned :file:`find_namespace_packages`. (:pr:`622`) +- Updated directory layout examples for consistency. (:pr:`611`) +- Updated Bandersnatch link to GitHub. (:pr:`623`) + +June 2019 +--------- +- Fixed some typos. (:pr:`620`) + +May 2019 +-------- +- Added :file:`python_requires` usage to packaging tutorial. (:pr:`613`) +- Added a MANIFEST.in guide page. (:pr:`609`) + +April 2019 +---------- +- Added a mention for :file:`shiv` in the key projects section. (:pr:`608`) +- Reduced emphasis on virtualenv. (:pr:`606`) + +March 2019 +---------- +- Moved single-sourcing guide version option to Python 3. (:pr:`605`) +- Covered RTD details for contributing. (:pr:`600`) + +February 2019 +------------- +- Elaborate upon the differences between the tutorial and the real packaging process. (:pr:`602`) +- Added instructions to install Python CLI applications. (:pr:`594`) + +January 2019 +------------ +- Added :file:`--no-deps` to the packaging tutorial. (:pr:`593`) +- Updated Sphinx and Nox. 
(:pr:`591`) +- Referenced Twine from Python3. (:pr:`581`) + +December 2018 +------------- +- No programmers in the office! + +November 2018 +------------- +- Removed landing page link to PyPI migration guide. (:pr:`575`) +- Changed bumpversion to bump2version. (:pr:`572`) +- Added single-sourcing package version example. (:pr:`573`) +- Added a guide for creating documentation. (:pr:`568`) + +October 2018 +------------ +- Updated Nox package name. (:pr:`566`) +- Mentioned Sphinx extensions in guides. (:pr:`562`) + +September 2018 +-------------- +- Added a section on checking RST markup. (:pr:`554`) +- Updated user installs page. (:pr:`558`) +- Updated Google BigQuery urls. (:pr:`556`) +- Replaced tar command with working command. (:pr:`552`) +- Changed to double quotes in the pip install SomeProject==1.4. (:pr:`550`) + +August 2018 +----------- +- Removed the recommendation to store passwords in cleartext. (:pr:`546`) +- Moved the Overview to a task based lead in along with the others. (:pr:`540`) +- Updated Python version supported by virtualenv. (:pr:`538`) +- Added outline/rough draft of new Overview page. (:pr:`519`) + +July 2018 +--------- + +- Improved binary extension docs. (:pr:`531`) +- Added scikit-build to key projects. (:pr:`530`) + +June 2018 +--------- + +- Fixed categories of interop PEP for pypa.io. (:pr:`527`) +- Updated Markdown descriptions explanation. (:pr:`522`) + +May 2018 +-------- + +- Noted issues with Provides-Dist and Obsoletes-Dist. (:pr:`513`) +- Removed outdated warning about Python version mixing with Pipenv. (:pr:`501`) +- Simplified packaging tutorial. (:pr:`498`) +- Updated Windows users instructions for clarity. (:pr:`493`) +- Updated the license section description for completeness. (:pr:`492`) +- Added specification-style document to contributing section. (:pr:`489`) +- Added documentation types to contributing guide. (:pr:`485`) + +April 2018 +---------- + +- Added README guide. 
(:pr:`461`) +- Updated instructions and status for PyPI launch. (:pr:`475`) +- Added instructions for Warehouse. (:pr:`471`) +- Removed GPG references from publishing tutorial. (:pr:`466`) +- Added 'What’s in which Python 3.4–3.6?'. (:pr:`468`) +- Added a guide for phasing out Python versions. (:pr:`459`) +- Made default Description-Content-Type variant GFM. (:pr:`462`) + +March 2018 +---------- + +- Updated "installing scientific packages". (:pr:`455`) +- Added :file:`long_description_content_type` to follow PEP 556. (:pr:`457`) +- Clarified a long description classifier on pypi.org. (:pr:`456`) +- Updated Core Metadata spec to follow PEP 556. (:pr:`412`) + +February 2018 +------------- + +- Added python3-venv and python3-pip to Debian installation instructions. (:pr:`445`) +- Updated PyPI migration info. (:pr:`439`) +- Added a warning about managing multiple versions with pipenv. (:pr:`430`) +- Added example of multiple emails to Core Metadata. (:pr:`429`) +- Added explanation of "legacy" in test.pypi.org/legacy. (:pr:`426`) + +January 2018 +------------ + +- Added a link to PyPI's list of classifiers. (:pr:`425`) +- Updated README.rst explanation. (:pr:`419`) + +December 2017 +------------- + +- Replaced :file:`~` with :file:`$HOME` in guides and tutorials. (:pr:`418`) +- Noted which fields can be used with environment markers. (:pr:`416`) +- Updated Requires-Python section. (:pr:`414`) +- Added news page. (:pr:`404`) + +November 2017 +------------- + +- Introduced a new dependency management tutorial based on Pipenv. (:pr:`402`) +- Updated the *Single Sourcing Package Version* tutorial to reflect pip's current + strategy. (:pr:`400`) +- Added documentation about the ``py_modules`` argument to ``setup``. (:pr:`398`) +- Simplified the wording for the :file:`manifest.in` section. (:pr:`395`) + +October 2017 +------------ + +- Added a specification for the :file:`entry_points.txt` file. 
(:pr:`398`) +- Created a new guide for managing packages using ``pip`` and ``virtualenv``. (:pr:`385`) +- Split the specifications page into multiple pages. (:pr:`386`) + +September 2017 +-------------- + +- Encouraged using ``readme_renderer`` to validate :file:`README.rst`. + (:pr:`379`) +- Recommended using the ``--user-base`` option. (:pr:`374`) + +August 2017 +----------- + +- Added a new, experimental tutorial on installing packages using ``Pipenv``. (:pr:`369`) +- Added a new guide on how to use ``TestPyPI``. (:pr:`366`) +- Added :file:`pypi.org` as a term. (:pr:`365`) + +July 2017 +--------- + +- Added ``flit`` to the key projects list. (:pr:`358`) +- Added ``enscons`` to the list of key projects. (:pr:`357`) +- Updated this guide's ``readme`` with instructions on how to build the guide locally. (:pr:`356`) +- Made the new ``TestPyPI`` URL more visible, adding note to homepage about pypi.org. (:pr:`354`) +- Added a note about the removal of the explicit registration API. (:pr:`347`) + +June 2017 +--------- + +- Added a document on migrating uploads to :file:`PyPI.org`. (:pr:`339`) +- Added documentation for ``python_requires``. (:pr:`338`) +- Added a note about PyPI migration in the *Tool Recommendations* tutorial. (:pr:`335`) +- Added a note that :file:`manifest.in` does not affect wheels. (:pr:`332`) +- Added a license section to the distributing guide. (:pr:`331`) +- Expanded the section on the ``name`` argument. (:pr:`329`) +- Adjusted the landing page. (:pr:`327`, :pr:`326`, :pr:`324`) +- Updated to Sphinx 1.6.2. (:pr:`323`) +- Switched to the PyPA theme. (:pr:`305`) +- Re-organized the documentation into the new structure. (:pr:`318`) + +May 2017 +-------- + +- Added documentation for the ``Description-Content-Type`` field. (:pr:`258`) +- Added contributor and style guide. (:pr:`307`) +- Documented ``pip`` and ``easy_install``'s differences for per-project indexes. 
(:pr:`233`) + +April 2017 +---------- + +- Added travis configuration for testing pull requests. (:pr:`300`) +- Mentioned the requirement of the ``wheel`` package for creating wheels (:pr:`299`) +- Removed the ``twine register`` reference in the *Distributing Packages* tutorial. (:pr:`271`) +- Added a topic on plugin discovery. (:pr:`294`, :pr:`296`) +- Added a topic on namespace packages. (:pr:`290`) +- Added documentation explaining prominently how to install ``pip`` in ``/usr/local``. (:pr:`230`) +- Updated development mode documentation to mention that order of local packages matters. (:pr:`208`) +- Convert readthedocs link for their ``.org`` -> ``.io`` migration for hosted projects (:pr:`239`) +- Swapped order of :file:`setup.py` arguments for the upload command, as order + is significant. (:pr:`260`) +- Explained how to install from unsupported sources using a helper application. (:pr:`289`) + + +March 2017 +---------- + +- Covered ``manylinux1`` in *Platform Wheels*. (:pr:`283`) + +February 2017 +------------- + +- Added :pep:`518`. (:pr:`281`) diff --git a/source/overview.rst b/source/overview.rst new file mode 100644 index 000000000..8c68036a7 --- /dev/null +++ b/source/overview.rst @@ -0,0 +1,498 @@ +============================ +Overview of Python Packaging +============================ + +.. Editors, see notes at the bottom of the document for maintenance info. + +As a general-purpose programming language, Python is designed to be +used in many ways. You can build web sites or industrial robots or a +game for your friends to play, and much more, all using the same +core technology. + +Python's flexibility is why the first step in every Python project +must be to think about the project's audience and the corresponding +environment where the project will run. It might seem strange to think +about packaging before writing code, but this process does wonders for +avoiding future headaches. 
+
+This overview provides a general-purpose decision tree for reasoning
+about Python's plethora of packaging options. Read on to choose the best
+technology for your next project.
+
+Thinking about deployment
+-------------------------
+
+Packages exist to be installed (or *deployed*), so before you package
+anything, you'll want to have some answers to the deployment questions
+below:
+
+* Who are your software's users? Will your software be installed by
+  other developers doing software development, operations people in a
+  datacenter, or a less software-savvy group?
+* Is your software intended to run on servers, desktops, mobile
+  clients (phones, tablets, etc.), or embedded in dedicated devices?
+* Is your software installed individually, or in large deployment batches?
+
+Packaging is all about target environment and deployment
+experience. There are many answers to the questions above and each
+combination of circumstances has its own solutions. With this
+information, the following overview will guide you to the packaging
+technologies best suited to your project.
+
+Packaging Python libraries and tools
+------------------------------------
+
+You may have heard about PyPI, ``setup.py``, and ``wheel``
+files. These are just a few of the tools Python's ecosystem provides
+for distributing Python code to developers, which you can read about in
+:doc:`guides/distributing-packages-using-setuptools`.
+
+The following approaches to packaging are meant for libraries and
+tools used by a technical audience in a development setting. If you're
+looking for ways to package Python for a non-technical audience and/or
+a production setting, skip ahead to :ref:`packaging-applications`.
+
+Python modules
+^^^^^^^^^^^^^^
+
+A Python file, provided it only relies on the standard library, can be
+redistributed and reused. You will also need to ensure it's written
+for the right version of Python, and only relies on the standard
+library.
+ +This is great for sharing simple scripts and snippets between people +who both have compatible Python versions (such as via email, +StackOverflow, or GitHub gists). There are even some entire Python +libraries that offer this as an option, such as +:doc:`bottle.py` and :doc:`boltons +`. + +However, this pattern won't scale for projects that consist of +multiple files, need additional libraries, or need a specific version +of Python, hence the options below. + +Python source distributions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If your code consists of multiple Python files, it's usually organized +into a directory structure. Any directory containing Python files can +comprise an :term:`Import Package`. + +Because packages consist of multiple files, they are harder to +distribute. Most protocols support transferring only one file at a +time (when was the last time you clicked a link and it downloaded +multiple files?). It's easier to get incomplete transfers, and harder +to guarantee code integrity at the destination. + +So long as your code contains nothing but pure Python code, and you +know your deployment environment supports your version of Python, then +you can use Python's native packaging tools to create a *source* +:term:`Distribution Package`, or *sdist* for short. + +Python's *sdists* are compressed archives (``.tar.gz`` files) +containing one or more packages or modules. If your code is +pure-Python, and you only depend on other Python packages, you can +go to the :ref:`source-distribution-format` specification to learn more. + +If you rely on any non-Python code, or non-Python packages (such as +`libxml2 `_ in the case of +`lxml `_, or BLAS libraries in the +case of `numpy `_), you will need to +use the format detailed in the next section, which also has many +advantages for pure-Python libraries. + +.. note:: Python and PyPI support multiple distributions providing + different implementations of the same package. 
For instance the + unmaintained-but-seminal `PIL distribution + `_ provides the PIL package, and so + does `Pillow `_, an + actively-maintained fork of PIL! + + This Python packaging superpower makes it possible for Pillow to be + a drop-in replacement for PIL, just by changing your project's + ``install_requires`` or ``requirements.txt``. + +Python binary distributions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +So much of Python's practical power comes from its ability to +integrate with the software ecosystem, in particular libraries written +in C, C++, Fortran, Rust, and other languages. + +Not all developers have the right tools or experiences to build these +components written in these compiled languages, so Python created the +:term:`Wheel`, a package format designed to ship libraries with +compiled artifacts. In fact, Python's package installer, ``pip``, +always prefers wheels because installation is always faster, so even +pure-Python packages work better with wheels. + +Binary distributions are best when they come with source distributions +to match. Even if you don't upload wheels of your code for every +operating system, by uploading the sdist, you're enabling users of +other platforms to still build it for themselves. Default to +publishing both sdist and wheel archives together, *unless* you're +creating artifacts for a very specific use case where you know the +recipient only needs one or the other. + +Python and PyPI make it easy to upload both wheels and sdists +together. Just follow the :doc:`tutorials/packaging-projects` +tutorial. + +.. figure:: assets/py_pkg_tools_and_libs.png + :width: 80% + :alt: A summary of Python's packaging capabilities for tools and libraries. + + Python's recommended built-in library and tool packaging + technologies. Excerpted from `The Packaging Gradient (2017) + `_. + +.. _packaging-applications: + +Packaging Python applications +----------------------------- + +So far we've only discussed Python's native distribution tools. 
Based +on our introduction, you would be correct to infer these built-in +approaches only target environments which have Python, and an +audience who knows how to install Python packages. + +With the variety of operating systems, configurations, and people out +there, this assumption is only safe when targeting a developer +audience. + +Python's native packaging is mostly built for distributing reusable +code, called libraries, between developers. You can piggyback +**tools**, or basic applications for developers, on top of Python's +library packaging, using technologies like +:doc:`setuptools entry_points `. + +Libraries are building blocks, not complete applications. For +distributing applications, there's a whole new world of technologies +out there. + +The next few sections organize these application packaging options +according to their dependencies on the target environment, +so you can choose the right one for your project. + +Depending on a framework +^^^^^^^^^^^^^^^^^^^^^^^^ + +Some types of Python applications, like web site backends and other +network services, are common enough that they have frameworks to +enable their development and packaging. Other types of applications, +like dynamic web frontends and mobile clients, are complex enough to +target that a framework becomes more than a convenience. + +In all these cases, it makes sense to work backwards, from the +framework's packaging and deployment story. Some frameworks include a +deployment system which wraps the technologies outlined in the rest of +the guide. In these cases, you'll want to defer to your framework's +packaging guide for the easiest and most reliable production experience. + +If you ever wonder how these platforms and frameworks work under the +hood, you can always read the sections beyond. + +Service platforms +***************** + +If you're developing for a +"`Platform-as-a-Service `_" +or "PaaS", you are going to want to follow their respective packaging +guides. 
These types of platforms take care of packaging and deployment, +as long as you follow their patterns. Most software does not fit one of +these templates, hence the existence of all the other options below. + +If you're developing software that will be deployed to machines you +own, users' personal computers, or any other arrangement, read on. + +Web browsers and mobile applications +************************************ + +Python's steady advances are leading it into new spaces. These days +you can write a mobile app or web application frontend in +Python. While the language may be familiar, the packaging and +deployment practices are brand new. + +If you're planning on releasing to these new frontiers, you'll want to +check out the following frameworks, and refer to their packaging +guides: + +* `Kivy `_ +* `Beeware `_ +* `Brython `_ +* `Flexx `_ + +If you are *not* interested in using a framework or platform, or just +wonder about some of the technologies and techniques utilized by the +frameworks above, continue reading below. + +Depending on a pre-installed Python +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Pick an arbitrary computer, and depending on the context, there's a very +good chance Python is already installed. Included by default in most +Linux and Mac operating systems for many years now, you can reasonably +depend on Python preexisting in your data centers or on the personal +machines of developers and data scientists. + +Technologies which support this model: + +* :gh:`PEX ` (Python EXecutable) +* :doc:`zipapp ` (does not help manage dependencies, requires Python 3.5+) +* :gh:`shiv ` (requires Python 3) + +.. note:: Of all the approaches here, depending on a pre-installed + Python relies the most on the target environment. Of course, + this also makes for the smallest package, as small as + single-digit megabytes, or even kilobytes. 
+ + In general, decreasing the dependency on the target system + increases the size of our package, so the solutions here + are roughly arranged by increasing size of output. + +.. _depending-on-a-separate-ecosystem: + +Depending on a separate software distribution ecosystem +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For a long time many operating systems, including Mac and Windows, +lacked built-in package management. Only recently did these OSes gain +so-called "app stores", but even those focus on consumer applications +and offer little for developers. + +Developers long sought remedies, and in this struggle, emerged with +their own package management solutions, such as `Homebrew +`_. The most relevant alternative for Python +developers is a package ecosystem called `Anaconda +`_. Anaconda +is built around Python and is increasingly common in academic, +analytical, and other data-oriented environments, even making its way +`into server-oriented environments +`_. + +Instructions on building and publishing for the Anaconda ecosystem: + +* `Building libraries and applications with conda `_ +* `Transitioning a native Python package to Anaconda `_ + +A similar model involves installing an alternative Python +distribution, but does not support arbitrary operating system-level +packages: + +* `ActiveState ActivePython `_ +* `WinPython `_ + +.. _bringing-your-own-python: + +Bringing your own Python executable +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Computing as we know it is defined by the ability to execute +programs. Every operating system natively supports one or more formats +of programs they can natively execute. + +There are many techniques and technologies which turn your Python +program into one of these formats, most of which involve embedding the +Python interpreter and any other dependencies into a single executable +file. 
+ +This approach, called *freezing*, offers wide compatibility and +seamless user experience, though often requires multiple technologies, +and a good amount of effort. + +A selection of Python freezers: + +* `pyInstaller `_ - Cross-platform +* `cx_Freeze `_ - Cross-platform +* `constructor `_ - For command-line installers +* `py2exe `_ - Windows only +* `py2app `_ - Mac only +* `osnap `_ - Windows and Mac +* `pynsist `_ - Windows only + +Most of the above imply single-user deployments. For multi-component +server applications, see :gh:`Chef Omnibus +`. + + +Bringing your own userspace +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +An increasing number of operating systems -- including Linux, Mac OS, +and Windows -- can be set up to run applications packaged as +lightweight images, using a relatively modern arrangement often +referred to as `operating-system-level virtualization +`_, +or *containerization*. + +These techniques are mostly Python agnostic, because they package +whole OS filesystems, not just Python or Python packages. + +Adoption is most extensive among Linux servers, where the technology +originated and where the technologies below work best: + +* `AppImage `_ +* `Docker `_ +* `Flatpak `_ +* `Snapcraft `_ + +Bringing your own kernel +^^^^^^^^^^^^^^^^^^^^^^^^ + +Most operating systems support some form of classical virtualization, +running applications packaged as images containing a full operating +system of their own. Running these virtual machines, or VMs, is a +mature approach, widespread in data center environments. + +These techniques are mostly reserved for larger scale deployments in +data centers, though certain complex applications can benefit from +this packaging. 
The technologies are Python agnostic, and include: + +* `Vagrant `_ +* `VHD `_, `AMI `_, and :doc:`other formats ` +* `OpenStack `_ - A cloud management system in Python, with extensive VM support + +Bringing your own hardware +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The most all-encompassing way to ship your software would be to ship +it already-installed on some hardware. This way, your software's user +would require only electricity. + +Whereas the virtual machines described above are primarily reserved +for the tech-savvy, you can find hardware appliances being used by +everyone from the most advanced data centers to the youngest children. + +Embed your code on an :gh:`Adafruit `, +`MicroPython `_, or more-powerful hardware +running Python, then ship it to the datacenter or your users' +homes. They plug and play, and you can call it a day. + +.. figure:: assets/py_pkg_applications.png + :width: 80% + :alt: A summary of technologies used to package Python applications. + + The simplified gamut of technologies used to package Python applications. + +What about... +------------- + +The sections above can only summarize so much, and you might be +wondering about some of the more conspicuous gaps. + +Operating system packages +^^^^^^^^^^^^^^^^^^^^^^^^^ + +As mentioned in :ref:`depending-on-a-separate-ecosystem` above, some operating +systems have package managers of their own. If you're very sure of the +operating system you're targeting, you can depend directly on a format +like `deb `_ (for +Debian, Ubuntu, etc.) or `RPM +`_ (for Red Hat, +Fedora, etc.), and use that built-in package manager to take care of +installation, and even deployment. You can even use `FPM +`_ to +generate both deb and RPMs from the same source. + +In most deployment pipelines, the OS package manager is just one piece +of the puzzle. 
+ +virtualenv +^^^^^^^^^^ + +:doc:`Virtualenvs ` have +been an indispensable tool for multiple generations of Python +developer, but are slowly fading from view, as they are being wrapped +by higher-level tools. With packaging in particular, virtualenvs are +used as a primitive in :doc:`the dh-virtualenv tool +` and +`osnap `_, both of which wrap +virtualenvs in a self-contained way. + +For production deployments, do not rely on running ``python -m pip install`` +from the Internet into a virtualenv, as one might do in a development +environment. The overview above is full of much better solutions. + +Security +^^^^^^^^ + +The further down the gradient you come, the harder it gets to update +components of your package. Everything is more tightly bound together. + +For example, if a kernel security issue emerges, and you're deploying +containers, the host system's kernel can be updated without requiring +a new build on behalf of the application. If you deploy VM images, +you'll need a new build. Whether or not this dynamic makes one option +more secure is still a bit of an old debate, going back to the +still-unsettled matter of `static versus dynamic linking +`_. + +Wrap up +------- + +Packaging in Python has a bit of a reputation for being a bumpy +ride. This impression is mostly a byproduct of Python's +versatility. Once you understand the natural boundaries between each +packaging solution, you begin to realize that the varied landscape is +a small price Python programmers pay for using one of the most +balanced, flexible languages available. + + +.. Editing notes: + + Some notes to keep in mind when updating the Python Packaging Overview: + + This document targets at an intermediate audience, + lower-mid-level to early-advanced Python developers. It's expected + that most developers finding this document will have already + encountered several packaging technologies, through package + managers, app stores, pip, and so forth. 
They may have even + shipped a few packages of their own. They are smart enough to have + built something to ship, and experienced (or frustrated) enough to + know to search for prior art. + + In the spirit of being a succinct, "to-the-point" overview, we + forego the basics (like, "what is packaging?"). True beginners + rarely try to ship their very first lines of code, and when they + do, they are often working according to a text and/or framework + with its own directions and affordances. + + Meanwhile, the target audience of intermediate + developers/apprentice packagers will benefit most from a framework + that helps them sort out the differences and reasons for such a + wide variety of technologies. + + We want to foster an understanding that packaging technologies are + not so much competing, as they are trying to cover a + highly-variable and often very strict set of requirements. "Complex + and nuanced" is an improvement on "arbitrary and complicated". + + As far as content and tone, the aim is to provide a modicum of + background information in an encyclopedic fashion. Be correct and + practical, but as they say on Wikipedia, "Information should not be + included ... solely because it is true or useful. [An article] + should not be a complete exposition of all possible details, but a + summary of accepted knowledge regarding its subject." Emphasis on + the summary, plus ideally many links to other practical resources + for more details. + + Finally, unlike an encyclopedia, this guide takes some style points + from JupyterLab's metadocumentation, which at the time of writing + says: + + - The documentation should be written in the second person, + referring to the reader as “you” and not using the first person + plural “we.” The author of the documentation is not sitting next to + the user, so using “we” can lead to frustration when things don’t + work as expected. 
+ + - Avoid words that trivialize using JupyterLab + such as “simply” or “just.” Tasks that developers find simple or + easy may not be for users. + + Among other useful points. Read more here: + https://jupyterlab.readthedocs.io/en/latest/developer/documentation.html + + At its initial publication in 2018, this document was largely based + on "The Many Layers of Packaging" essay, here: + http://sedimental.org/the_packaging_gradient.html diff --git a/source/patching.rst b/source/patching.rst deleted file mode 100644 index 9822ddab4..000000000 --- a/source/patching.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. _`Patching & Forking`: - -================== -Patching & Forking -================== - -:Page Status: Incomplete -:Last Reviewed: 2014-12-24 - -.. contents:: Contents - :local: - - -:: - - FIXME - - - locally patch 3rd-part projects to deal with unfixed bugs - - old style pkg_resources "patch releases": 1.3-fork1 - - PEP440's local identifiers: http://www.python.org/dev/peps/pep-0440/#local-version-identifiers - - fork and publish when you need to publish a project that depends on the fork - (DONT use dependency links) diff --git a/source/platforms.rst b/source/platforms.rst deleted file mode 100644 index 12ca1f4f8..000000000 --- a/source/platforms.rst +++ /dev/null @@ -1,8 +0,0 @@ -:orphan: - -==================== -Platform Integtation -==================== - - -This content has been moved to :doc:`science` diff --git a/source/quickstart.rst b/source/quickstart.rst deleted file mode 100644 index 9f9654405..000000000 --- a/source/quickstart.rst +++ /dev/null @@ -1,7 +0,0 @@ -:orphan: - -========== -Quickstart -========== - -This content has moved to the :doc:`installing` and :doc:`distributing`. diff --git a/source/self_hosted_repository.rst b/source/self_hosted_repository.rst deleted file mode 100644 index 41bb1fd6a..000000000 --- a/source/self_hosted_repository.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. 
_`Hosting your Own Simple Repository`: - -================================== -Hosting your Own Simple Repository -================================== - -:Page Status: Complete -:Last Reviewed: 2015-09-24 - - -If you wish to host your own simple repository [1]_, you can either use a -software package like `devpi`_ or you can use simply create the proper -directory structure and use any web server that can serve static files and -generate an autoindex. - -In either case, since you'll be hosting a repository that is likely not in -your user's default repositories, you should instruct them in your project's -description to configure their installer appropriately. For example with pip:: - - pip install --extra-index-url https://python.example.com/ foobar - -In addition, it is **highly** recommended that you serve your repository with -valid HTTPS. At this time, the security of your user's installations depends on -all repositories using a valid HTTPS setup. - - -"Manual" Repository -=================== - -The directory layout is fairly simple, within a root directory you need to -create a directory for each project. This directory should be the normalized -name (as defined by PEP 503) of the project. Within each of these directories -simply place each of the downloadable files. If you have the projects "Foo" -(with the versions 1.0 and 2.0) and "bar" (with the version 0.1) You should -end up with a structure that looks like:: - - . - ├── bar - │   └── bar-0.1.tar.gz - └── foo - ├── Foo-1.0.tar.gz - └── Foo-2.0.tar.gz - -Once you have this layout, simply configure your webserver to serve the root -directory with autoindex enabled. For an example using the built in Web server -in `Twisted`_, you would simply run ``twistd -n web --path .`` and then -instruct users to add the URL to their installer's configuration. - ----- - -.. [1] For complete documentation of the simple repository protocol, see - PEP 503. - - -.. _devpi: http://doc.devpi.net/latest/ -.. 
_Twisted: https://twistedmatrix.com/ diff --git a/source/shared/build-backend-tabs.rst b/source/shared/build-backend-tabs.rst new file mode 100644 index 000000000..7fc3a61da --- /dev/null +++ b/source/shared/build-backend-tabs.rst @@ -0,0 +1,34 @@ +.. (comment) This file is included in guides/writing-pyproject-toml.rst and tutorials/packaging-projects.rst. +.. The minimum versions here are the versions that introduced support for PEP 639. + +.. tab:: Hatchling + + .. code-block:: toml + + [build-system] + requires = ["hatchling >= 1.26"] + build-backend = "hatchling.build" + +.. tab:: setuptools + + .. code-block:: toml + + [build-system] + requires = ["setuptools >= 77.0.3"] + build-backend = "setuptools.build_meta" + +.. tab:: Flit + + .. code-block:: toml + + [build-system] + requires = ["flit_core >= 3.12.0, <4"] + build-backend = "flit_core.buildapi" + +.. tab:: PDM + + .. code-block:: toml + + [build-system] + requires = ["pdm-backend >= 2.4.0"] + build-backend = "pdm.backend" diff --git a/source/single_source_version.rst b/source/single_source_version.rst deleted file mode 100644 index bc1c3587e..000000000 --- a/source/single_source_version.rst +++ /dev/null @@ -1,123 +0,0 @@ -.. _`Single sourcing the version`: - -=================================== -Single-sourcing the Project Version -=================================== - -:Page Status: Complete -:Last Reviewed: 2015-12-03 - - -There are many techniques to maintain a single source of truth for the version -number of your project: - -#. Read the file in ``setup.py`` and parse the version with a regex. 
Example ( - from `pip setup.py `_):: - - def read(*names, **kwargs): - with io.open( - os.path.join(os.path.dirname(__file__), *names), - encoding=kwargs.get("encoding", "utf8") - ) as fp: - return fp.read() - - def find_version(*file_paths): - version_file = read(*file_paths) - version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", - version_file, re.M) - if version_match: - return version_match.group(1) - raise RuntimeError("Unable to find version string.") - - setup( - ... - version=find_version("package", "__init__.py") - ... - ) - - .. note:: - - This technique has the disadvantage of having to deal with complexities of regular expressions. - -#. Use an external build tool that either manages updating both locations, or - offers an API that both locations can use. - - Few tools you could use, in no particular order, and not necessarily complete: - `bumpversion `_, - `changes `_, `zest.releaser `_. - - -#. Set the value to a ``__version__`` global variable in a dedicated module in - your project (e.g. ``version.py``), then have ``setup.py`` read and ``exec`` the - value into a variable. - - Using ``execfile``: - - :: - - execfile('...sample/version.py') - # now we have a `__version__` variable - # later on we use: __version__ - - Using ``exec``: - - :: - - version = {} - with open("...sample/version.py") as fp: - exec(fp.read(), version) - # later on we use: version['__version__'] - - Example using this technique: `warehouse `_. - -#. Place the value in a simple ``VERSION`` text file and have both ``setup.py`` - and the project code read it. - - :: - - with open(os.path.join(mypackage_root_dir, 'VERSION')) as version_file: - version = version_file.read().strip() - - An advantage with this technique is that it's not specific to Python. Any - tool can read the version. - - .. warning:: - - With this approach you must make sure that the ``VERSION`` file is included in - all your source and binary distributions (e.g. 
add ``include VERSION`` to your - ``MANIFEST.in``). - -#. Set the value in ``setup.py``, and have the project code use the - ``pkg_resources`` API. - - :: - - import pkg_resources - assert pkg_resources.get_distribution('pip').version == '1.2.0' - - Be aware that the ``pkg_resources`` API only knows about what's in the - installation metadata, which is not necessarily the code that's currently - imported. - - -#. Set the value to ``__version__`` in ``sample/__init__.py`` and import - ``sample`` in ``setup.py``. - - :: - - import sample - setup( - ... - version=sample.__version__ - ... - ) - - Although this technique is common, beware that it will fail if - ``sample/__init__.py`` imports packages from ``install_requires`` - dependencies, which will very likely not be installed yet when ``setup.py`` - is run. - - -#. Keep the version number in the tags of a version control system (Git, Mercurial, etc) - instead of in the code, and automatically extract it from there using - `setuptools_scm `_. diff --git a/source/specifications.rst b/source/specifications.rst deleted file mode 100644 index 84d68d507..000000000 --- a/source/specifications.rst +++ /dev/null @@ -1,114 +0,0 @@ - -.. _specifications: - -=================== -PyPA Specifications -=================== - -:Page Status: Complete -:Last Reviewed: 2016-01-22 - -This is a list of currently active interoperability specifications maintained -by the Python Packaging Authority. - -Package distribution metadata -############################# - -Core metadata -============= - -The current core metadata file format, version 1.2, is specified in :pep:`345`. - -However, the version specifiers and environment markers sections of that PEP -have been superceded as described below. In addition, metadata files are -permitted to contain the following additional field: - -Provides-Extra (multiple use) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -A string containing the name of an optional feature. Must be a valid Python -identifier. 
May be used to make a dependency conditional on whether the -optional feature has been requested. - -Example:: - - Provides-Extra: pdf - Requires-Dist: reportlab; extra == 'pdf' - -A second distribution requires an optional dependency by placing it -inside square brackets, and can request multiple features by separating -them with a comma (,). The requirements are evaluated for each requested -feature and added to the set of requirements for the distribution. - -Example:: - - Requires-Dist: beaglevote[pdf] - Requires-Dist: libexample[test, doc] - -Two feature names `test` and `doc` are reserved to mark dependencies that -are needed for running automated tests and generating documentation, -respectively. - -It is legal to specify ``Provides-Extra:`` without referencing it in any -``Requires-Dist:``. - - -Version Specifiers -================== - -Version numbering requirements and the semantics for specifying comparisons -between versions are defined in :pep:`440`. - -The version specifiers section in this PEP supersedes the version specifiers -section in :pep:`345`. - -Dependency Specifiers -===================== - -The dependency specifier format used to declare a dependency on another -component is defined in :pep:`508`. - -The environment markers section in this PEP supersedes the environment markers -section in :pep:`345`. - -Source Distribution Format -========================== - -The source distribution format (``sdist``) is not currently formally defined. -Instead, its format is implicitly defined by the behaviour of the -standard library's ``distutils`` module when executing the ``setup.py sdist`` -command. - -Binary Distribution Format -========================== - -The binary distribution format (``wheel``) is defined in :pep:`427`. - -Platform Compatibility Tags -=========================== - -The platform compatibility tagging model used for ``wheel`` distribution is -defined in :pep:`425`. 
- -The scheme defined in that PEP is insufficient for public distribution -of Linux wheel files (and \*nix wheel files in general), so :pep:`513` was -created to define the ``manylinux1`` tag. - -Recording Installed Distributions -================================= - -The format used to record installed packages and their contents is defined in -:pep:`376`. - -Note that only the ``dist-info`` directory and the ``RECORD`` file format from -that PEP are currently implemented in the default packaging toolchain. - - -Package index interfaces -######################## - -Simple repository API -===================== - -The current interface for querying available package versions and retrieving packages -from an index server is defined in :pep:`503`. diff --git a/source/specifications/binary-distribution-format.rst b/source/specifications/binary-distribution-format.rst new file mode 100644 index 000000000..8bb41ab40 --- /dev/null +++ b/source/specifications/binary-distribution-format.rst @@ -0,0 +1,483 @@ +.. highlight:: text + +.. _binary-distribution-format: + +========================== +Binary distribution format +========================== + +This page specifies the binary distribution format for Python packages, +also called the wheel format. + +A wheel is a ZIP-format archive with a specially formatted file name and +the ``.whl`` extension. It contains a single distribution nearly as it +would be installed according to PEP 376 with a particular installation +scheme. Although a specialized installer is recommended, a wheel file +may be installed by simply unpacking into site-packages with the standard +'unzip' tool while preserving enough information to spread its contents +out onto their final paths at any later time. + + +Details +======= + +Installing a wheel 'distribution-1.0-py32-none-any.whl' +------------------------------------------------------- + +Wheel installation notionally consists of two phases: + +- Unpack. + + a. 
Parse ``distribution-1.0.dist-info/WHEEL``. + b. Check that installer is compatible with Wheel-Version. Warn if + minor version is greater, abort if major version is greater. + c. If Root-Is-Purelib == 'true', unpack archive into purelib + (site-packages). + d. Else unpack archive into platlib (site-packages). + +- Spread. + + a. Unpacked archive includes ``distribution-1.0.dist-info/`` and (if + there is data) ``distribution-1.0.data/``. + b. Move each subtree of ``distribution-1.0.data/`` onto its + destination path. Each subdirectory of ``distribution-1.0.data/`` + is a key into a dict of destination directories, such as + ``distribution-1.0.data/(purelib|platlib|headers|scripts|data)``. + These subdirectories are :ref:`installation paths defined by sysconfig + `. + c. If applicable, update scripts starting with ``#!python`` to point + to the correct interpreter. + d. Update ``distribution-1.0.dist-info/RECORD`` with the installed + paths. + e. Remove empty ``distribution-1.0.data`` directory. + f. Compile any installed .py to .pyc. (Uninstallers should be smart + enough to remove .pyc even if it is not mentioned in RECORD.) + +Recommended installer features +'''''''''''''''''''''''''''''' + +Rewrite ``#!python``. + In wheel, scripts are packaged in + ``{distribution}-{version}.data/scripts/``. If the first line of + a file in ``scripts/`` starts with exactly ``b'#!python'``, rewrite to + point to the correct interpreter. Unix installers may need to add + the +x bit to these files if the archive was created on Windows. + + The ``b'#!pythonw'`` convention is allowed. ``b'#!pythonw'`` indicates + a GUI script instead of a console script. + +Generate script wrappers. + In wheel, scripts packaged on Unix systems will certainly not have + accompanying .exe wrappers. Windows installers may want to add them + during install. + +Recommended archiver features +''''''''''''''''''''''''''''' + +Place ``.dist-info`` at the end of the archive. 
+ Archivers are encouraged to place the ``.dist-info`` files physically + at the end of the archive. This enables some potentially interesting + ZIP tricks including the ability to amend the metadata without + rewriting the entire archive. + + +File Format +----------- + +.. _wheel-file-name-spec: + +File name convention +'''''''''''''''''''' + +The wheel filename is ``{distribution}-{version}(-{build +tag})?-{python tag}-{abi tag}-{platform tag}.whl``. + +distribution + Distribution name, e.g. 'django', 'pyramid'. + +version + Distribution version, e.g. 1.0. + +build tag + Optional build number. Must start with a digit. Acts as a + tie-breaker if two wheel file names are the same in all other + respects (i.e. name, version, and other tags). Sort as an + empty tuple if unspecified, else sort as a two-item tuple with + the first item being the initial digits as an ``int``, and the + second item being the remainder of the tag as a ``str``. + + A common use-case for build numbers is rebuilding a binary + distribution due to a change in the build environment, + like when using the manylinux image to build + distributions using pre-release CPython versions. + + .. warning:: + + Build numbers are not a part of the distribution version and thus are difficult + to reference externally, especially so outside the Python ecosystem of tools and standards. + A common case where a distribution would need to be referenced externally is when + resolving a security vulnerability. + + Due to this limitation, new distributions which need to be referenced externally + **should not** use build numbers when building the new distribution. + Instead a **new distribution version** should be created for such cases. + + +language implementation and version tag + E.g. 'py27', 'py2', 'py3'. + +abi tag + E.g. 'cp33m', 'abi3', 'none'. + +platform tag + E.g. 'linux_x86_64', 'any'. 
+ +For example, ``distribution-1.0-1-py27-none-any.whl`` is the first +build of a package called 'distribution', and is compatible with +Python 2.7 (any Python 2.7 implementation), with no ABI (pure Python), +on any CPU architecture. + +The last three components of the filename before the extension are +called "compatibility tags." The compatibility tags express the +package's basic interpreter requirements and are detailed in PEP 425. + +Escaping and Unicode +'''''''''''''''''''' + +As the components of the filename are separated by a dash (``-``, HYPHEN-MINUS), +this character cannot appear within any component. This is handled as follows: + +- In distribution names, any run of ``-_.`` characters (HYPHEN-MINUS, LOW LINE + and FULL STOP) should be replaced with ``_`` (LOW LINE), and uppercase + characters should be replaced with corresponding lowercase ones. This is + equivalent to regular :ref:`name normalization ` followed + by replacing ``-`` with ``_``. Tools consuming wheels must be prepared to accept + ``.`` (FULL STOP) and uppercase letters, however, as these were allowed by an earlier + version of this specification. +- Version numbers should be normalised according to the :ref:`Version specifier + specification `. Normalised version numbers cannot contain ``-``. +- The remaining components may not contain ``-`` characters, so no escaping + is necessary. + +Tools producing wheels should verify that the filename components do not contain +``-``, as the resulting file may not be processed correctly if they do. + +The archive filename is Unicode. It will be some time before the tools +are updated to support non-ASCII filenames, but they are supported in +this specification. + +The filenames *inside* the archive are encoded as UTF-8. Although some +ZIP clients in common use do not properly display UTF-8 filenames, +the encoding is supported by both the ZIP specification and Python's +``zipfile``. 
+ +File contents +''''''''''''' + +The contents of a wheel file, where {distribution} is replaced with the +:ref:`normalized name ` of the package, e.g. +``beaglevote`` and {version} is replaced +with its :ref:`normalized version `, +e.g. ``1.0.0``, (with dash/``-`` characters replaced with underscore/``_`` characters +in both fields) consist of: + +#. ``/``, the root of the archive, contains all files to be installed in + ``purelib`` or ``platlib`` as specified in ``WHEEL``. ``purelib`` and + ``platlib`` are usually both ``site-packages``. +#. ``{distribution}-{version}.dist-info/`` contains metadata. +#. :file:`{distribution}-{version}.dist-info/licenses/` contains license files. +#. ``{distribution}-{version}.data/`` contains one subdirectory + for each non-empty install scheme key not already covered, where + the subdirectory name is an index into a dictionary of install paths + (e.g. ``data``, ``scripts``, ``headers``, ``purelib``, ``platlib``). +#. Python scripts must appear in ``scripts`` and begin with exactly + ``b'#!python'`` in order to enjoy script wrapper generation and + ``#!python`` rewriting at install time. They may have any or no + extension. The ``scripts`` directory may only contain regular files. +#. ``{distribution}-{version}.dist-info/METADATA`` is Metadata version 1.1 + or greater format metadata. +#. ``{distribution}-{version}.dist-info/WHEEL`` is metadata about the archive + itself in the same basic key: value format:: + + Wheel-Version: 1.0 + Generator: bdist_wheel 1.0 + Root-Is-Purelib: true + Tag: py2-none-any + Tag: py3-none-any + Build: 1 + +#. ``Wheel-Version`` is the version number of the Wheel specification. +#. ``Generator`` is the name and optionally the version of the software + that produced the archive. +#. ``Root-Is-Purelib`` is true if the top level directory of the archive + should be installed into purelib; otherwise the root should be installed + into platlib. +#. 
``Tag`` is the wheel's expanded compatibility tags; in the example the + filename would contain ``py2.py3-none-any``. +#. ``Build`` is the build number and is omitted if there is no build number. +#. A wheel installer should warn if Wheel-Version is greater than the + version it supports, and must fail if Wheel-Version has a greater + major version than the version it supports. +#. Wheel, being an installation format that is intended to work across + multiple versions of Python, does not generally include .pyc files. +#. Wheel does not contain setup.py or setup.cfg. + +This version of the wheel specification is based on the distutils install +schemes and does not define how to install files to other locations. +The layout offers a superset of the functionality provided by the existing +wininst and egg binary formats. + + +The .dist-info directory +^^^^^^^^^^^^^^^^^^^^^^^^ + +#. Wheel .dist-info directories include at a minimum METADATA, WHEEL, + and RECORD. +#. METADATA is the package metadata, the same format as PKG-INFO as + found at the root of sdists. +#. WHEEL is the wheel metadata specific to a build of the package. +#. RECORD is a list of (almost) all the files in the wheel and their + secure hashes. Unlike PEP 376, every file except RECORD, which + cannot contain a hash of itself, must include its hash. The hash + algorithm must be sha256 or better; specifically, md5 and sha1 are + not permitted, as signed wheel files rely on the strong hashes in + RECORD to validate the integrity of the archive. +#. PEP 376's INSTALLER and REQUESTED are not included in the archive. +#. RECORD.jws is used for digital signatures. It is not mentioned in + RECORD. +#. RECORD.p7s is allowed as a courtesy to anyone who would prefer to + use S/MIME signatures to secure their wheel files. It is not + mentioned in RECORD. +#. During extraction, wheel installers verify all the hashes in RECORD + against the file contents. 
Apart from RECORD and its signatures, + installation will fail if any file in the archive is not both + mentioned and correctly hashed in RECORD. + +Subdirectories in :file:`.dist-info/` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Subdirectories under :file:`.dist-info/` are reserved for future use. +The following subdirectory names under :file:`.dist-info/` are reserved for specific usage: + +================= ============== +Subdirectory name PEP / Standard +================= ============== +``licenses`` :pep:`639` +``license_files`` :pep:`639` +``LICENSES`` `REUSE licensing framework `__ +``sboms`` :pep:`770` +================= ============== + +The :file:`.dist-info/licenses/` directory +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If the metadata version is 2.4 or greater and one or more ``License-File`` +fields is specified, the :file:`.dist-info/` directory MUST contain a +:file:`licenses/` subdirectory, which MUST contain the files listed in the +``License-File`` fields in the :file:`METADATA` file at their respective paths +relative to the :file:`licenses/` directory. + +The :file:`.dist-info/sboms/` directory +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All files contained within the :file:`.dist-info/sboms/` directory MUST +be Software Bill-of-Materials (SBOM) files that describe software contained +within the distribution archive. + +The .data directory +^^^^^^^^^^^^^^^^^^^ + +Any file that is not normally installed inside site-packages goes into +the .data directory, named as the .dist-info directory but with the +.data/ extension:: + + distribution-1.0.dist-info/ + + distribution-1.0.data/ + +The .data directory contains subdirectories with the scripts, headers, +documentation and so forth from the distribution. During installation the +contents of these subdirectories are moved onto their destination paths. + + +Signed wheel files +------------------ + +Wheel files include an extended RECORD that enables digital +signatures. 
PEP 376's RECORD is altered to include a secure hash +``digestname=urlsafe_b64encode_nopad(digest)`` (urlsafe base64 +encoding with no trailing = characters) as the second column instead +of an md5sum. All possible entries are hashed, including any +generated files such as .pyc files, but not RECORD which cannot contain its +own hash. For example:: + + file.py,sha256=AVTFPZpEKzuHr7OvQZmhaU3LvwKz06AJw8mT\_pNh2yI,3144 + distribution-1.0.dist-info/RECORD,, + +The signature file(s) RECORD.jws and RECORD.p7s are not mentioned in +RECORD at all since they can only be added after RECORD is generated. +Every other file in the archive must have a correct hash in RECORD +or the installation will fail. + +If JSON web signatures are used, one or more JSON Web Signature JSON +Serialization (JWS-JS) signatures is stored in a file RECORD.jws adjacent +to RECORD. JWS is used to sign RECORD by including the SHA-256 hash of +RECORD as the signature's JSON payload: + +.. code-block:: json + + { "hash": "sha256=ADD-r2urObZHcxBW3Cr-vDCu5RJwT4CaRTHiFmbcIYY" } + +(The hash value is the same format used in RECORD.) + +If RECORD.p7s is used, it must contain a detached S/MIME format signature +of RECORD. + +A wheel installer is not required to understand digital signatures but +MUST verify the hashes in RECORD against the extracted file contents. +When the installer checks file hashes against RECORD, a separate signature +checker only needs to establish that RECORD matches the signature. + +See + +- https://datatracker.ietf.org/doc/html/rfc7515 +- https://datatracker.ietf.org/doc/html/draft-jones-json-web-signature-json-serialization-01 +- https://datatracker.ietf.org/doc/html/rfc7517 +- https://datatracker.ietf.org/doc/html/draft-jones-jose-json-private-key-01 + + +FAQ +=== + + +Wheel defines a .data directory. Should I put all my data there? 
+----------------------------------------------------------------- + + This specification does not have an opinion on how you should organize + your code. The .data directory is just a place for any files that are + not normally installed inside ``site-packages`` or on the PYTHONPATH. + In other words, you may continue to use ``pkgutil.get_data(package, + resource)`` even though *those* files will usually not be distributed + in *wheel's* ``.data`` directory. + + +Why does wheel include attached signatures? +------------------------------------------- + + Attached signatures are more convenient than detached signatures + because they travel with the archive. Since only the individual files + are signed, the archive can be recompressed without invalidating + the signature or individual files can be verified without having + to download the whole archive. + + +Why does wheel allow JWS signatures? +------------------------------------ + + The JOSE specifications of which JWS is a part are designed to be easy + to implement, a feature that is also one of wheel's primary design + goals. JWS yields a useful, concise pure-Python implementation. + + +Why does wheel also allow S/MIME signatures? +-------------------------------------------- + + S/MIME signatures are allowed for users who need or want to use + existing public key infrastructure with wheel. + + Signed packages are only a basic building block in a secure package + update system. Wheel only provides the building block. + + +What's the deal with "purelib" vs. "platlib"? +--------------------------------------------- + + Wheel preserves the "purelib" vs. "platlib" distinction, which is + significant on some platforms. For example, Fedora installs pure + Python packages to '/usr/lib/pythonX.Y/site-packages' and platform + dependent packages to '/usr/lib64/pythonX.Y/site-packages'. 
+ + A wheel with "Root-Is-Purelib: false" with all its files + in ``{name}-{version}.data/purelib`` is equivalent to a wheel with + "Root-Is-Purelib: true" with those same files in the root, and it + is legal to have files in both the "purelib" and "platlib" categories. + + In practice a wheel should have only one of "purelib" or "platlib" + depending on whether it is pure Python or not and those files should + be at the root with the appropriate setting given for "Root-is-purelib". + + +.. _binary-distribution-format-import-wheel: + +Is it possible to import Python code directly from a wheel file? +---------------------------------------------------------------- + + Technically, due to the combination of supporting installation via + simple extraction and using an archive format that is compatible with + ``zipimport``, a subset of wheel files *do* support being placed directly + on ``sys.path``. However, while this behaviour is a natural consequence + of the format design, actually relying on it is generally discouraged. + + Firstly, wheel *is* designed primarily as a distribution format, so + skipping the installation step also means deliberately avoiding any + reliance on features that assume full installation (such as being able + to use standard tools like ``pip`` and ``virtualenv`` to capture and + manage dependencies in a way that can be properly tracked for auditing + and security update purposes, or integrating fully with the standard + build machinery for C extensions by publishing header files in the + appropriate place). + + Secondly, while some Python software is written to support running + directly from a zip archive, it is still common for code to be written + assuming it has been fully installed. When that assumption is broken + by trying to run the software from a zip archive, the failures can often + be obscure and hard to diagnose (especially when they occur in third + party libraries). 
The two most common sources of problems with this + are the fact that importing C extensions from a zip archive is *not* + supported by CPython (since doing so is not supported directly by the + dynamic loading machinery on any platform) and that when running from + a zip archive the ``__file__`` attribute no longer refers to an + ordinary filesystem path, but to a combination path that includes + both the location of the zip archive on the filesystem and the + relative path to the module inside the archive. Even when software + correctly uses the abstract resource APIs internally, interfacing with + external components may still require the availability of an actual + on-disk file. + + Like metaclasses, monkeypatching and metapath importers, if you're not + already sure you need to take advantage of this feature, you almost + certainly don't need it. If you *do* decide to use it anyway, be + aware that many projects will require a failure to be reproduced with + a fully installed package before accepting it as a genuine bug. + + +History +======= + +- February 2013: This specification was approved through :pep:`427`. +- February 2021: The rules on escaping in wheel filenames were revised, to bring + them into line with what popular tools actually do. +- December 2024: Clarified that the ``scripts`` folder should only contain + regular files (the expected behaviour of consuming tools when encountering + symlinks or subdirectories in this folder is not formally defined, and hence + may vary between tools). +- December 2024: The :file:`.dist-info/licenses/` directory was specified through + :pep:`639`. +- January 2025: Clarified that name and version needs to be normalized for + ``.dist-info`` and ``.data`` directories. 
+ + +Appendix +======== + +Example urlsafe-base64-nopad implementation:: + + # urlsafe-base64-nopad for Python 3 + import base64 + + def urlsafe_b64encode_nopad(data): + return base64.urlsafe_b64encode(data).rstrip(b'=') + + def urlsafe_b64decode_nopad(data): + pad = b'=' * (4 - (len(data) & 3)) + return base64.urlsafe_b64decode(data + pad) diff --git a/source/specifications/build-details/examples/build-details-v1.0.json b/source/specifications/build-details/examples/build-details-v1.0.json new file mode 100644 index 000000000..dd08b230f --- /dev/null +++ b/source/specifications/build-details/examples/build-details-v1.0.json @@ -0,0 +1,51 @@ +{ + "schema_version": "1.0", + "base_prefix": "/usr", + "base_interpreter": "/usr/bin/python", + "platform": "linux-x86_64", + "language": { + "version": "3.14", + "version_info": { + "major": 3, + "minor": 14, + "micro": 0, + "releaselevel": "alpha", + "serial": 0 + } + }, + "implementation": { + "name": "cpython", + "version": { + "major": 3, + "minor": 14, + "micro": 0, + "releaselevel": "alpha", + "serial": 0 + }, + "hexversion": 51249312, + "cache_tag": "cpython-314", + "_multiarch": "x86_64-linux-gnu" + }, + "abi": { + "flags": ["t", "d"], + "extension_suffix": ".cpython-314-x86_64-linux-gnu.so", + "stable_abi_suffix": ".abi3.so" + }, + "suffixes": { + "source": [".py"], + "bytecode": [".pyc"], + "optimized_bytecode": [".pyc"], + "debug_bytecode": [".pyc"], + "extensions": [".cpython-314-x86_64-linux-gnu.so", ".abi3.so", ".so"] + }, + "libpython": { + "dynamic": "/usr/lib/libpython3.14.so.1.0", + "dynamic_stableabi": "/usr/lib/libpython3.so", + "static": "/usr/lib/python3.14/config-3.14-x86_64-linux-gnu/libpython3.14.a", + "link_extensions": true + }, + "c_api": { + "headers": "/usr/include/python3.14", + "pkgconfig_path": "/usr/lib/pkgconfig" + } +} diff --git a/source/specifications/build-details/index.rst b/source/specifications/build-details/index.rst new file mode 100644 index 000000000..0cd5b5fe5 --- /dev/null +++ 
b/source/specifications/build-details/index.rst @@ -0,0 +1,52 @@ +.. _build-details: + +========================== +:file:`build-details.json` +========================== + +.. toctree:: + :hidden: + + v1.0 + + +The ``build-details.json`` file is a standardized file format that provides +build-specific information of a Python installation, such as its version, +extension ABI details, and other information that is specific to that particular +build of Python. + +Starting from Python 3.14, a ``build-details.json`` file is installed in the +platform-independent standard library directory (``stdlib``, e.g. +``/usr/lib/python3.14/build-details.json``). + +Please refer to the :ref:`latest version ` for its +specification. + +.. + Update to point to the latest version! + +.. literalinclude:: examples/build-details-v1.0.json + :caption: Example + :language: json + :linenos: + + +Changelog +--------- + +.. + Order in decreasing order. + +v1.0 +~~~~ + +.. list-table:: + + * - Specification + - :ref:`build-details-v1.0` + + * - Schema + - https://packaging.python.org/en/latest/specifications/schemas/build-details-v1.0.schema.json + + +- Initial version, introduced by :pep:`739`. diff --git a/source/specifications/build-details/v1.0.rst b/source/specifications/build-details/v1.0.rst new file mode 100644 index 000000000..3a8cfe277 --- /dev/null +++ b/source/specifications/build-details/v1.0.rst @@ -0,0 +1,18 @@ +.. _build-details-v1.0: + +=========================== +``build-details.json`` v1.0 +=========================== + + +Specification +------------- + +.. jsonschema:: ../../../extra/specifications/schemas/build-details-v1.0.schema.json + :lift_title: false + + +Example +------- + +.. literalinclude:: examples/build-details-v1.0.json diff --git a/source/specifications/core-metadata.rst b/source/specifications/core-metadata.rst new file mode 100644 index 000000000..39ee7c4cd --- /dev/null +++ b/source/specifications/core-metadata.rst @@ -0,0 +1,954 @@ +.. 
highlight:: text + +.. _`core-metadata`: + +============================ +Core metadata specifications +============================ + +Fields defined in the following specification should be considered valid, +complete and not subject to change. The required fields are: + +- ``Metadata-Version`` +- ``Name`` +- ``Version`` + +All the other fields are optional. + +The standard file format for metadata (including in :doc:`wheels +` and :doc:`installed projects +`) is based on the format of email headers. +However, email formats have been revised several times, and exactly which email +RFC applies to packaging metadata is not specified. In the absence of a precise +definition, the practical standard is set by what the standard library +:mod:`python:email.parser` module can parse using the +:data:`~.python:email.policy.compat32` policy. + +Whenever metadata is serialised to a byte stream (for example, to save +to a file), strings must be serialised using the UTF-8 encoding. + +Although :pep:`566` defined a way to transform metadata into a JSON-compatible +dictionary, this is not yet used as a standard interchange format. The need for +tools to work with years worth of existing packages makes it difficult to shift +to a new format. + +.. note:: *Interpreting old metadata:* In :pep:`566`, the version specifier + field format specification was relaxed to accept the syntax used by popular + publishing tools (namely to remove the requirement that version specifiers + must be surrounded by parentheses). Metadata consumers may want to use the + more relaxed formatting rules even for metadata files that are nominally + less than version 2.1. + + +.. _core-metadata-metadata-version: + +Metadata-Version +================ + +.. versionadded:: 1.0 + +Version of the file format; legal values are "1.0", "1.1", "1.2", "2.1", +"2.2", "2.3", and "2.4". 
+ +Automated tools consuming metadata SHOULD warn if ``metadata_version`` is +greater than the highest version they support, and MUST fail if +``metadata_version`` has a greater major version than the highest +version they support (as described in the +:ref:`Version specifier specification `, +the major version is the value before the first dot). + +For broader compatibility, build tools MAY choose to produce +distribution metadata using the lowest metadata version that includes +all of the needed fields. + +Example:: + + Metadata-Version: 2.4 + + +.. _core-metadata-name: + +Name +==== + +.. versionadded:: 1.0 +.. versionchanged:: 2.1 + Added restrictions on format from the :ref:`name format `. + +The name of the distribution. The name field is the primary identifier for a +distribution. It must conform to the :ref:`name format specification +`. + +Example:: + + Name: BeagleVote + +For comparison purposes, the names should be :ref:`normalized ` before comparing. + +.. _core-metadata-version: + +Version +======= + +.. versionadded:: 1.0 + +A string containing the distribution's version number. This +field must be in the format specified in the +:ref:`Version specifier specification `. + +Example:: + + Version: 1.0a2 + + +.. _core-metadata-dynamic: + +Dynamic (multiple use) +====================== + +.. versionadded:: 2.2 + +A string containing the name of another core metadata field. The field +names ``Name``, ``Version``, and ``Metadata-Version`` may not be specified +in this field. + +When found in the metadata of a source distribution, the following +rules apply: + +1. If a field is *not* marked as ``Dynamic``, then the value of the field + in any wheel built from the sdist MUST match the value in the sdist. + If the field is not in the sdist, and not marked as ``Dynamic``, then + it MUST NOT be present in the wheel. +2. If a field is marked as ``Dynamic``, it may contain any valid value in + a wheel built from the sdist (including not being present at all). 
+ +If the sdist metadata version is older than version 2.2, then all fields should +be treated as if they were specified with ``Dynamic`` (i.e. there are no special +restrictions on the metadata of wheels built from the sdist). + +In any context other than a source distribution, ``Dynamic`` is for information +only, and indicates that the field value was calculated at wheel build time, +and may not be the same as the value in the sdist or in other wheels for the +project. + +Full details of the semantics of ``Dynamic`` are described in :pep:`643`. + +.. _core-metadata-platform: + +Platform (multiple use) +======================= + +.. versionadded:: 1.0 + +A Platform specification describing an operating system supported by +the distribution which is not listed in the "Operating System" Trove classifiers. +See "Classifier" below. + +Examples:: + + Platform: ObscureUnix + Platform: RareDOS + +.. _core-metadata-supported-platform: + +Supported-Platform (multiple use) +================================= + +.. versionadded:: 1.1 + +Binary distributions containing a PKG-INFO file will use the +Supported-Platform field in their metadata to specify the OS and +CPU for which the binary distribution was compiled. The semantics of +the Supported-Platform field are not specified in this PEP. + +Example:: + + Supported-Platform: RedHat 7.2 + Supported-Platform: i386-win32-2791 + + +.. _core-metadata-summary: + +Summary +======= + +.. versionadded:: 1.0 + +A one-line summary of what the distribution does. + +Example:: + + Summary: A module for collecting votes from beagles. + +.. Some of these headings used to have a suffix "(optional)". This became part + of links (...#description-optional). We have changed the headings (required + fields are now listed at the start of the specification), but added explicit + link targets like this one, so that links to the individual sections are not + broken. + + +.. _description-optional: +.. 
_core-metadata-description: + +Description +=========== + +.. versionadded:: 1.0 +.. versionchanged:: 2.1 + This field may be specified in the message body instead. + +A longer description of the distribution that can run to several +paragraphs. Software that deals with metadata should not assume +any maximum size for this field, though people shouldn't include +their instruction manual as the description. + +The contents of this field can be written using reStructuredText +markup [1]_. For programs that work with the metadata, supporting +markup is optional; programs can also display the contents of the +field as-is. This means that authors should be conservative in +the markup they use. + +To support empty lines and lines with indentation with respect to +the RFC 822 format, any CRLF character has to be suffixed by 7 spaces +followed by a pipe ("|") char. As a result, the Description field is +encoded into a folded field that can be interpreted by RFC822 +parser [2]_. + +Example:: + + Description: This project provides powerful math functions + |For example, you can use `sum()` to sum numbers: + | + |Example:: + | + | >>> sum(1, 2) + | 3 + | + +This encoding implies that any occurrences of a CRLF followed by 7 spaces +and a pipe char have to be replaced by a single CRLF when the field is unfolded +using a RFC822 reader. + +Alternatively, the distribution's description may instead be provided in the +message body (i.e., after a completely blank line following the headers, with +no indentation or other special formatting necessary). + + +.. _description-content-type-optional: +.. _core-metadata-description-content-type: + +Description-Content-Type +======================== + +.. versionadded:: 2.1 + +A string stating the markup syntax (if any) used in the distribution's +description, so that tools can intelligently render the description. + +Historically, PyPI supported descriptions in plain text and `reStructuredText +(reST) `_, +and could render reST into HTML. 
However, it is common for distribution +authors to write the description in `Markdown +`_ (:rfc:`7763`) as many code hosting sites render +Markdown READMEs, and authors would reuse the file for the description. PyPI +didn't recognize the format and so could not render the description correctly. +This resulted in many packages on PyPI with poorly-rendered descriptions when +Markdown is left as plain text, or worse, was attempted to be rendered as reST. +This field allows the distribution author to specify the format of their +description, opening up the possibility for PyPI and other tools to be able to +render Markdown and other formats. + +The format of this field is the same as the ``Content-Type`` header in HTTP +(i.e.: +`RFC 1341 `_). +Briefly, this means that it has a ``type/subtype`` part and then it can +optionally have a number of parameters: + +Format:: + + Description-Content-Type: /; charset=[; = ...] + +The ``type/subtype`` part has only a few legal values: + +- ``text/plain`` +- ``text/x-rst`` +- ``text/markdown`` + +The ``charset`` parameter can be used to specify the character encoding of +the description. The only legal value is ``UTF-8``. If omitted, it is assumed to +be ``UTF-8``. + +Other parameters might be specific to the chosen subtype. For example, for the +``markdown`` subtype, there is an optional ``variant`` parameter that allows +specifying the variant of Markdown in use (defaults to ``GFM`` if not +specified). 
Currently, two variants are recognized: + +- ``GFM`` for :rfc:`GitHub-flavored Markdown <7764#section-3.2>` +- ``CommonMark`` for :rfc:`CommonMark <7764#section-3.5>` + +Example:: + + Description-Content-Type: text/plain; charset=UTF-8 + +Example:: + + Description-Content-Type: text/x-rst; charset=UTF-8 + +Example:: + + Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM + +Example:: + + Description-Content-Type: text/markdown + +If a ``Description-Content-Type`` is not specified, then applications should +attempt to render it as ``text/x-rst; charset=UTF-8`` and fall back to +``text/plain`` if it is not valid rst. + +If a ``Description-Content-Type`` is an unrecognized value, then the assumed +content type is ``text/plain`` (Although PyPI will probably reject anything +with an unrecognized value). + +If the ``Description-Content-Type`` is ``text/markdown`` and ``variant`` is not +specified or is set to an unrecognized value, then the assumed ``variant`` is +``GFM``. + +So for the last example above, the ``charset`` defaults to ``UTF-8`` and the +``variant`` defaults to ``GFM`` and thus it is equivalent to the example +before it. + + +.. _keywords-optional: +.. _core-metadata-keywords: + +Keywords +======== + +.. versionadded:: 1.0 + +A list of additional keywords, separated by commas, to be used to assist +searching for the distribution in a larger catalog. + +Example:: + + Keywords: dog,puppy,voting,election + +.. note:: + + The specification previously showed keywords separated by spaces, + but distutils and setuptools implemented it with commas. + These tools have been very widely used for many years, so it was + easier to update the specification to match the de facto standard. + +.. _author-optional: +.. _core-metadata-author: + +Author +====== + +.. versionadded:: 1.0 + +A string containing the author's name at a minimum; additional +contact information may be provided. + +Example:: + + Author: C. 
Schultz, Universal Features Syndicate, + Los Angeles, CA + + +.. _author-email-optional: +.. _core-metadata-author-email: + +Author-email +============ + +.. versionadded:: 1.0 + +A string containing the author's e-mail address. It can contain +a name and e-mail address in the legal forms for a RFC-822 +``From:`` header. + +Example:: + + Author-email: "C. Schultz" + +Per RFC-822, this field may contain multiple comma-separated e-mail +addresses:: + + Author-email: cschultz@example.com, snoopy@peanuts.com + + +.. _maintainer-optional: +.. _core-metadata-maintainer: + +Maintainer +========== + +.. versionadded:: 1.2 + +A string containing the maintainer's name at a minimum; additional +contact information may be provided. + +Note that this field is intended for use when a project is being +maintained by someone other than the original author: it should be +omitted if it is identical to ``Author``. + +Example:: + + Maintainer: C. Schultz, Universal Features Syndicate, + Los Angeles, CA + + +.. _maintainer-email-optional: +.. _core-metadata-maintainer-email: + +Maintainer-email +================ + +.. versionadded:: 1.2 + +A string containing the maintainer's e-mail address. It can contain +a name and e-mail address in the legal forms for a RFC-822 +``From:`` header. + +Note that this field is intended for use when a project is being +maintained by someone other than the original author: it should be +omitted if it is identical to ``Author-email``. + +Example:: + + Maintainer-email: "C. Schultz" + +Per RFC-822, this field may contain multiple comma-separated e-mail +addresses:: + + Maintainer-email: cschultz@example.com, snoopy@peanuts.com + + +.. _license-optional: +.. _core-metadata-license: + +License +======= + +.. versionadded:: 1.0 +.. deprecated:: 2.4 + in favour of ``License-Expression``. + +.. warning:: + As of Metadata 2.4, ``License`` and ``License-Expression`` are mutually + exclusive. 
If both are specified, tools which parse metadata will disregard + ``License`` and PyPI will reject uploads. + See `PEP 639 `__. + +Text indicating the license covering the distribution where the license +is not a selection from the "License" Trove classifiers. See +:ref:`"Classifier" ` below. +This field may also be used to specify a +particular version of a license which is named via the ``Classifier`` +field, or to indicate a variation or exception to such a license. + +Examples:: + + License: This software may only be obtained by sending the + author a postcard, and then the user promises not + to redistribute it. + + License: GPL version 3, excluding DRM provisions + + +.. _license-expression-optional: +.. _core-metadata-license-expression: + +License-Expression +================== + +.. versionadded:: 2.4 + +Text string that is a valid SPDX +:term:`license expression `, +as specified in :doc:`/specifications/license-expression`. + +Examples:: + + License-Expression: MIT + License-Expression: BSD-3-Clause + License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause) + License-Expression: MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause) + License-Expression: GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause + License-Expression: LicenseRef-Special-License OR CC0-1.0 OR Unlicense + License-Expression: LicenseRef-Proprietary + + +.. _license-file-optional: +.. _core-metadata-license-file: + +License-File (multiple use) +=========================== + +.. versionadded:: 2.4 + +Each entry is a string representation of the path of a license-related file. +The path is located within the project source tree, relative to the project +root directory. For details see :pep:`639`. + +Examples:: + + License-File: LICENSE + License-File: AUTHORS + License-File: LICENSE.txt + License-File: licenses/LICENSE.MIT + License-File: licenses/LICENSE.CC0 + + +.. _metadata-classifier: +.. _core-metadata-classifier: + +Classifier (multiple use) +========================= + +.. 
versionadded:: 1.1 + +Each entry is a string giving a single classification value +for the distribution. Classifiers are described in :pep:`301`, +and the Python Package Index publishes a dynamic list of +`currently defined classifiers `__. + +.. note:: + The use of ``License ::`` classifiers is deprecated as of Metadata 2.4, + use ``License-Expression`` instead. See + `PEP 639 `_. + +This field may be followed by an environment marker after a semicolon. + +Examples:: + + Classifier: Development Status :: 4 - Beta + Classifier: Environment :: Console (Text Based) + + +.. _core-metadata-requires-dist: + +Requires-Dist (multiple use) +============================ + +.. versionadded:: 1.2 +.. versionchanged:: 2.1 + The field format specification was relaxed to accept the syntax used by + popular publishing tools. + +Each entry contains a string naming some other distutils +project required by this distribution. + +The format of a requirement string contains from one to four parts: + +* A project name, in the same format as the ``Name:`` field. + The only mandatory part. +* A comma-separated list of 'extra' names. These are defined by + the required project, referring to specific features which may + need extra dependencies. The names MUST conform to the restrictions + specified by the ``Provides-Extra:`` field. +* A version specifier. Tools parsing the format should accept optional + parentheses around this, but tools generating it should not use + parentheses. +* An environment marker after a semicolon. This means that the + requirement is only needed in the specified conditions. + +See :pep:`508` for full details of the allowed format. + +The project names should correspond to names as found +on the `Python Package Index`_. + +Version specifiers must follow the rules described in +:doc:`version-specifiers`. 
+ +Examples:: + + Requires-Dist: pkginfo + Requires-Dist: PasteDeploy + Requires-Dist: zope.interface (>3.5.0) + Requires-Dist: pywin32 >1.0; sys_platform == 'win32' + + +.. _core-metadata-requires-python: + +Requires-Python +=============== + +.. versionadded:: 1.2 + +This field specifies the Python version(s) that the distribution is +compatible with. Installation tools may look at this when +picking which version of a project to install. + +The value must be in the format specified in :doc:`version-specifiers`. + +For example, if a distribution uses :ref:`f-strings ` +then it may prevent installation on Python < 3.6 by specifying:: + + Requires-Python: >=3.6 + +This field cannot be followed by an environment marker. + +.. _core-metadata-requires-external: + +Requires-External (multiple use) +================================ + +.. versionadded:: 1.2 +.. versionchanged:: 2.1 + The field format specification was relaxed to accept the syntax used by + popular publishing tools. + +Each entry contains a string describing some dependency in the +system that the distribution is to be used. This field is intended to +serve as a hint to downstream project maintainers, and has no +semantics which are meaningful to the ``distutils`` distribution. + +The format of a requirement string is a name of an external +dependency, optionally followed by a version declaration within +parentheses. + +This field may be followed by an environment marker after a semicolon. + +Because they refer to non-Python software releases, version numbers +for this field are **not** required to conform to the format +specified in the :ref:`Version specifier specification `: +they should correspond to the version scheme used by the external dependency. + +Notice that there is no particular rule on the strings to be used. + +Examples:: + + Requires-External: C + Requires-External: libpng (>=1.5) + Requires-External: make; sys_platform != "win32" + + +.. 
_core-metadata-project-url: + +Project-URL (multiple-use) +========================== + +.. versionadded:: 1.2 + +A string containing a browsable URL for the project and a label for it, +separated by a comma. + +Example:: + + Project-URL: Bug Tracker, http://bitbucket.org/tarek/distribute/issues/ + +The label is free text limited to 32 characters. + +Starting with :pep:`753`, project metadata consumers (such as the Python +Package Index) can use a standard normalization process to discover "well-known" +labels, which can then be given special presentations when being rendered +for human consumption. See :ref:`well-known-project-urls`. + +.. _metadata_provides_extra: +.. _core-metadata-provides-extra: +.. _provides-extra-optional-multiple-use: + +Provides-Extra (multiple use) +============================= + +.. versionadded:: 2.1 +.. versionchanged:: 2.3 + :pep:`685` restricted valid values to be unambiguous (i.e. no normalization + required). For older metadata versions, value restrictions were brought into + line with ``Name:`` and normalization rules were introduced. + +A string containing the name of an optional feature. A valid name consists only +of lowercase ASCII letters, ASCII numbers, and hyphen. It must start and end +with a letter or number. Hyphens cannot be followed by another hyphen. Names are +limited to those which match the following regex (which guarantees unambiguity):: + + ^[a-z0-9]+(-[a-z0-9]+)*$ + + +The specified name may be used to make a dependency conditional on whether the +optional feature has been requested. + +Example:: + + Provides-Extra: pdf + Requires-Dist: reportlab; extra == 'pdf' + +A second distribution requires an optional dependency by placing it +inside square brackets, and can request multiple features by separating +them with a comma (,). The requirements are evaluated for each requested +feature and added to the set of requirements for the distribution. 
+ +Example:: + + Requires-Dist: beaglevote[pdf] + Requires-Dist: libexample[test, doc] + +Two feature names ``test`` and ``doc`` are reserved to mark dependencies that +are needed for running automated tests and generating documentation, +respectively. + +It is legal to specify ``Provides-Extra:`` without referencing it in any +``Requires-Dist:``. + +When writing data for older metadata versions, names MUST be normalized +following the same rules used for the ``Name:`` field when performing +comparisons. Tools writing metadata MUST raise an error if two +``Provides-Extra:`` entries would clash after being normalized. + +When reading data for older metadata versions, tools SHOULD warn when values +for this field would be invalid under newer metadata versions. If a value would +be invalid following the rules for ``Name:`` in any core metadata version, the +user SHOULD be warned and the value ignored to avoid ambiguity. Tools MAY choose +to raise an error when reading an invalid name for older metadata versions. + + +Rarely Used Fields +================== + +The fields in this section are currently rarely used, as their design +was inspired by comparable mechanisms in Linux package management systems, +and it isn't at all clear how tools should interpret them in the context +of an open index server such as `PyPI `__. + +As a result, popular installation tools ignore them completely, which in +turn means there is little incentive for package publishers to set them +appropriately. However, they're retained in the metadata specification, +as they're still potentially useful for informational purposes, and can +also be used for their originally intended purpose in combination with +a curated package repository. + +.. _core-metadata-provides-dist: + +Provides-Dist (multiple use) +---------------------------- + +.. versionadded:: 1.2 +.. versionchanged:: 2.1 + The field format specification was relaxed to accept the syntax used by + popular publishing tools. 
+ +Each entry contains a string naming a Distutils project which +is contained within this distribution. This field *must* include +the project identified in the ``Name`` field, followed by the +version : Name (Version). + +A distribution may provide additional names, e.g. to indicate that +multiple projects have been bundled together. For instance, source +distributions of the ``ZODB`` project have historically included +the ``transaction`` project, which is now available as a separate +distribution. Installing such a source distribution satisfies +requirements for both ``ZODB`` and ``transaction``. + +A distribution may also provide a "virtual" project name, which does +not correspond to any separately-distributed project: such a name +might be used to indicate an abstract capability which could be supplied +by one of multiple projects. E.g., multiple projects might supply +RDBMS bindings for use by a given ORM: each project might declare +that it provides ``ORM-bindings``, allowing other projects to depend +only on having at most one of them installed. + +A version declaration may be supplied and must follow the rules described +in :doc:`version-specifiers`. The distribution's version number will be implied +if none is specified. + +This field may be followed by an environment marker after a semicolon. + +Examples:: + + Provides-Dist: OtherProject + Provides-Dist: AnotherProject==3.4 + Provides-Dist: virtual_package; python_version >= "3.4" + +.. _core-metadata-obsoletes-dist: + +Obsoletes-Dist (multiple use) +----------------------------- + +.. versionadded:: 1.2 +.. versionchanged:: 2.1 + The field format specification was relaxed to accept the syntax used by + popular publishing tools. + +Each entry contains a string describing a distutils project's distribution +which this distribution renders obsolete, meaning that the two projects +should not be installed at the same time. + +Version declarations can be supplied. 
Version numbers must be in the +format specified in :doc:`version-specifiers`. + +This field may be followed by an environment marker after a semicolon. + +The most common use of this field will be in case a project name +changes, e.g. Gorgon 2.3 gets subsumed into Torqued Python 1.0. +When you install Torqued Python, the Gorgon distribution should be +removed. + +Examples:: + + Obsoletes-Dist: Gorgon + Obsoletes-Dist: OtherProject (<3.0) + Obsoletes-Dist: Foo; os_name == "posix" + + +Deprecated Fields +================= + +Deprecated fields should be avoided, but they are valid metadata fields. They +may be removed in future versions of the core metadata standard (at which point +they will only be valid in files that specify a metadata version prior to the +removal). Tools SHOULD warn users when deprecated fields are used. + +.. _home-page-optional: +.. _core-metadata-home-page: + +Home-page +--------- + +.. versionadded:: 1.0 + +.. deprecated:: 1.2 + + Per :pep:`753`, use :ref:`core-metadata-project-url` instead. + +A string containing the URL for the distribution's home page. + +Example:: + + Home-page: http://www.example.com/~cschultz/bvote/ + +.. _core-metadata-download-url: + +Download-URL +------------ + +.. versionadded:: 1.1 + +.. deprecated:: 1.2 + + Per :pep:`753`, use :ref:`core-metadata-project-url` instead. + +A string containing the URL from which this version of the distribution +can be downloaded. (This means that the URL can't be something like +"``.../BeagleVote-latest.tgz``", but instead must be +"``.../BeagleVote-0.45.tgz``".) + +Requires +-------- + +.. versionadded:: 1.1 +.. deprecated:: 1.2 + in favour of ``Requires-Dist`` + +Each entry contains a string describing some other module or package required +by this package. + +The format of a requirement string is identical to that of a module or package +name usable with the ``import`` statement, optionally followed by a version +declaration within parentheses. 
+ +A version declaration is a series of conditional operators and version numbers, +separated by commas. Conditional operators must be one of "<", ">"', "<=", +">=", "==", and "!=". Version numbers must be in the format accepted by the +``distutils.version.StrictVersion`` class: two or three dot-separated numeric +components, with an optional "pre-release" tag on the end consisting of the +letter 'a' or 'b' followed by a number. Example version numbers are "1.0", +"2.3a2", "1.3.99", + +Any number of conditional operators can be specified, e.g. the string ">1.0, +!=1.3.4, <2.0" is a legal version declaration. + +All of the following are possible requirement strings: "rfc822", "zlib +(>=1.1.4)", "zope". + +There’s no canonical list of what strings should be used; the Python community +is left to choose its own standards. + +Examples:: + + Requires: re + Requires: sys + Requires: zlib + Requires: xml.parsers.expat (>1.0) + Requires: psycopg + + +Provides +-------- + +.. versionadded:: 1.1 +.. deprecated:: 1.2 + in favour of ``Provides-Dist`` + +Each entry contains a string describing a package or module that will be +provided by this package once it is installed. These strings should match the +ones used in Requirements fields. A version declaration may be supplied +(without a comparison operator); the package’s version number will be implied +if none is specified. + +Examples:: + + Provides: xml + Provides: xml.utils + Provides: xml.utils.iso8601 + Provides: xml.dom + Provides: xmltools (1.3) + + +Obsoletes +--------- + +.. versionadded:: 1.1 +.. deprecated:: 1.2 + in favour of ``Obsoletes-Dist`` + +Each entry contains a string describing a package or module that this package +renders obsolete, meaning that the two packages should not be installed at the +same time. Version declarations can be supplied. + +The most common use of this field will be in case a package name changes, e.g. +Gorgon 2.3 gets subsumed into Torqued Python 1.0. 
When you install Torqued +Python, the Gorgon package should be removed. + +Example:: + + Obsoletes: Gorgon + + +History +======= + +- March 2001: Core metadata 1.0 was approved through :pep:`241`. +- April 2003: Core metadata 1.1 was approved through :pep:`314`: +- February 2010: Core metadata 1.2 was approved through :pep:`345`. +- February 2018: Core metadata 2.1 was approved through :pep:`566`. + + - Added ``Description-Content-Type`` and ``Provides-Extra``. + - Added canonical method for transforming metadata to JSON. + - Restricted the grammar of the ``Name`` field. + +- October 2020: Core metadata 2.2 was approved through :pep:`643`. + + - Added the ``Dynamic`` field. + +- March 2022: Core metadata 2.3 was approved through :pep:`685`. + + - Restricted extra names to be normalized. + +- August 2024: Core metadata 2.4 was approved through :pep:`639`. + + - Added the ``License-Expression`` field. + - Added the ``License-File`` field. + +---- + +.. [1] reStructuredText markup: + https://docutils.sourceforge.io/ + +.. _`Python Package Index`: https://pypi.org/ + +.. [2] RFC 822 Long Header Fields: + :rfc:`822#section-3.1.1` diff --git a/source/specifications/dependency-groups.rst b/source/specifications/dependency-groups.rst new file mode 100644 index 000000000..22e4cba0d --- /dev/null +++ b/source/specifications/dependency-groups.rst @@ -0,0 +1,252 @@ +.. _dependency-groups: + +================= +Dependency Groups +================= + +This specification defines Dependency Groups, a mechanism for storing package +requirements in ``pyproject.toml`` files such that they are not included in +project metadata when it is built. + +Dependency Groups are suitable for internal development use-cases like linting +and testing, as well as for projects which are not built for distribution, like +collections of related scripts. 
+ +Fundamentally, Dependency Groups should be thought of as being a standardized +subset of the capabilities of ``requirements.txt`` files (which are +``pip``-specific). + +Specification +============= + +Examples +-------- + +This is a simple table which shows ``docs`` and ``test`` groups:: + + [dependency-groups] + docs = ["sphinx"] + test = ["pytest>7", "coverage"] + +and a similar table which defines ``docs``, ``test``, and ``coverage`` groups:: + + [dependency-groups] + docs = ["sphinx"] + coverage = ["coverage[toml]"] + test = ["pytest>7", {include-group = "coverage"}] + +The ``[dependency-groups]`` Table +--------------------------------- + +Dependency Groups are defined as a table in ``pyproject.toml`` named +``dependency-groups``. The ``dependency-groups`` table contains an arbitrary +number of user-defined keys, each of which has, as its value, a list of +requirements. + +``[dependency-groups]`` keys, sometimes also called "group names", must be +:ref:`valid non-normalized names `. Tools which handle Dependency +Groups MUST :ref:`normalize ` these names before +comparisons. + +Tools SHOULD prefer to present the original, non-normalized name to users, and +if duplicate names are detected after normalization, tools SHOULD emit an +error. + +Requirement lists, the values in ``[dependency-groups]``, may contain strings, +tables (``dict`` in Python), or a mix of strings and tables. Strings must be +valid :ref:`dependency specifiers `, and tables must be +valid Dependency Group Includes. + +Dependency Group Include +------------------------ + +A Dependency Group Include includes another Dependency Group in the current +group. + +An include is a table with exactly one key, ``"include-group"``, whose value is +a string, the name of another Dependency Group. + +Includes are defined to be exactly equivalent to the contents of the named +Dependency Group, inserted into the current group at the location of the include. 
+For example, if ``foo = ["a", "b"]`` is one group, and +``bar = ["c", {include-group = "foo"}, "d"]`` is another, then ``bar`` should +evaluate to ``["c", "a", "b", "d"]`` when Dependency Group Includes are expanded. + +Dependency Group Includes may specify the same package multiple times. +Tools SHOULD NOT deduplicate or otherwise alter the list contents produced by the +include. For example, given the following table: + +.. code-block:: toml + + [dependency-groups] + group-a = ["foo"] + group-b = ["foo>1.0"] + group-c = ["foo<1.0"] + all = [ + "foo", + {include-group = "group-a"}, + {include-group = "group-b"}, + {include-group = "group-c"}, + ] + +The resolved value of ``all`` SHOULD be ``["foo", "foo", "foo>1.0", "foo<1.0"]``. +Tools should handle such a list exactly as they would handle any other case in +which they are asked to process the same requirement multiple times with +different version constraints. + +Dependency Group Includes may include groups containing Dependency Group Includes, +in which case those includes should be expanded as well. Dependency Group Includes +MUST NOT include cycles, and tools SHOULD report an error if they detect a cycle. + +Package Building +---------------- + +Build backends MUST NOT include Dependency Group data in built distributions as +package metadata. This means that sdist ``PKG-INFO`` and wheel ``METADATA`` +files should not include referenceable fields containing Dependency Groups. + +It is, however, valid to use Dependency Groups in the evaluation of dynamic +metadata, and ``pyproject.toml`` files included in sdists will still contain +``[dependency-groups]``. However, the table's contents are not part of a built +package's interfaces. + +Installing Dependency Groups & Extras +------------------------------------- + +There is no syntax or specification-defined interface for installing or +referring to Dependency Groups. Tools are expected to provide dedicated +interfaces for this purpose. 
+ +Tools MAY choose to provide the same or similar interfaces for interacting +with Dependency Groups as they do for managing extras. Tools authors are +advised that the specification does not forbid having an extra whose name +matches a Dependency Group. Separately, users are advised to avoid creating +Dependency Groups whose names match extras, and tools MAY treat such matching +as an error. + +Validation and Compatibility +---------------------------- + +Tools supporting Dependency Groups may want to validate data before using it. +When implementing such validation, authors should be aware of the possibility +of future extensions to the specification, so that they do not unnecessarily +emit errors or warnings. + +Tools SHOULD error when evaluating or processing unrecognized data in +Dependency Groups. + +Tools SHOULD NOT eagerly validate the contents of *all* Dependency Groups +unless they have a need to do so. + +This means that in the presence of the following data, most tools should allow +the ``foo`` group to be used and only error if the ``bar`` group is used: + +.. code-block:: toml + + [dependency-groups] + foo = ["pyparsing"] + bar = [{set-phasers-to = "stun"}] + +.. note:: + + There are several known cases of tools which have good cause to be + stricter. Linters and validators are an example, as their purpose is to + validate the contents of all Dependency Groups. + +Reference Implementation +======================== + +The following Reference Implementation prints the contents of a Dependency +Group to stdout, newline delimited. +The output is therefore valid ``requirements.txt`` data. + +.. 
code-block:: python + + import re + import sys + import tomllib + from collections import defaultdict + + from packaging.requirements import Requirement + + + def _normalize_name(name: str) -> str: + return re.sub(r"[-_.]+", "-", name).lower() + + + def _normalize_group_names(dependency_groups: dict) -> dict: + original_names = defaultdict(list) + normalized_groups = {} + + for group_name, value in dependency_groups.items(): + normed_group_name = _normalize_name(group_name) + original_names[normed_group_name].append(group_name) + normalized_groups[normed_group_name] = value + + errors = [] + for normed_name, names in original_names.items(): + if len(names) > 1: + errors.append(f"{normed_name} ({', '.join(names)})") + if errors: + raise ValueError(f"Duplicate dependency group names: {', '.join(errors)}") + + return normalized_groups + + + def _resolve_dependency_group( + dependency_groups: dict, group: str, past_groups: tuple[str, ...] = () + ) -> list[str]: + if group in past_groups: + raise ValueError(f"Cyclic dependency group include: {group} -> {past_groups}") + + if group not in dependency_groups: + raise LookupError(f"Dependency group '{group}' not found") + + raw_group = dependency_groups[group] + if not isinstance(raw_group, list): + raise ValueError(f"Dependency group '{group}' is not a list") + + realized_group = [] + for item in raw_group: + if isinstance(item, str): + # packaging.requirements.Requirement parsing ensures that this is a valid + # PEP 508 Dependency Specifier + # raises InvalidRequirement on failure + Requirement(item) + realized_group.append(item) + elif isinstance(item, dict): + if tuple(item.keys()) != ("include-group",): + raise ValueError(f"Invalid dependency group item: {item}") + + include_group = _normalize_name(next(iter(item.values()))) + realized_group.extend( + _resolve_dependency_group( + dependency_groups, include_group, past_groups + (group,) + ) + ) + else: + raise ValueError(f"Invalid dependency group item: {item}") + + 
return realized_group
+
+
+    def resolve(dependency_groups: dict, group: str) -> list[str]:
+        if not isinstance(dependency_groups, dict):
+            raise TypeError("Dependency Groups table is not a dict")
+        if not isinstance(group, str):
+            raise TypeError("Dependency group name is not a str")
+        return _resolve_dependency_group(dependency_groups, group)
+
+
+    if __name__ == "__main__":
+        with open("pyproject.toml", "rb") as fp:
+            pyproject = tomllib.load(fp)
+
+        dependency_groups_raw = pyproject["dependency-groups"]
+        dependency_groups = _normalize_group_names(dependency_groups_raw)
+        print("\n".join(resolve(dependency_groups, sys.argv[1])))
+
+History
+=======
+
+- October 2024: This specification was approved through :pep:`735`.
diff --git a/source/specifications/dependency-specifiers.rst b/source/specifications/dependency-specifiers.rst
new file mode 100644
index 000000000..e8e3229ff
--- /dev/null
+++ b/source/specifications/dependency-specifiers.rst
@@ -0,0 +1,546 @@
+.. highlight:: text
+
+.. _dependency-specifiers:
+
+=====================
+Dependency specifiers
+=====================
+
+This document describes the dependency specifiers format as originally specified
+in :pep:`508`.
+
+The job of a dependency is to enable tools like pip [#pip]_ to find the right
+package to install. Sometimes this is very loose - just specifying a name, and
+sometimes very specific - referring to a specific file to install. Sometimes
+dependencies are only relevant in one platform, or only some versions are
+acceptable, so the language permits describing all these cases.
+
+The language defined is a compact line based format which is already in
+widespread use in pip requirements files, though we do not specify the command
+line option handling that those files permit. There is one caveat - the
+URL reference form, specified in :ref:`Versioning specifier specification <version-specifiers>`
+is not actually implemented in pip, but we use that format rather
+than pip's current native format.
+ +Specification +============= + +Examples +-------- + +All features of the language shown with a name based lookup:: + + requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7" + +A minimal URL based lookup:: + + pip @ https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686 + +Concepts +-------- + +A dependency specification always specifies a distribution name. It may +include extras, which expand the dependencies of the named distribution to +enable optional features. The version installed can be controlled using +version limits, or giving the URL to a specific artifact to install. Finally +the dependency can be made conditional using environment markers. + +Grammar +------- + +We first cover the grammar briefly and then drill into the semantics of each +section later. + +A distribution specification is written in ASCII text. We use a parsley +[#parsley]_ grammar to provide a precise grammar. It is expected that the +specification will be embedded into a larger system which offers framing such +as comments, multiple line support via continuations, or other such features. + +The full grammar including annotations to build a useful parse tree is +included at the end of this document. + +Versions may be specified according to the rules of the +:ref:`Version specifier specification `. (Note: +URI is defined in :rfc:`std-66 <3986>`):: + + version_cmp = wsp* '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' + version = wsp* ( letterOrDigit | '-' | '_' | '.' | '*' | '+' | '!' )+ + version_one = version_cmp version wsp* + version_many = version_one (',' version_one)* (',' wsp*)? + versionspec = ( '(' version_many ')' ) | version_many + urlspec = '@' wsp* + +Environment markers allow making a specification only take effect in some +environments:: + + marker_op = version_cmp | (wsp+ 'in' wsp+) | (wsp+ 'not' wsp+ 'in' wsp+) + python_str_c = (wsp | letter | digit | '(' | ')' | '.' 
| '{' | '}' | + '-' | '_' | '*' | '#' | ':' | ';' | ',' | '/' | '?' | + '[' | ']' | '!' | '~' | '`' | '@' | '$' | '%' | '^' | + '&' | '=' | '+' | '|' | '<' | '>' ) + dquote = '"' + squote = '\\'' + python_str = (squote (python_str_c | dquote)* squote | + dquote (python_str_c | squote)* dquote) + env_var = ('python_version' | 'python_full_version' | + 'os_name' | 'sys_platform' | 'platform_release' | + 'platform_system' | 'platform_version' | + 'platform_machine' | 'platform_python_implementation' | + 'implementation_name' | 'implementation_version' | + 'extra' | 'extras' | 'dependency_groups' # ONLY when defined by a containing layer + ) + marker_var = wsp* (env_var | python_str) + marker_expr = marker_var marker_op marker_var + | wsp* '(' marker wsp* ')' + marker_and = marker_expr wsp* 'and' marker_expr + | marker_expr + marker_or = marker_and wsp* 'or' marker_and + | marker_and + marker = marker_or + quoted_marker = ';' wsp* marker + +Optional components of a distribution may be specified using the extras +field:: + + identifier_end = letterOrDigit | (('-' | '_' | '.' )* letterOrDigit) + identifier = letterOrDigit identifier_end* + name = identifier + extras_list = identifier (wsp* ',' wsp* identifier)* + extras = '[' wsp* extras_list? wsp* ']' + +Restrictions on names for extras is defined in :pep:`685`. + +Giving us a rule for name based requirements:: + + name_req = name wsp* extras? wsp* versionspec? wsp* quoted_marker? + +And a rule for direct reference specifications:: + + url_req = name wsp* extras? wsp* urlspec (wsp+ quoted_marker?)? + +Leading to the unified rule that can specify a dependency.:: + + specification = wsp* ( url_req | name_req ) wsp* + +Whitespace +---------- + +Non line-breaking whitespace is mostly optional with no semantic meaning. The +sole exception is detecting the end of a URL requirement. + +.. _dependency-specifiers-names: + +Names +----- + +Python distribution names are currently defined in :pep:`345`. 
Names
+act as the primary identifier for distributions. They are present in all
+dependency specifications, and are sufficient to be a specification on their
+own. However, PyPI places strict restrictions on names - they must match a
+case insensitive regex or they won't be accepted. Accordingly, in this
+document we limit the acceptable values for identifiers to that regex. A full
+redefinition of name may take place in a future metadata PEP. The regex (run
+with re.IGNORECASE) is::
+
+    ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$
+
+.. _dependency-specifiers-extras:
+
+Extras
+------
+
+An extra is an optional part of a distribution. Distributions can specify as
+many extras as they wish, and each extra results in the declaration of
+additional dependencies of the distribution **when** the extra is used in a
+dependency specification. For instance::
+
+    requests[security,tests]
+
+Extras union in the dependencies they define with the dependencies of the
+distribution they are attached to. The example above would result in requests
+being installed, and requests' own dependencies, and also any dependencies that
+are listed in the "security" extra of requests.
+
+If multiple extras are listed, all the dependencies are unioned together.
+
+.. _dependency-specifiers-versions:
+
+Versions
+--------
+
+See the :ref:`Version specifier specification <version-specifiers>` for
+more detail on both version numbers and version comparisons. Version
+specifications limit the versions of a distribution that can be
+used. They only apply to distributions looked up by name, rather than
+via a URL. Version comparisons are also used in the markers feature. The
+optional brackets around a version are present for compatibility with
+:pep:`345` but should not be generated, only accepted.
+
+.. _dependency-specifiers-environment-markers:
+
+Environment Markers
+-------------------
+
+Environment markers allow a dependency specification to provide a rule that
+describes when the dependency should be used. 
For instance, consider a package
+that needs argparse. In Python 2.7 argparse is always present. On older Python
+versions it has to be installed as a dependency. This can be expressed as so::
+
+    argparse;python_version<"2.7"
+
+A marker expression evaluates to either True or False. When it evaluates to
+False, the dependency specification should be ignored.
+
+The marker language is inspired by Python itself, chosen for the ability to
+safely evaluate it without running arbitrary code that could become a security
+vulnerability. Markers were first standardised in :pep:`345`. This document
+fixes some issues that were observed in the design described in :pep:`426`.
+
+Comparisons in marker expressions are typed by the comparison operator and the
+type of the marker value. The <marker_op> operators that are not in
+<version_cmp> perform the same as they do for strings or sets in Python based on
+whether the marker value is a string or set itself. The <version_cmp> operators
+use the version comparison rules of the
+:ref:`Version specifier specification <version-specifiers>` when those are
+defined (that is when both sides have a valid version specifier). If there is no
+defined behaviour of this specification and the operator exists in Python, then
+the operator falls back to the Python behaviour for the types involved.
+Otherwise an error should be raised. e.g. the following will result in errors::
+
+    "dog" ~= "fred"
+    python_version ~= "surprise"
+
+User supplied constants are always encoded as strings with either ``'`` or
+``"`` quote marks. Note that backslash escapes are not defined, but existing
+implementations do support them. They are not included in this
+specification because they add complexity and there is no observable need for
+them today. Similarly we do not define non-ASCII character support: all the
+runtime variables we are referencing are expected to be ASCII-only.
+
+The variables in the marker grammar such as "os_name" resolve to values looked
+up in the Python runtime. 
With the exception of "extra" all values are defined +on all Python versions today - it is an error in the implementation of markers +if a value is not defined. + +Unknown variables must raise an error rather than resulting in a comparison +that evaluates to True or False. + +Variables whose value cannot be calculated on a given Python implementation +should evaluate to ``0`` for versions, and an empty string for all other +variables. + +The "extra" variable is special. It is used by wheels to signal which +specifications apply to a given extra in the wheel ``METADATA`` file, but +since the ``METADATA`` file is based on a draft version of :pep:`426`, there is +no current specification for this. Regardless, outside of a context where this +special handling is taking place, the "extra" variable should result in an +error like all other unknown variables. + +The "extras" and "dependency_groups" variables are also special. They are used +to specify any requested extras or dependency groups when installing from a lock +file. Outside of the context of lock files, these two variables should result in +an error like all other unknown variables. + +.. 
list-table:: + :header-rows: 1 + + * - Marker + - Python equivalent + - Type + - Sample values + * - ``os_name`` + - :py:data:`os.name` + - String + - ``posix``, ``java`` + * - ``sys_platform`` + - :py:data:`sys.platform` + - String + - ``linux``, ``linux2``, ``darwin``, ``java1.8.0_51`` (note that "linux" + is from Python3 and "linux2" from Python2) + * - ``platform_machine`` + - :py:func:`platform.machine()` + - String + - ``x86_64`` + * - ``platform_python_implementation`` + - :py:func:`platform.python_implementation()` + - String + - ``CPython``, ``Jython`` + * - ``platform_release`` + - :py:func:`platform.release()` + - String + - ``3.14.1-x86_64-linode39``, ``14.5.0``, ``1.8.0_51`` + * - ``platform_system`` + - :py:func:`platform.system()` + - String + - ``Linux``, ``Windows``, ``Java`` + * - ``platform_version`` + - :py:func:`platform.version()` + - String + - ``#1 SMP Fri Apr 25 13:07:35 EDT 2014`` + ``Java HotSpot(TM) 64-Bit Server VM, 25.51-b03, Oracle Corporation`` + ``Darwin Kernel Version 14.5.0: Wed Jul 29 02:18:53 PDT 2015; root:xnu-2782.40.9~2/RELEASE_X86_64`` + * - ``python_version`` + - ``'.'.join(platform.python_version_tuple()[:2])`` + - :ref:`Version ` + - ``3.4``, ``2.7`` + * - ``python_full_version`` + - :py:func:`platform.python_version()` + - :ref:`Version ` + - ``3.4.0``, ``3.5.0b1`` + * - ``implementation_name`` + - :py:data:`sys.implementation.name ` + - String + - ``cpython`` + * - ``implementation_version`` + - see definition below + - :ref:`Version ` + - ``3.4.0``, ``3.5.0b1`` + * - ``extra`` + - An error except when defined by the context interpreting the + specification. + - String + - ``toml`` + * - ``extras`` + - An error except when defined by the context interpreting the + specification. + - Set of strings + - ``{"toml"}`` + * - ``dependency_groups`` + - An error except when defined by the context interpreting the + specification. 
+ - Set of strings + - ``{"test"}`` + +The ``implementation_version`` marker variable is derived from +:py:data:`sys.implementation.version `: + +.. code-block:: python + + def format_full_version(info): + version = '{0.major}.{0.minor}.{0.micro}'.format(info) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + else: + implementation_version = "0" + +This environment markers section, initially defined through :pep:`508`, supersedes the environment markers +section in :pep:`345`. + +.. _dependency-specifiers-grammar: + +Complete Grammar +================ + +The complete parsley grammar:: + + wsp = ' ' | '\t' + version_cmp = wsp* <'<=' | '<' | '!=' | '==' | '>=' | '>' | '~=' | '==='> + version = wsp* <( letterOrDigit | '-' | '_' | '.' | '*' | '+' | '!' )+> + version_one = version_cmp:op version:v wsp* -> (op, v) + version_many = version_one:v1 (',' version_one)*:v2 (',' wsp*)? -> [v1] + v2 + versionspec = ('(' version_many:v ')' ->v) | version_many + urlspec = '@' wsp* + marker_op = version_cmp | (wsp* 'in') | (wsp* 'not' wsp+ 'in') + python_str_c = (wsp | letter | digit | '(' | ')' | '.' | '{' | '}' | + '-' | '_' | '*' | '#' | ':' | ';' | ',' | '/' | '?' | + '[' | ']' | '!' 
| '~' | '`' | '@' | '$' | '%' | '^' | + '&' | '=' | '+' | '|' | '<' | '>' ) + dquote = '"' + squote = '\\'' + python_str = (squote <(python_str_c | dquote)*>:s squote | + dquote <(python_str_c | squote)*>:s dquote) -> s + env_var = ('python_version' | 'python_full_version' | + 'os_name' | 'sys_platform' | 'platform_release' | + 'platform_system' | 'platform_version' | + 'platform_machine' | 'platform_python_implementation' | + 'implementation_name' | 'implementation_version' | + 'extra' | 'extras' | 'dependency_groups' # ONLY when defined by a containing layer + ):varname -> lookup(varname) + marker_var = wsp* (env_var | python_str) + marker_expr = marker_var:l marker_op:o marker_var:r -> (o, l, r) + | wsp* '(' marker:m wsp* ')' -> m + marker_and = marker_expr:l wsp* 'and' marker_expr:r -> ('and', l, r) + | marker_expr:m -> m + marker_or = marker_and:l wsp* 'or' marker_and:r -> ('or', l, r) + | marker_and:m -> m + marker = marker_or + quoted_marker = ';' wsp* marker + identifier_end = letterOrDigit | (('-' | '_' | '.' )* letterOrDigit) + identifier = < letterOrDigit identifier_end* > + name = identifier + extras_list = identifier:i (wsp* ',' wsp* identifier)*:ids -> [i] + ids + extras = '[' wsp* extras_list?:e wsp* ']' -> e + name_req = (name:n wsp* extras?:e wsp* versionspec?:v wsp* quoted_marker?:m + -> (n, e or [], v or [], m)) + url_req = (name:n wsp* extras?:e wsp* urlspec:v (wsp+ | end) quoted_marker?:m + -> (n, e or [], v, m)) + specification = wsp* ( url_req | name_req ):s wsp* -> s + # The result is a tuple - name, list-of-extras, + # list-of-version-constraints-or-a-url, marker-ast or None + + + URI_reference = + URI = scheme ':' hier_part ('?' query )? ( '#' fragment)? + hier_part = ('//' authority path_abempty) | path_absolute | path_rootless | path_empty + absolute_URI = scheme ':' hier_part ( '?' query )? + relative_ref = relative_part ( '?' query )? ( '#' fragment )? 
+ relative_part = '//' authority path_abempty | path_absolute | path_noscheme | path_empty + scheme = letter ( letter | digit | '+' | '-' | '.')* + authority = ( userinfo '@' )? host ( ':' port )? + userinfo = ( unreserved | pct_encoded | sub_delims | ':')* + host = IP_literal | IPv4address | reg_name + port = digit* + IP_literal = '[' ( IPv6address | IPvFuture) ']' + IPvFuture = 'v' hexdig+ '.' ( unreserved | sub_delims | ':')+ + IPv6address = ( + ( h16 ':'){6} ls32 + | '::' ( h16 ':'){5} ls32 + | ( h16 )? '::' ( h16 ':'){4} ls32 + | ( ( h16 ':')? h16 )? '::' ( h16 ':'){3} ls32 + | ( ( h16 ':'){0,2} h16 )? '::' ( h16 ':'){2} ls32 + | ( ( h16 ':'){0,3} h16 )? '::' h16 ':' ls32 + | ( ( h16 ':'){0,4} h16 )? '::' ls32 + | ( ( h16 ':'){0,5} h16 )? '::' h16 + | ( ( h16 ':'){0,6} h16 )? '::' ) + h16 = hexdig{1,4} + ls32 = ( h16 ':' h16) | IPv4address + IPv4address = dec_octet '.' dec_octet '.' dec_octet '.' dec_octet + nz = ~'0' digit + dec_octet = ( + digit # 0-9 + | nz digit # 10-99 + | '1' digit{2} # 100-199 + | '2' ('0' | '1' | '2' | '3' | '4') digit # 200-249 + | '25' ('0' | '1' | '2' | '3' | '4' | '5') )# %250-255 + reg_name = ( unreserved | pct_encoded | sub_delims)* + path = ( + path_abempty # begins with '/' or is empty + | path_absolute # begins with '/' but not '//' + | path_noscheme # begins with a non-colon segment + | path_rootless # begins with a segment + | path_empty ) # zero characters + path_abempty = ( '/' segment)* + path_absolute = '/' ( segment_nz ( '/' segment)* )? + path_noscheme = segment_nz_nc ( '/' segment)* + path_rootless = segment_nz ( '/' segment)* + path_empty = pchar{0} + segment = pchar* + segment_nz = pchar+ + segment_nz_nc = ( unreserved | pct_encoded | sub_delims | '@')+ + # non-zero-length segment without any colon ':' + pchar = unreserved | pct_encoded | sub_delims | ':' | '@' + query = ( pchar | '/' | '?')* + fragment = ( pchar | '/' | '?')* + pct_encoded = '%' hexdig + unreserved = letter | digit | '-' | '.' 
| '_' | '~' + reserved = gen_delims | sub_delims + gen_delims = ':' | '/' | '?' | '#' | '(' | ')?' | '@' + sub_delims = '!' | '$' | '&' | '\\'' | '(' | ')' | '*' | '+' | ',' | ';' | '=' + hexdig = digit | 'a' | 'A' | 'b' | 'B' | 'c' | 'C' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' + +A test program - if the grammar is in a string ``grammar``: + +.. code-block:: python + + import os + import sys + import platform + + from parsley import makeGrammar + + grammar = """ + wsp ... + """ + tests = [ + "A", + "A.B-C_D", + "aa", + "name", + "name<=1", + "name>=3", + "name>=3,", + "name>=3,<2", + "name@http://foo.com", + "name [fred,bar] @ http://foo.com ; python_version=='2.7'", + "name[quux, strange];python_version<'2.7' and platform_version=='2'", + "name; os_name=='a' or os_name=='b'", + # Should parse as (a and b) or c + "name; os_name=='a' and os_name=='b' or os_name=='c'", + # Overriding precedence -> a and (b or c) + "name; os_name=='a' and (os_name=='b' or os_name=='c')", + # should parse as a or (b and c) + "name; os_name=='a' or os_name=='b' and os_name=='c'", + # Overriding precedence -> (a or b) and c + "name; (os_name=='a' or os_name=='b') and os_name=='c'", + ] + + def format_full_version(info): + version = '{0.major}.{0.minor}.{0.micro}'.format(info) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + implementation_version = '0' + implementation_name = '' + bindings = { + 'implementation_name': implementation_name, + 'implementation_version': implementation_version, + 'os_name': os.name, + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), + 'platform_release': platform.release(), + 'platform_system': platform.system(), + 'platform_version': platform.version(), + 'python_full_version': 
platform.python_version(), + 'python_version': '.'.join(platform.python_version_tuple()[:2]), + 'sys_platform': sys.platform, + } + + compiled = makeGrammar(grammar, {'lookup': bindings.__getitem__}) + for test in tests: + parsed = compiled(test).specification() + print("%s -> %s" % (test, parsed)) + + +History +======= + +- November 2015: This specification was approved through :pep:`508`. +- July 2019: The definition of ``python_version`` was `changed + `_ from ``platform.python_version()[:3]`` to + ``'.'.join(platform.python_version_tuple()[:2])``, to accommodate potential + future versions of Python with 2-digit major and minor versions + (e.g. 3.10). [#future_versions]_ +- June 2024: The definition of ``version_many`` was changed to allow trailing + commas, matching with the behavior of the Python implementation that has been + in use since late 2022. +- April 2025: Added ``extras`` and ``dependency_groups`` for + :ref:`lock-file-spec` as approved through :pep:`751`. + + +References +========== + +.. [#pip] pip, the recommended installer for Python packages + (http://pip.readthedocs.org/en/stable/) + +.. [#parsley] The parsley PEG library. + (https://pypi.python.org/pypi/parsley/) + +.. [#future_versions] Future Python versions might be problematic with the + definition of Environment Marker Variable ``python_version`` + (https://github.com/python/peps/issues/560) + + + +.. _python-version-change: https://mail.python.org/pipermail/distutils-sig/2018-January/031920.html diff --git a/source/specifications/direct-url-data-structure.rst b/source/specifications/direct-url-data-structure.rst new file mode 100644 index 000000000..0d243652d --- /dev/null +++ b/source/specifications/direct-url-data-structure.rst @@ -0,0 +1,418 @@ +.. highlight:: json + +.. 
_direct-url-data-structure: + +========================= +Direct URL Data Structure +========================= + +This document specifies a JSON-serializable abstract data structure that can represent +URLs to python projects and distribution artifacts such as VCS source trees, local +source trees, source distributions and wheels. + +At time of writing, it is not formally specified how to merge the parts of this +data structure into a single URL that can be passed to tools. A common representation is the +pip URL format (`VCS Support `_), other examples are provided in the +:ref:`Version specifier specification `. + +Specification +============= + +The Direct URL Data Structure MUST be a dictionary, serializable to JSON according to +:rfc:`8259`. + +It MUST contain at least two fields. The first one is ``url``, with +type ``string``. Its content must be a valid URL according to the +`WHATWG URL Standard `_. + +Depending on what ``url`` refers to, the second field MUST be one of ``vcs_info`` +(if ``url`` is a VCS reference), ``archive_info`` (if +``url`` is a source archive or a wheel), or ``dir_info`` (if ``url`` is a +local directory). These info fields have a (possibly empty) subdictionary as +value, with the possible keys defined below. + +Security Considerations +----------------------- + +When persisted, ``url`` MUST be stripped of any sensitive authentication information, +for security reasons. + +The user:password section of the URL MAY however +be composed of environment variables, matching the following regular +expression: + +.. code-block:: text + + \$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})? + +Additionally, the user:password section of the URL MAY be a +well-known, non security sensitive string. A typical example is ``git`` +in the case of a URL such as ``ssh://git@gitlab.com/user/repo``. + +.. 
_direct-url-data-structure-vcs: + +VCS URLs +-------- + +When ``url`` refers to a VCS repository, the ``vcs_info`` key MUST be present +as a dictionary with the following keys: + +- A ``vcs`` key (type ``string``) MUST be present, containing the name of the VCS + (i.e. one of ``git``, ``hg``, ``bzr``, ``svn``). Other VCS's SHOULD be registered by + writing a PEP to amend this specification. + The ``url`` value MUST be compatible with the corresponding VCS, + so an installer can hand it off without transformation to a + checkout/download command of the VCS. +- A ``requested_revision`` key (type ``string``) MAY be present naming a + branch/tag/ref/commit/revision/etc (in a format compatible with the VCS). This field + MUST match the revision requested by the user and MUST NOT exist when the user did + not select a specific revision. +- A ``commit_id`` key (type ``string``) MUST be present, containing the + exact commit/revision number that was/is to be installed. + If the VCS supports commit-hash + based revision identifiers, such commit-hash MUST be used as + ``commit_id`` in order to reference an immutable + version of the source code. + +.. _direct-url-data-structure-archive: + +Archive URLs +------------ + +When ``url`` refers to a source archive or a wheel, the ``archive_info`` key +MUST be present as a dictionary with the following keys: + +- A ``hashes`` key SHOULD be present as a dictionary mapping a hash name to a hex + encoded digest of the file. + + Multiple hashes can be included, and it is up to the consumer to decide what to do + with multiple hashes (it may validate all of them or a subset of them, or nothing at + all). + + These hash names SHOULD always be normalized to be lowercase. + + Any hash algorithm available via :py:mod:`hashlib` (specifically any that can be passed to + :py:func:`hashlib.new()` and do not require additional parameters) can be used as a key for + the hashes dictionary. 
At least one secure algorithm from + :py:data:`hashlib.algorithms_guaranteed` SHOULD always be included. At time of writing, + ``sha256`` specifically is recommended. + +- A deprecated ``hash`` key (type ``string``) MAY be present for backwards compatibility + purposes, with value ``=``. + +Producers of the data structure SHOULD emit the ``hashes`` key whether one or multiple +hashes are available. Producers SHOULD continue to emit the ``hash`` key in contexts +where they did so before, so as to keep backwards compatibility for existing clients. + +When both the ``hash`` and ``hashes`` keys are present, the hash represented in the +``hash`` key MUST also be present in the ``hashes`` dictionary, so consumers can +consider the ``hashes`` key only if it is present, and fall back to ``hash`` otherwise. + +.. _direct-url-data-structure-local-directory: + +Local directories +----------------- + +When ``url`` refers to a local directory, the ``dir_info`` key MUST be +present as a dictionary with the following key: + +- ``editable`` (type: ``boolean``): ``true`` if the distribution was/is to be installed + in editable mode, ``false`` otherwise. If absent, default to ``false``. + +When ``url`` refers to a local directory, it MUST have the ``file`` scheme and +be compliant with :rfc:`8089`. In +particular, the path component must be absolute. Symbolic links SHOULD be +preserved when making relative paths absolute. + +.. _direct-url-data-structure-subdirectories: + +Projects in subdirectories +-------------------------- + +A top-level ``subdirectory`` field MAY be present containing a directory path, +relative to the root of the VCS repository, source archive or local directory, +to specify where ``pyproject.toml`` or ``setup.py`` is located. + +.. 
_direct-url-data-structure-registered-vcs: + +Registered VCS +============== + +This section lists the registered VCS's; expanded, VCS-specific information +on how to use the ``vcs``, ``requested_revision``, and other fields of +``vcs_info``; and in +some cases additional VCS-specific fields. +Tools MAY support other VCS's although it is RECOMMENDED to register +them by writing a PEP to amend this specification. The ``vcs`` field SHOULD be the command name +(lowercased). Additional fields that would be necessary to +support such VCS SHOULD be prefixed with the VCS command name. + +Git +--- + +Home page + https://git-scm.com/ + +vcs command + git + +``vcs`` field + git + +``requested_revision`` field + A tag name, branch name, Git ref, commit hash, shortened commit hash, + or other commit-ish. + +``commit_id`` field + A commit hash (40 hexadecimal characters sha1). + +.. note:: + + Tools can use the ``git show-ref`` and ``git symbolic-ref`` commands + to determine if the ``requested_revision`` corresponds to a Git ref. + In turn, a ref beginning with ``refs/tags/`` corresponds to a tag, and + a ref beginning with ``refs/remotes/origin/`` after cloning corresponds + to a branch. + +Mercurial +--------- + +Home page + https://www.mercurial-scm.org/ + +vcs command + hg + +``vcs`` field + hg + +``requested_revision`` field + A tag name, branch name, changeset ID, shortened changeset ID. + +``commit_id`` field + A changeset ID (40 hexadecimal characters). + +Bazaar +------ + +Home page + https://www.breezy-vcs.org/ + +vcs command + bzr + +``vcs`` field + bzr + +``requested_revision`` field + A tag name, branch name, revision id. + +``commit_id`` field + A revision id. + +Subversion +---------- + +Home page + https://subversion.apache.org/ + +vcs command + svn + +``vcs`` field + svn + +``requested_revision`` field + ``requested_revision`` must be compatible with ``svn checkout`` ``--revision`` option. + In Subversion, branch or tag is part of ``url``. 
+ +``commit_id`` field + Since Subversion does not support globally unique identifiers, + this field is the Subversion revision number in the corresponding + repository. + +JSON Schema +=========== + +The following JSON Schema can be used to validate the contents of ``direct_url.json``: + +.. code-block:: + + { + "$schema": "https://json-schema.org/draft/2019-09/schema", + "title": "Direct URL Data", + "description": "Data structure that can represent URLs to python projects and distribution artifacts such as VCS source trees, local source trees, source distributions and wheels.", + "definitions": { + "URL": { + "type": "string", + "format": "uri" + }, + "DirInfo": { + "type": "object", + "properties": { + "editable": { + "type": ["boolean", "null"] + } + } + }, + "VCSInfo": { + "type": "object", + "properties": { + "vcs": { + "type": "string", + "enum": [ + "git", + "hg", + "bzr", + "svn" + ] + }, + "requested_revision": { + "type": "string" + }, + "commit_id": { + "type": "string" + }, + "resolved_revision": { + "type": "string" + } + }, + "required": [ + "vcs", + "commit_id" + ] + }, + "ArchiveInfo": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "pattern": "^\\w+=[a-f0-9]+$", + "deprecated": true + }, + "hashes": { + "type": "object", + "patternProperties": { + "^[a-f0-9]+$": { + "type": "string" + } + } + } + } + } + }, + "allOf": [ + { + "type": "object", + "properties": { + "url": { + "$ref": "#/definitions/URL" + } + }, + "required": [ + "url" + ] + }, + { + "anyOf": [ + { + "type": "object", + "properties": { + "dir_info": { + "$ref": "#/definitions/DirInfo" + } + }, + "required": [ + "dir_info" + ] + }, + { + "type": "object", + "properties": { + "vcs_info": { + "$ref": "#/definitions/VCSInfo" + } + }, + "required": [ + "vcs_info" + ] + }, + { + "type": "object", + "properties": { + "archive_info": { + "$ref": "#/definitions/ArchiveInfo" + } + }, + "required": [ + "archive_info" + ] + } + ] + } + ] + } + +Examples 
+========
+
+Source archive:
+
+.. code::
+
+   {
+     "url": "https://github.com/pypa/pip/archive/1.3.1.zip",
+     "archive_info": {
+       "hashes": {
+         "sha256": "2dc6b5a470a1bde68946f263f1af1515a2574a150a30d6ce02c6ff742fcc0db8"
+       }
+     }
+   }
+
+Git URL with tag and commit-hash:
+
+.. code::
+
+   {
+     "url": "https://github.com/pypa/pip.git",
+     "vcs_info": {
+       "vcs": "git",
+       "requested_revision": "1.3.1",
+       "commit_id": "7921be1537eac1e97bc40179a57f0349c2aee67d"
+     }
+   }
+
+Local directory:
+
+.. code::
+
+   {
+     "url": "file:///home/user/project",
+     "dir_info": {}
+   }
+
+Local directory in editable mode:
+
+.. code::
+
+   {
+     "url": "file:///home/user/project",
+     "dir_info": {
+       "editable": true
+     }
+   }
+
+
+History
+=======
+
+- March 2020: This specification was approved through :pep:`610`, defining
+  the ``direct_url.json`` metadata file.
+- January 2023: Added the ``archive_info.hashes`` key (`discussion
+  <archive-info-hashes_>`_).
+
+
+
+.. _archive-info-hashes: https://discuss.python.org/t/22299
+.. _pip-vcs-support: https://pip.pypa.io/en/stable/topics/vcs-support/
+.. _whatwg-url-standard: https://url.spec.whatwg.org/
diff --git a/source/specifications/direct-url.rst b/source/specifications/direct-url.rst
new file mode 100644
index 000000000..babecd842
--- /dev/null
+++ b/source/specifications/direct-url.rst
@@ -0,0 +1,69 @@
+
+.. _direct-url:
+
+==========================================================
+Recording the Direct URL Origin of installed distributions
+==========================================================
+
+This document specifies a :file:`direct_url.json` file in the
+``*.dist-info`` directory of an installed distribution, to record the
+Direct URL Origin of the distribution. The general structure and usage of
+``*.dist-info`` directories is described in :ref:`recording-installed-packages`.
+
+
+Specification
+=============
+
+The :file:`direct_url.json` file MUST be created in the :file:`*.dist-info`
+directory by installers when installing a distribution from a requirement
+specifying a direct URL reference (including a VCS URL).
+
+This file MUST NOT be created when installing a distribution from another
+type of requirement (i.e. name plus version specifier).
+
+This JSON file MUST be a UTF-8 encoded, :rfc:`8259` compliant, serialization of the
+:doc:`direct-url-data-structure`.
+
+.. note::
+
+   When the requested URL has the file:// scheme and points to a local directory that happens to contain a
+   VCS checkout, installers MUST NOT attempt to infer any VCS information and
+   therefore MUST NOT output any VCS related information (such as ``vcs_info``)
+   in :file:`direct_url.json`.
+
+.. note::
+
+   As a general rule, installers should as much as possible preserve the
+   information that was provided in the requested URL when generating
+   :file:`direct_url.json`. For example user:password environment variables
+   should be preserved and ``requested_revision`` should reflect the revision that was
+   provided in the requested URL as faithfully as possible. This information is
+   however *enriched* with more precise data, such as ``commit_id``.
+ + +Example pip commands and their effect on direct_url.json +======================================================== + +Commands that generate a ``direct_url.json``: + +* ``pip install https://example.com/app-1.0.tgz`` +* ``pip install https://example.com/app-1.0.whl`` +* ``pip install "app @ git+https://example.com/repo/app.git#subdirectory=setup"`` +* ``pip install ./app`` +* ``pip install file:///home/user/app`` +* ``pip install --editable "app @ git+https://example.com/repo/app.git#subdirectory=setup"`` + (in which case, ``url`` will be the local directory where the git repository has been + cloned to, and ``dir_info`` will be present with ``"editable": true`` and no + ``vcs_info`` will be set) +* ``pip install -e ./app`` + +Commands that *do not* generate a ``direct_url.json`` + +* ``pip install app`` +* ``pip install app --no-index --find-links https://example.com/`` + + +History +======= + +- March 2020: This specification was approved through :pep:`610`. diff --git a/source/specifications/entry-points.rst b/source/specifications/entry-points.rst new file mode 100644 index 000000000..dea039492 --- /dev/null +++ b/source/specifications/entry-points.rst @@ -0,0 +1,171 @@ +.. _entry-points: + +========================== +Entry points specification +========================== + +*Entry points* are a mechanism for an installed distribution to advertise +components it provides to be discovered and used by other code. For +example: + +- Distributions can specify ``console_scripts`` entry points, each referring to + a function. When *pip* (or another console_scripts aware installer) installs + the distribution, it will create a command-line wrapper for each entry point. +- Applications can use entry points to load plugins; e.g. Pygments (a syntax + highlighting tool) can use additional lexers and styles from separately + installed packages. For more about this, see + :doc:`/guides/creating-and-discovering-plugins`. 
+ +The entry point file format was originally developed to allow packages built +with setuptools to provide integration point metadata that would be read at +runtime with :py:mod:`importlib.metadata`. It is now defined as a PyPA interoperability +specification in order to allow build tools other than ``setuptools`` to publish +:py:mod:`importlib.metadata` compatible entry point metadata, and runtime libraries other +than :py:mod:`importlib.metadata` to portably read published entry point metadata +(potentially with different caching and conflict resolution strategies). + +Data model +========== + +Conceptually, an entry point is defined by three required properties: + +- The **group** that an entry point belongs to indicates what sort of object it + provides. For instance, the group ``console_scripts`` is for entry points + referring to functions which can be used as a command, while + ``pygments.styles`` is the group for classes defining pygments styles. + The consumer typically defines the expected interface. To avoid clashes, + consumers defining a new group should use names starting with a PyPI name + owned by the consumer project, followed by ``.``. Group names must be one or + more groups of letters, numbers and underscores, separated by dots (regex + ``^\w+(\.\w+)*$``). + +- The **name** identifies this entry point within its group. The precise meaning + of this is up to the consumer. For console scripts, the name of the entry point + is the command that will be used to launch it. Within a distribution, entry + point names should be unique. If different distributions provide the same + name, the consumer decides how to handle such conflicts. The name may contain + any characters except ``=``, but it cannot start or end with any whitespace + character, or start with ``[``. For new entry points, it is recommended to + use only letters, numbers, underscores, dots and dashes (regex ``[\w.-]+``). + +- The **object reference** points to a Python object. 
It is either in the form + ``importable.module``, or ``importable.module:object.attr``. Each of the parts + delimited by dots and the colon is a valid Python identifier. + It is intended to be looked up like this:: + + import importlib + modname, qualname_separator, qualname = object_ref.partition(':') + obj = importlib.import_module(modname) + if qualname_separator: + for attr in qualname.split('.'): + obj = getattr(obj, attr) + +.. note:: + Some tools call this kind of object reference by itself an 'entry point', for + want of a better term, especially where it points to a function to launch a + program. + +There is also an optional property: the **extras** are a set of strings +identifying optional features of the distribution providing the entry point. +If these are specified, the entry point requires the dependencies of those +'extras'. See the metadata field :ref:`metadata_provides_extra`. + +Using extras for an entry point is no longer recommended. Consumers should +support parsing them from existing distributions, but may then ignore them. +New publishing tools need not support specifying extras. The functionality of +handling extras was tied to setuptools' model of managing 'egg' packages, but +newer tools such as pip and virtualenv use a different model. + +File format +=========== + +Entry points are defined in a file called :file:`entry_points.txt` in the +:file:`*.dist-info` directory of the distribution. This is the directory +described in :ref:`recording-installed-packages` for installed distributions, +and in :ref:`binary-distribution-format` for wheels. +The file uses the UTF-8 character encoding. + +The file contents are in INI format, as read by Python's :mod:`configparser` +module. However, configparser treats names as case-insensitive by default, +whereas entry point names are case sensitive. 
A case-sensitive config parser +can be made like this:: + + import configparser + + class CaseSensitiveConfigParser(configparser.ConfigParser): + optionxform = staticmethod(str) + +The entry points file must always use ``=`` to delimit names from values +(whereas configparser also allows using ``:``). + +The sections of the config file represent entry point groups, the names are +names, and the values encode both the object reference and the optional extras. +If extras are used, they are a comma-separated list inside square brackets. + +Within a value, readers must accept and ignore spaces (including multiple +consecutive spaces) before or after the colon, between the object reference and +the left square bracket, between the extra names and the square brackets and +colons delimiting them, and after the right square bracket. The syntax for +extras is formally specified as part of :pep:`508` (as ``extras``) and +restrictions on values specified in :pep:`685`. +For tools writing the file, it is recommended only to insert a space between the +object reference and the left square bracket. + +For example: + +.. code-block:: ini + + [console_scripts] + foo = foomod:main + # One which depends on extras: + foobar = foomod:main_bar [bar,baz] + + # pytest plugins refer to a module, so there is no ':obj' + [pytest11] + nbval = nbval.plugin + +Use for scripts +=============== + +Two groups of entry points have special significance in packaging: +``console_scripts`` and ``gui_scripts``. In both groups, the name of the entry +point should be usable as a command in a system shell after the package is +installed. The object reference points to a function which will be called with +no arguments when this command is run. The function may return an integer to be +used as a process exit code, and returning ``None`` is equivalent to returning +``0``. 
+ +For instance, the entry point ``mycmd = mymod:main`` would create a command +``mycmd`` launching a script like this:: + + import sys + from mymod import main + sys.exit(main()) + +The difference between ``console_scripts`` and ``gui_scripts`` only affects +Windows systems. ``console_scripts`` are wrapped in a console executable, +so they are attached to a console and can use :py:data:`sys.stdin`, +:py:data:`sys.stdout` and :py:data:`sys.stderr` for input and output. +``gui_scripts`` are wrapped in a GUI executable, so they can be started without +a console, but cannot use standard streams unless application code redirects them. +Other platforms do not have the same distinction. + +Install tools are expected to set up wrappers for both ``console_scripts`` and +``gui_scripts`` in the scripts directory of the install scheme. They are not +responsible for putting this directory in the ``PATH`` environment variable +which defines where command-line tools are found. + +As files are created from the names, and some filesystems are case-insensitive, +packages should avoid using names in these groups which differ only in case. +The behaviour of install tools when names differ only in case is undefined. + + +History +======= + +- October 2017: This specification was written to formalize the existing + entry points feature of setuptools (discussion_). + + + +.. _discussion: https://mail.python.org/pipermail/distutils-sig/2017-October/031585.html diff --git a/source/specifications/externally-managed-environments.rst b/source/specifications/externally-managed-environments.rst new file mode 100644 index 000000000..65fc14a62 --- /dev/null +++ b/source/specifications/externally-managed-environments.rst @@ -0,0 +1,474 @@ + +.. 
_externally-managed-environments: + +=============================== +Externally Managed Environments +=============================== + +While some Python installations are entirely managed by the user that installed +Python, others may be provided and managed by another means (such as the +operating system package manager in a Linux distribution, or as a bundled +Python environment in an application with a dedicated installer). + +Attempting to use conventional Python packaging tools to manipulate such +environments can be confusing at best and outright break the entire underlying +operating system at worst. Documentation and interoperability guides only go +so far in resolving such problems. + +This specification defines an ``EXTERNALLY-MANAGED`` marker file that allows a +Python installation to indicate to Python-specific tools such as ``pip`` that they +neither install nor remove packages into the interpreter’s default installation +environment, and should instead guide the end user towards using +:ref:`virtual-environments`. + +It also standardizes an interpretation of the ``sysconfig`` schemes so +that, if a Python-specific package manager is about to install a +package in an interpreter-wide context, it can do so in a manner that +will avoid conflicting with the external package manager and reduces +the risk of breaking software shipped by the external package manager. + + +Terminology +=========== + +A few terms used in this specification have multiple meanings in the +contexts that it spans. For clarity, this specification uses the following +terms in specific ways: + +distro + Short for "distribution," a collection of various sorts of + software, ideally designed to work properly together, including + (in contexts relevant to this document) the Python interpreter + itself, software written in Python, and software written in other + languages. That is, this is the sense used in phrases such as + "Linux distro" or "Berkeley Software Distribution." 
+ + A distro can be an operating system (OS) of its own, such as + Debian, Fedora, or FreeBSD. It can also be an overlay distribution + that installs on top of an existing OS, such as Homebrew or + MacPorts. + + This document uses the short term "distro," because the term + "distribution" has another meaning in Python packaging contexts: a + source or binary distribution package of a single piece of Python + language software, that is, in the sense of + ``setuptools.dist.Distribution`` or "sdist". To avoid confusion, + this document does not use the plain term "distribution" at all. + In the Python packaging sense, it uses the full phrase + "distribution package" or just "package" (see below). + + The provider of a distro - the team or company that collects and + publishes the software and makes any needed modifications - is its + **distributor**. +package + A unit of software that can be installed and used within Python. + That is, this refers to what Python-specific packaging tools tend + to call a :term:`distribution package` or simply a "distribution"; + the colloquial abbreviation "package" is used in the sense of the + Python Package Index. + + This document does not use "package" in the sense of an importable + name that contains Python modules, though in many cases, a + distribution package consists of a single importable package of + the same name. + + This document generally does not use the term "package" to refer + to units of installation by a distro's package manager (such as + ``.deb`` or ``.rpm`` files). When needed, it uses phrasing such as + "a distro's package." (Again, in many cases, a Python package is + shipped inside a distro's package named something like ``python-`` + plus the Python package name.) +Python-specific package manager + A tool for installing, upgrading, and/or removing Python packages + in a manner that conforms to Python packaging standards. 
+ The most popular Python-specific package + manager is pip_; other examples include the old `Easy + Install command `_ as well as direct usage of a + ``setup.py`` command. + + .. _pip: https://pip.pypa.io/en/stable/ + .. _easy-install: https://setuptools.readthedocs.io/en/latest/deprecated/easy_install.html + + (Note that the ``easy_install`` command was removed in + setuptools version 52, released 23 January 2021.) + + + (Conda_ is a bit of a special case, as the ``conda`` + command can install much more than just Python packages, making it + more like a distro package manager in some senses. Since the + ``conda`` command generally only operates on Conda-created + environments, most of the concerns in this document do not apply + to ``conda`` when acting as a Python-specific package manager.) + + .. _conda: https://conda.io +distro package manager + A tool for installing, upgrading, and/or removing a distro's + packages in an installed instance of that distro, which is capable + of installing Python packages as well as non-Python packages, and + therefore generally has its own database of installed software + unrelated to the :ref:`database of installed distributions + `. Examples include ``apt``, ``dpkg``, + ``dnf``, ``rpm``, ``pacman``, and ``brew``. The salient feature is + that if a package was installed by a distro package manager, removing or + upgrading it in a way that would satisfy a Python-specific package + manager will generally leave a distro package manager in an + inconsistent state. + + This document also uses phrases like "external package manager" or + "system's package manager" to refer to a distro package manager in + certain contexts. +shadow + To shadow an installed Python package is to cause some other + package to be preferred for imports without removing any files + from the shadowed package. 
This requires multiple entries on + ``sys.path``: if package A 2.0 installs module ``a.py`` in one + ``sys.path`` entry, and package A 1.0 installs module ``a.py`` in + a later ``sys.path`` entry, then ``import a`` returns the module + from the former, and we say that A 2.0 shadows A 1.0. + +Overview +======== + +This specification is twofold. + +First, it describes **a way for distributors of a Python interpreter to +mark that interpreter as having its packages managed by means external +to Python**, such that Python-specific tools like pip should not +change the installed packages in the interpreter's global ``sys.path`` +in any way (add, upgrade/downgrade, or remove) unless specifically +overridden. It also provides a means for the distributor to indicate +how to use a virtual environment as an alternative. + +This is an opt-in mechanism: by default, the Python interpreter +compiled from upstream sources will not be so marked, and so running +``pip install`` with a self-compiled interpreter, or with a distro +that has not explicitly marked its interpreter, will work as it always +has worked. + +Second, it sets the rule that when installing packages to an +interpreter's global context (either to an unmarked interpreter, or if +overriding the marking), **Python-specific package managers should +modify or delete files only within the directories of the sysconfig +scheme in which they would create files**. This permits a distributor +of a Python interpreter to set up two directories, one for its own +managed packages, and one for unmanaged packages installed by the end +user, and ensure that installing unmanaged packages will not delete +(or overwrite) files owned by the external package manager. 
+ + +Marking an interpreter as using an external package manager +=========================================================== + +Before a Python-specific package installer (that is, a tool such as +pip - not an external tool such as apt) installs a package into a +certain Python context, it should make the following checks by +default: + +1. Is it running outside of a virtual environment? It can determine + this by whether ``sys.prefix == sys.base_prefix``. + +2. Is there an ``EXTERNALLY-MANAGED`` file in the directory identified + by ``sysconfig.get_path("stdlib", sysconfig.get_default_scheme())``? + +If both of these conditions are true, the installer should exit with +an error message indicating that package installation into this Python +interpreter's directory are disabled outside of a virtual environment. + +The installer should have a way for the user to override these rules, +such as a command-line flag ``--break-system-packages``. This option +should not be enabled by default and should carry some connotation +that its use is risky. + +The ``EXTERNALLY-MANAGED`` file is an INI-style metadata file intended +to be parsable by the standard library configparser_ module. If the +file can be parsed by +``configparser.ConfigParser(interpolation=None)`` using the UTF-8 +encoding, and it contains a section ``[externally-managed]``, then the +installer should look for an error message specified in the file and +output it as part of its error. If the first element of the tuple +returned by ``locale.getlocale(locale.LC_MESSAGES)``, i.e., the +language code, is not ``None``, it should look for the error message +as the value of a key named ``Error-`` followed by the language code. +If that key does not exist, and if the language code contains +underscore or hyphen, it should look for a key named ``Error-`` +followed by the portion of the language code before the underscore or +hyphen. 
If it cannot find either of those, or if the language code is +``None``, it should look for a key simply named ``Error``. + +.. _configparser: https://docs.python.org/3/library/configparser.html + +If the installer cannot find an error message in the file (either +because the file cannot be parsed or because no suitable error key +exists), then the installer should just use a pre-defined error +message of its own, which should suggest that the user create a +virtual environment to install packages. + +Software distributors who have a non-Python-specific package manager +that manages libraries in the ``sys.path`` of their Python package +should, in general, ship an ``EXTERNALLY-MANAGED`` file in their +standard library directory. For instance, Debian may ship a file in +``/usr/lib/python3.9/EXTERNALLY-MANAGED`` consisting of something like + +.. code-block:: ini + + [externally-managed] + Error=To install Python packages system-wide, try apt install + python3-xyz, where xyz is the package you are trying to + install. + + If you wish to install a non-Debian-packaged Python package, + create a virtual environment using python3 -m venv path/to/venv. + Then use path/to/venv/bin/python and path/to/venv/bin/pip. Make + sure you have python3-full installed. + + If you wish to install a non-Debian packaged Python application, + it may be easiest to use pipx install xyz, which will manage a + virtual environment for you. Make sure you have pipx installed. + + See /usr/share/doc/python3.9/README.venv for more information. + +which provides useful and distro-relevant information +to a user trying to install a package. Optionally, +translations can be provided in the same file: + +.. code-block:: ini + + Error-de_DE=Wenn ist das Nunstück git und Slotermeyer? + + Ja! Beiherhund das Oder die Virtualenvironment gersput! 
+ +In certain contexts, such as single-application container images that +aren't updated after creation, a distributor may choose not to ship an +``EXTERNALLY-MANAGED`` file, so that users can install whatever they +like (as they can today) without having to manually override this +rule. + +Writing to only the target ``sysconfig`` scheme +=============================================== + +Usually, a Python package installer installs to directories in a +scheme returned by the ``sysconfig`` standard library package. +Ordinarily, this is the scheme returned by +``sysconfig.get_default_scheme()``, but based on configuration (e.g. +``pip install --user``), it may use a different scheme. + +Whenever the installer is installing to a ``sysconfig`` scheme, this +specification declares that the installer should never modify or delete files +outside of that scheme. For instance, if it's upgrading a package, and +the package is already installed in a directory outside that scheme +(perhaps in a directory from another scheme), it should leave the +existing files alone. + +If the installer does end up shadowing an existing installation during +an upgrade, we recommend that it produces a warning at the end of its +run. + +If the installer is installing to a location outside of a +``sysconfig`` scheme (e.g., ``pip install --target``), then this +subsection does not apply. + +Recommendations for distros +=========================== + +This section is non-normative. It provides best practices we believe +distros should follow unless they have a specific reason otherwise. + +Mark the installation as externally managed +------------------------------------------- + +Distros should create an ``EXTERNALLY-MANAGED`` file in their +``stdlib`` directory. 
+ +Guide users towards virtual environments +---------------------------------------- + +The file should contain a useful and distro-relevant error message +indicating both how to install system-wide packages via the distro's +package manager and how to set up a virtual environment. If your +distro is often used by users in a state where the ``python3`` command +is available (and especially where ``pip`` or ``get-pip`` is +available) but ``python3 -m venv`` does not work, the message should +indicate clearly how to make ``python3 -m venv`` work properly. + +Consider packaging pipx_, a tool for installing Python-language +applications, and suggesting it in the error. pipx automatically +creates a virtual environment for that application alone, which is a +much better default for end users who want to install some +Python-language software (which isn't available in the distro) but are +not themselves Python users. Packaging pipx in the distro avoids the +irony of instructing users to ``pip install --user +--break-system-packages pipx`` to *avoid* breaking system packages. +Consider arranging things so your distro's package / environment for +Python for end users (e.g., ``python3`` on Fedora or ``python3-full`` +on Debian) depends on pipx. + +.. _pipx: https://github.com/pypa/pipx + +Keep the marker file in container images +---------------------------------------- + +Distros that produce official images for single-application containers +(e.g., Docker container images) should keep the +``EXTERNALLY-MANAGED`` file, preferably in a way that makes it not +go away if a user of that image installs package updates inside +their image (think ``RUN apt-get dist-upgrade``). 
+ +Create separate distro and local directories +-------------------------------------------- + +Distros should place two separate paths on the system interpreter's +``sys.path``, one for distro-installed packages and one for packages +installed by the local system administrator, and configure +``sysconfig.get_default_scheme()`` to point at the latter path. This +ensures that tools like pip will not modify distro-installed packages. +The path for the local system administrator should come before the +distro path on ``sys.path`` so that local installs take preference +over distro packages. + +For example, Fedora and Debian (and their derivatives) both implement +this split by using ``/usr/local`` for locally-installed packages and +``/usr`` for distro-installed packages. Fedora uses +``/usr/local/lib/python3.x/site-packages`` vs. +``/usr/lib/python3.x/site-packages``. (Debian uses +``/usr/local/lib/python3/dist-packages`` vs. +``/usr/lib/python3/dist-packages`` as an additional layer of +separation from a locally-compiled Python interpreter: if you build +and install upstream CPython in ``/usr/local/bin``, it will look at +``/usr/local/lib/python3/site-packages``, and Debian wishes to make +sure that packages installed via the locally-built interpreter don't +show up on ``sys.path`` for the distro interpreter.) + +Note that the ``/usr/local`` vs. ``/usr`` split is analogous to how +the ``PATH`` environment variable typically includes +``/usr/local/bin:/usr/bin`` and non-distro software installs to +``/usr/local`` by default. This split is `recommended by the +Filesystem Hierarchy Standard`__. + +.. __: https://refspecs.linuxfoundation.org/FHS_3.0/fhs/ch04s09.html + +There are two ways you could do this. One is, if you are building and +packaging Python libraries directly (e.g., your packaging helpers +unpack a wheel or call ``setup.py install``), arrange +for those tools to use a directory that is not in a ``sysconfig`` +scheme but is still on ``sys.path``. 
+ +The other is to arrange for the default ``sysconfig`` scheme to change +when running inside a package build versus when running on an +installed system. The ``sysconfig`` customization hooks from +bpo-43976_ should make this easy (once accepted and implemented): +make your packaging tool set an +environment variable or some other detectable configuration, and +define a ``get_preferred_schemes`` function to return a different +scheme when called from inside a package build. Then you can use ``pip +install`` as part of your distro packaging. + +.. _bpo-43976: https://bugs.python.org/issue43976 + +We propose adding a ``--scheme=...`` option to instruct pip to run +against a specific scheme. (See `Implementation Notes`_ below for how +pip currently determines schemes.) Once that's available, for local +testing and possibly for actual packaging, you would be able to run +something like ``pip install --scheme=posix_distro`` to explicitly +install a package into your distro's location (bypassing +``get_preferred_schemes``). One could also, if absolutely needed, use +``pip uninstall --scheme=posix_distro`` to use pip to remove packages +from the system-managed directory. + +To install packages with pip, you would also need to either suppress +the ``EXTERNALLY-MANAGED`` marker file to allow pip to run or to +override it on the command line. You may want to use the same means +for suppressing the marker file in build chroots as you do in +container images. + +The advantage of setting these up to be automatic (suppressing the +marker file in your build environment and having +``get_preferred_schemes`` automatically return your distro's scheme) +is that an unadorned ``pip install`` will work inside a package build, +which generally means that an unmodified upstream build script that +happens to internally call ``pip install`` will do the right thing. 
+You can, of course, just ensure that your packaging process always +calls ``pip install --scheme=posix_distro --break-system-packages``, +which would work too. + +The best approach here depends a lot on your distro's conventions and +mechanisms for packaging. + +Similarly, the ``sysconfig`` paths that are not for importable Python +code - that is, ``include``, ``platinclude``, ``scripts``, and +``data`` - should also have two variants, one for use by +distro-packaged software and one for use for locally-installed +software, and the distro should be set up such that both are usable. +For instance, a typical FHS-compliant distro will use +``/usr/local/include`` for the default scheme's ``include`` and +``/usr/include`` for distro-packaged headers and place both on the +compiler's search path, and it will use ``/usr/local/bin`` for the +default scheme's ``scripts`` and ``/usr/bin`` for distro-packaged +entry points and place both on ``$PATH``. + + +Implementation Notes +==================== + +This section is non-normative and contains notes relevant to both the +specification and potential implementations. + +Currently (as of May 2021), pip does not directly expose a way to choose +a target ``sysconfig`` scheme, but it has three ways of looking up schemes +when installing: + +``pip install`` + Calls ``sysconfig.get_default_scheme()``, which is usually (in + upstream CPython and most current distros) the same as + ``get_preferred_scheme('prefix')``. + +``pip install --prefix=/some/path`` + Calls ``sysconfig.get_preferred_scheme('prefix')``. + +``pip install --user`` + Calls ``sysconfig.get_preferred_scheme('user')``. + +Finally, ``pip install --target=/some/path`` writes directly to +``/some/path`` without looking up any schemes. 
+ +Debian currently carries a `patch to change the default install +location inside a virtual environment`__, using a few heuristics +(including checking for the ``VIRTUAL_ENV`` environment variable), +largely so that the directory used in a virtual environment remains +``site-packages`` and not ``dist-packages``. This does not +particularly affect this proposal, because the implementation of that +patch does not actually change the default ``sysconfig`` scheme, and +notably does not change the result of +``sysconfig.get_path("stdlib")``. + +.. __: https://sources.debian.org/src/python3.7/3.7.3-2+deb10u3/debian/patches/distutils-install-layout.diff/ + +Fedora currently carries a `patch to change the default install +location when not running inside rpmbuild`__, which they use to +implement the two-system-wide-directories approach. This is +conceptually the sort of hook envisioned by bpo-43976_, except +implemented as a code patch to ``distutils`` instead of as a changed +``sysconfig`` scheme. + +.. __: https://src.fedoraproject.org/rpms/python3.9/blob/f34/f/00251-change-user-install-location.patch + +The implementation of ``is_virtual_environment`` above, as well as the +logic to load the ``EXTERNALLY-MANAGED`` file and find the error +message from it, may as well get added to the standard library +(``sys`` and ``sysconfig``, respectively), to centralize their +implementations, but they don't need to be added yet. + + + + +Copyright +========= + +This document is placed in the public domain or under the +CC0-1.0-Universal license, whichever is more permissive. + + + +History +======= + +- June 2022: This specification was approved through :pep:`668`. diff --git a/source/specifications/glob-patterns.rst b/source/specifications/glob-patterns.rst new file mode 100644 index 000000000..abdb15b0f --- /dev/null +++ b/source/specifications/glob-patterns.rst @@ -0,0 +1,115 @@ +================= +``glob`` patterns +================= + +Some PyPA specifications, e.g. 
:ref:`pyproject.toml's license-files +`, accept certain types of *glob patterns* +to match a given string containing wildcards and character ranges against +files and directories. This specification defines which patterns are acceptable +and how they should be handled. + + +Valid glob patterns +=================== + +For PyPA purposes, a *valid glob pattern* MUST be a string matched against +filesystem entries as specified below: + +- Alphanumeric characters, underscores (``_``), hyphens (``-``) and dots (``.``) + MUST be matched verbatim. + +- Special glob characters: ``*``, ``?``, ``**`` and character ranges: ``[]`` + containing only the verbatim matched characters MUST be supported. + Within ``[...]``, the hyphen indicates a locale-agnostic range (e.g. ``a-z``, + order based on Unicode code points). + Hyphens at the start or end are matched literally. + +- Path delimiters MUST be the forward slash character (``/``). + +- Patterns always refer to *relative paths*, + e.g., when used in :file:`pyproject.toml`, patterns should always be + relative to the directory containing that file. + Therefore the leading slash character MUST NOT be used. + +- Parent directory indicators (``..``) MUST NOT be used. + +Any characters or character sequences not covered by this specification are +invalid. Projects MUST NOT use such values. +Tools consuming glob patterns SHOULD reject invalid values with an error. + +Literal paths (e.g. :file:`LICENSE`) are valid globs which means they +can also be defined. + +Tools consuming glob patterns: + +- MUST treat each value as a glob pattern, and MUST raise an error if the + pattern contains invalid glob syntax. +- MUST raise an error if any individual user-specified pattern does not match + at least one file. + +Examples of valid glob patterns: + +.. code-block:: python + + "LICEN[CS]E*" + "AUTHORS*" + "licenses/LICENSE.MIT" + "licenses/LICENSE.CC0" + "LICENSE.txt" + "licenses/*" + +Examples of invalid glob patterns: + +.. 
code-block:: python
+
+    "..\LICENSE.MIT"
+    # .. must not be used.
+    # \ is an invalid path delimiter, / must be used.
+
+    "LICEN{CSE*"
+    # the { character is not allowed
+
+
+Reference implementation in Python
+==================================
+
+It is possible to defer the majority of the pattern matching against the file
+system to the :mod:`glob` module in Python's standard library. It is necessary
+however to perform additional validations.
+
+The code below is a simple reference implementation:
+
+.. code-block:: python
+
+    import os
+    import re
+    from glob import glob
+
+
+    def find_pattern(pattern: str) -> list[str]:
+        """
+        >>> find_pattern("/LICENSE.MIT")
+        Traceback (most recent call last):
+        ...
+        ValueError: Pattern '/LICENSE.MIT' should be relative...
+        >>> find_pattern("../LICENSE.MIT")
+        Traceback (most recent call last):
+        ...
+        ValueError: Pattern '../LICENSE.MIT' cannot contain '..'...
+        >>> find_pattern("LICEN{CSE*")
+        Traceback (most recent call last):
+        ...
+        ValueError: Pattern 'LICEN{CSE*' contains invalid characters...
+        """
+        if ".." in pattern:
+            raise ValueError(f"Pattern {pattern!r} cannot contain '..'")
+        if pattern.startswith((os.sep, "/")) or ":\\" in pattern:
+            raise ValueError(
+                f"Pattern {pattern!r} should be relative and must not start with '/'"
+            )
+        if re.match(r'^[\w\-\.\/\*\?\[\]]+$', pattern) is None:
+            raise ValueError(f"Pattern '{pattern}' contains invalid characters.")
+        found = glob(pattern, recursive=True)
+        if not found:
+            raise ValueError(f"Pattern '{pattern}' did not match any files.")
+        return found
diff --git a/source/specifications/index-hosted-attestations.rst b/source/specifications/index-hosted-attestations.rst
new file mode 100644
index 000000000..d078e87bd
--- /dev/null
+++ b/source/specifications/index-hosted-attestations.rst
@@ -0,0 +1,368 @@
+
+.. _index-hosted-attestations:
+
+=========================
+Index hosted attestations
+=========================
+
+.. 
note:: This specification was originally defined in :pep:`740`. + +.. note:: + + :pep:`740` includes changes to the HTML and JSON index APIs. + These changes are documented in the :ref:`simple-repository-api` + under :ref:`simple-repository-api-base` and :ref:`json-serialization`. + +Specification +============= + +.. _upload-endpoint: + +Upload endpoint changes +----------------------- + +.. important:: + + The "legacy" upload API is not standardized. + See `PyPI's Upload API documentation `_ + for how attestations are uploaded. + +.. _attestation-object: + +Attestation objects +------------------- + +An attestation object is a JSON object with several required keys; applications +or signers may include additional keys so long as all explicitly +listed keys are provided. The required layout of an attestation +object is provided as pseudocode below. + +.. code-block:: python + + @dataclass + class Attestation: + version: Literal[1] + """ + The attestation object's version, which is always 1. + """ + + verification_material: VerificationMaterial + """ + Cryptographic materials used to verify `envelope`. + """ + + envelope: Envelope + """ + The enveloped attestation statement and signature. + """ + + + @dataclass + class Envelope: + statement: bytes + """ + The attestation statement. + + This is represented as opaque bytes on the wire (encoded as base64), + but it MUST be an JSON in-toto v1 Statement. + """ + + signature: bytes + """ + A signature for the above statement, encoded as base64. + """ + + @dataclass + class VerificationMaterial: + certificate: str + """ + The signing certificate, as `base64(DER(cert))`. + """ + + transparency_entries: list[object] + """ + One or more transparency log entries for this attestation's signature + and certificate. + """ + +A full data model for each object in ``transparency_entries`` is provided in +:ref:`appendix`. 
Attestation objects **SHOULD** include one or more +transparency log entries, and **MAY** include additional keys for other +sources of signed time (such as an :rfc:`3161` Time Stamping Authority or a +`Roughtime `__ server). + +Attestation objects are versioned; this PEP specifies version 1. Each version +is tied to a single cryptographic suite to minimize unnecessary cryptographic +agility. In version 1, the suite is as follows: + +* Certificates are specified as X.509 certificates, and comply with the + profile in :rfc:`5280`. +* The message signature algorithm is ECDSA, with the P-256 curve for public keys + and SHA-256 as the cryptographic digest function. + +Future PEPs may change this suite (and the overall shape of the attestation +object) by selecting a new version number. + +.. _payload-and-signature-generation: + +Attestation statement and signature generation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The *attestation statement* is the actual claim that is cryptographically signed +over within the attestation object (i.e., the ``envelope.statement``). + +The attestation statement is encoded as a +`v1 in-toto Statement object `__, +in JSON form. When serialized the statement is treated as an opaque binary blob, +avoiding the need for canonicalization. + +In addition to being a v1 in-toto Statement, the attestation statement is constrained +in the following ways: + +* The in-toto ``subject`` **MUST** contain only a single subject. +* ``subject[0].name`` is the distribution's filename, which **MUST** be + a valid :ref:`source distribution ` or + :ref:`wheel distribution ` filename. +* ``subject[0].digest`` **MUST** contain a SHA-256 digest. Other digests + **MAY** be present. The digests **MUST** be represented as hexadecimal strings. 
+* The following ``predicateType`` values are supported: + + * `SLSA Provenance `__: ``https://slsa.dev/provenance/v1`` + * `PyPI Publish Attestation `__: ``https://docs.pypi.org/attestations/publish/v1`` + +The signature over this statement is constructed using the +`v1 DSSE signature protocol `__, +with a ``PAYLOAD_TYPE`` of ``application/vnd.in-toto+json`` and a ``PAYLOAD_BODY`` of the JSON-encoded +statement above. No other ``PAYLOAD_TYPE`` is permitted. + +.. _provenance-object: + +Provenance objects +------------------ + +The index will serve uploaded attestations along with metadata that can assist +in verifying them in the form of JSON serialized objects. + +These *provenance objects* will be available via both the Simple Index +and JSON-based Simple API as described above, and will have the following layout: + +.. code-block:: json + + { + "version": 1, + "attestation_bundles": [ + { + "publisher": { + "kind": "important-ci-service", + "claims": {}, + "vendor-property": "foo", + "another-property": 123 + }, + "attestations": [ + { /* attestation 1 ... */ }, + { /* attestation 2 ... */ } + ] + } + ] + } + +or, as pseudocode: + +.. code-block:: python + + @dataclass + class Publisher: + kind: string + """ + The kind of Trusted Publisher. + """ + + claims: object | None + """ + Any context-specific claims retained by the index during Trusted Publisher + authentication. + """ + + _rest: object + """ + Each publisher object is open-ended, meaning that it MAY contain additional + fields beyond the ones specified explicitly above. This field signals that, + but is not itself present. + """ + + @dataclass + class AttestationBundle: + publisher: Publisher + """ + The publisher associated with this set of attestations. + """ + + attestations: list[Attestation] + """ + The set of attestations included in this bundle. + """ + + @dataclass + class Provenance: + version: Literal[1] + """ + The provenance object's version, which is always 1. 
+ """ + + attestation_bundles: list[AttestationBundle] + """ + One or more attestation "bundles". + """ + +* ``version`` is ``1``. Like attestation objects, provenance objects are + versioned, and this PEP only defines version ``1``. +* ``attestation_bundles`` is a **required** JSON array, containing one + or more "bundles" of attestations. Each bundle corresponds to a + signing identity (such as a Trusted Publishing identity), and contains + one or more attestation objects. + + As noted in the ``Publisher`` model, + each ``AttestationBundle.publisher`` object is specific to its Trusted Publisher + but must include at minimum: + + * A ``kind`` key, which **MUST** be a JSON string that uniquely identifies the + kind of Trusted Publisher. + * A ``claims`` key, which **MUST** be a JSON object containing any context-specific + claims retained by the index during Trusted Publisher authentication. + + All other keys in the publisher object are publisher-specific. + + Each array of attestation objects is a superset of the ``attestations`` + array supplied by the uploaded through the ``attestations`` field at upload + time, as described in :ref:`upload-endpoint` and + :ref:`changes-to-provenance-objects`. + +.. _changes-to-provenance-objects: + +Changes to provenance objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Provenance objects are *not* immutable, and may change over time. Reasons +for changes to the provenance object include but are not limited to: + +* Addition of new attestations for a pre-existing signing identity: the index + **MAY** choose to allow additional attestations by pre-existing signing + identities, such as newer attestation versions for already uploaded + files. + +* Addition of new signing identities and associated attestations: the index + **MAY** choose to support attestations from sources other than the file's + uploader, such as third-party auditors or the index itself. 
These attestations + may be performed asynchronously, requiring the index to insert them into + the provenance object *post facto*. + +.. _attestation-verification: + +Attestation verification +------------------------ + +Verifying an attestation object against a distribution file requires verification of each of the +following: + +* ``version`` is ``1``. The verifier **MUST** reject any other version. +* ``verification_material.certificate`` is a valid signing certificate, as + issued by an *a priori* trusted authority (such as a root of trust already + present within the verifying client). +* ``verification_material.certificate`` identifies an appropriate signing + subject, such as the machine identity of the Trusted Publisher that published + the package. +* ``envelope.statement`` is a valid in-toto v1 Statement, with a subject + and digest that **MUST** match the distribution's filename and contents. + For the distribution's filename, matching **MUST** be performed by parsing + using the appropriate source distribution or wheel filename format, as + the statement's subject may be equivalent but normalized. +* ``envelope.signature`` is a valid signature for ``envelope.statement`` + corresponding to ``verification_material.certificate``, + as reconstituted via the + `v1 DSSE signature protocol `__. + +In addition to the above required steps, a verifier **MAY** additionally verify +``verification_material.transparency_entries`` on a policy basis, e.g. requiring +at least one transparency log entry or a threshold of entries. When verifying +transparency entries, the verifier **MUST** confirm that the inclusion time for +each entry lies within the signing certificate's validity period. + +.. _appendix: + +Appendix: Data models for Transparency Log Entries +==================================================== + +This appendix contains pseudocoded data models for transparency log entries +in attestation objects. 
Each transparency log entry serves as a source +of signed inclusion time, and can be verified either online or offline. + +.. code-block:: python + + @dataclass + class TransparencyLogEntry: + log_index: int + """ + The global index of the log entry, used when querying the log. + """ + + log_id: str + """ + An opaque, unique identifier for the log. + """ + + entry_kind: str + """ + The kind (type) of log entry. + """ + + entry_version: str + """ + The version of the log entry's submitted format. + """ + + integrated_time: int + """ + The UNIX timestamp from the log from when the entry was persisted. + """ + + inclusion_proof: InclusionProof + """ + The actual inclusion proof of the log entry. + """ + + + @dataclass + class InclusionProof: + log_index: int + """ + The index of the entry in the tree it was written to. + """ + + root_hash: str + """ + The digest stored at the root of the Merkle tree at the time of proof + generation. + """ + + tree_size: int + """ + The size of the Merkle tree at the time of proof generation. + """ + + hashes: list[str] + """ + A list of hashes required to complete the inclusion proof, sorted + in order from leaf to root. The leaf and root hashes are not themselves + included in this list; the root is supplied via `root_hash` and the client + must calculate the leaf hash. + """ + + checkpoint: str + """ + The signed tree head's signature, at the time of proof generation. + """ + + cosigned_checkpoints: list[str] + """ + Cosigned checkpoints from zero or more log witnesses. + """ diff --git a/source/specifications/index.rst b/source/specifications/index.rst new file mode 100644 index 000000000..68d95ab98 --- /dev/null +++ b/source/specifications/index.rst @@ -0,0 +1,19 @@ +.. _`packaging-specifications`: + +PyPA specifications +################### + +This is a list of currently active interoperability specifications maintained +by the Python Packaging Authority. 
The process for updating these standards, +and for proposing new ones, is documented on +`pypa.io `__. + +.. toctree:: + :titlesonly: + + section-distribution-metadata + section-installation-metadata + section-distribution-formats + section-package-indices + section-python-description-formats + section-reproducible-environments diff --git a/source/specifications/inline-script-metadata.rst b/source/specifications/inline-script-metadata.rst new file mode 100644 index 000000000..6fa832a3e --- /dev/null +++ b/source/specifications/inline-script-metadata.rst @@ -0,0 +1,219 @@ +.. _inline-script-metadata: + +====================== +Inline script metadata +====================== + +This specification defines a metadata format that can be embedded in single-file +Python scripts to assist launchers, IDEs and other external tools which may need +to interact with such scripts. + + +Specification +============= + +This specification defines a metadata comment block format (loosely inspired by +`reStructuredText Directives`__). + +__ https://docutils.sourceforge.io/docs/ref/rst/directives.html + +Any Python script may have top-level comment blocks that MUST start with the +line ``# /// TYPE`` where ``TYPE`` determines how to process the content. That +is: a single ``#``, followed by a single space, followed by three forward +slashes, followed by a single space, followed by the type of metadata. Block +MUST end with the line ``# ///``. That is: a single ``#``, followed by a single +space, followed by three forward slashes. The ``TYPE`` MUST only consist of +ASCII letters, numbers and hyphens. + +Every line between these two lines (``# /// TYPE`` and ``# ///``) MUST be a +comment starting with ``#``. If there are characters after the ``#`` then the +first character MUST be a space. 
The embedded content is formed by taking away +the first two characters of each line if the second character is a space, +otherwise just the first character (which means the line consists of only a +single ``#``). + +Precedence for an ending line ``# ///`` is given when the next line is not +a valid embedded content line as described above. For example, the following +is a single fully valid block: + +.. code:: python + + # /// some-toml + # embedded-csharp = """ + # /// + # /// text + # /// + # /// + # public class MyClass { } + # """ + # /// + +A starting line MUST NOT be placed between another starting line and its ending +line. In such cases tools MAY produce an error. Unclosed blocks MUST be ignored. + +When there are multiple comment blocks of the same ``TYPE`` defined, tools MUST +produce an error. + +Tools reading embedded metadata MAY respect the standard Python encoding +declaration. If they choose not to do so, they MUST process the file as UTF-8. + +This is the canonical regular expression that MAY be used to parse the +metadata: + +.. code:: text + + (?m)^# /// (?P[a-zA-Z0-9-]+)$\s(?P(^#(| .*)$\s)+)^# ///$ + +In circumstances where there is a discrepancy between the text specification +and the regular expression, the text specification takes precedence. + +Tools MUST NOT read from metadata blocks with types that have not been +standardized by this specification. + +script type +----------- + +The first type of metadata block is named ``script``, which contains +script metadata (dependency data and tool configuration). + +This document MAY include the top-level fields ``dependencies`` and ``requires-python``, +and MAY optionally include a ``[tool]`` table. + +The ``[tool]`` table MAY be used by any tool, script runner or otherwise, to configure +behavior. It has the same semantics as the :ref:`[tool] table in pyproject.toml +`. + +The top-level fields are: + +* ``dependencies``: A list of strings that specifies the runtime dependencies + of the script. 
Each entry MUST be a valid + :ref:`dependency specifier `. +* ``requires-python``: A string that specifies the Python version(s) with which + the script is compatible. The value of this field MUST be a valid + :ref:`version specifier `. + +Script runners MUST error if the specified ``dependencies`` cannot be provided. +Script runners SHOULD error if no version of Python that satisfies the specified +``requires-python`` can be provided. + +Example +------- + +The following is an example of a script with embedded metadata: + +.. code:: python + + # /// script + # requires-python = ">=3.11" + # dependencies = [ + # "requests<3", + # "rich", + # ] + # /// + + import requests + from rich.pretty import pprint + + resp = requests.get("https://peps.python.org/api/peps.json") + data = resp.json() + pprint([(k, v["title"]) for k, v in data.items()][:10]) + + +Reference Implementation +======================== + +The following is an example of how to read the metadata on Python 3.11 or +higher. + +.. code:: python + + import re + import tomllib + + REGEX = r'(?m)^# /// (?P[a-zA-Z0-9-]+)$\s(?P(^#(| .*)$\s)+)^# ///$' + + def read(script: str) -> dict | None: + name = 'script' + matches = list( + filter(lambda m: m.group('type') == name, re.finditer(REGEX, script)) + ) + if len(matches) > 1: + raise ValueError(f'Multiple {name} blocks found') + elif len(matches) == 1: + content = ''.join( + line[2:] if line.startswith('# ') else line[1:] + for line in matches[0].group('content').splitlines(keepends=True) + ) + return tomllib.loads(content) + else: + return None + +Often tools will edit dependencies like package managers or dependency update +automation in CI. The following is a crude example of modifying the content +using the ``tomlkit`` library__. + +__ https://tomlkit.readthedocs.io/en/latest/ + +.. 
code:: python + + import re + + import tomlkit + + REGEX = r'(?m)^# /// (?P[a-zA-Z0-9-]+)$\s(?P(^#(| .*)$\s)+)^# ///$' + + def add(script: str, dependency: str) -> str: + match = re.search(REGEX, script) + content = ''.join( + line[2:] if line.startswith('# ') else line[1:] + for line in match.group('content').splitlines(keepends=True) + ) + + config = tomlkit.parse(content) + config['dependencies'].append(dependency) + new_content = ''.join( + f'# {line}' if line.strip() else f'#{line}' + for line in tomlkit.dumps(config).splitlines(keepends=True) + ) + + start, end = match.span('content') + return script[:start] + new_content + script[end:] + +Note that this example used a library that preserves TOML formatting. This is +not a requirement for editing by any means but rather is a "nice to have" +feature. + +The following is an example of how to read a stream of arbitrary metadata +blocks. + +.. code:: python + + import re + from typing import Iterator + + REGEX = r'(?m)^# /// (?P[a-zA-Z0-9-]+)$\s(?P(^#(| .*)$\s)+)^# ///$' + + def stream(script: str) -> Iterator[tuple[str, str]]: + for match in re.finditer(REGEX, script): + yield match.group('type'), ''.join( + line[2:] if line.startswith('# ') else line[1:] + for line in match.group('content').splitlines(keepends=True) + ) + + +Recommendations +=============== + +Tools that support managing different versions of Python should attempt to use +the highest available version of Python that is compatible with the script's +``requires-python`` metadata, if defined. + + +History +======= + +- October 2023: This specification was conditionally approved through :pep:`723`. +- January 2024: Through amendments to :pep:`723`, the ``pyproject`` metadata + block type was renamed to ``script``, and the ``[run]`` table was dropped, + making the ``dependencies`` and ``requires-python`` keys + top-level. Additionally, the specification is no longer provisional. 
diff --git a/source/specifications/license-expression.rst b/source/specifications/license-expression.rst new file mode 100644 index 000000000..50860b7aa --- /dev/null +++ b/source/specifications/license-expression.rst @@ -0,0 +1,56 @@ +================== +License Expression +================== + +:pep:`639` defined a new :ref:`pyproject.toml's license ` +value and added a corresponding :ref:`core metadata License-Expression field +`. +This specification defines which license expressions are acceptable. + + +Specification +============= + +License can be defined as a text string that is a valid SPDX +:term:`license expression `, +as documented in the `SPDX specification `__, +either Version 2.2 or a later compatible version. + +A license expression can use the following license identifiers: + +- Any SPDX-listed license short-form identifiers that are published in + the `SPDX License List `__, + version 3.17 or any later compatible version. + +- The custom ``LicenseRef-[idstring]`` string(s), where ``[idstring]`` is + a unique string containing letters, numbers, ``.`` and/or ``-``, + to identify licenses that are not included in the SPDX license list. + The custom identifiers must follow the SPDX specification, + `clause 10.1 `__ of the given specification version. + + +Examples of valid license expressions: + +.. code-block:: yaml + + MIT + BSD-3-Clause + MIT AND (Apache-2.0 OR BSD-2-Clause) + MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause) + GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause + LicenseRef-Special-License OR CC0-1.0 OR Unlicense + LicenseRef-Proprietary + + +Examples of invalid license expressions: + +.. code-block:: yaml + + Use-it-after-midnight # No `LicenseRef` prefix + Apache-2.0 OR 2-BSD-Clause # 2-BSD-Clause is not a valid SPDX identifier + LicenseRef-License with spaces # spaces are not allowed + LicenseRef-License_with_underscores # underscore are not allowed + +.. 
_spdxcustom: https://spdx.github.io/spdx-spec/v2.2.2/other-licensing-information-detected/
+.. _spdxlist: https://spdx.org/licenses/
+.. _spdxpression: https://spdx.github.io/spdx-spec/v2.2.2/SPDX-license-expressions/
diff --git a/source/specifications/name-normalization.rst b/source/specifications/name-normalization.rst
new file mode 100644
index 000000000..ba3246b63
--- /dev/null
+++ b/source/specifications/name-normalization.rst
@@ -0,0 +1,55 @@
+=======================
+Names and normalization
+=======================
+
+This specification defines the format that names for packages and extras are
+required to follow. It also describes how to normalize them, which should be
+done before lookups and comparisons.
+
+
+.. _name-format:
+
+Name format
+===========
+
+A valid name consists only of ASCII letters and numbers, period,
+underscore and hyphen. It must start and end with a letter or number.
+This means that valid project names are limited to those which match the
+following regex (run with :py:data:`re.IGNORECASE`)::
+
+    ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$
+
+
+.. _name-normalization:
+
+Name normalization
+==================
+
+The name should be lowercased with all runs of the characters ``.``, ``-``, or
+``_`` replaced with a single ``-`` character. This can be implemented in Python
+with the re module:
+
+.. code-block:: python
+
+    import re
+
+    def normalize(name):
+        return re.sub(r"[-_.]+", "-", name).lower()
+
+This means that the following names are all equivalent:
+
+* ``friendly-bard`` (normalized form)
+* ``Friendly-Bard``
+* ``FRIENDLY-BARD``
+* ``friendly.bard``
+* ``friendly_bard``
+* ``friendly--bard``
+* ``FrIeNdLy-._.-bArD`` (a *terrible* way to write a name, but it is valid)
+
+History
+=======
+
+- September 2015: The specification of name normalization was approved through
+  :pep:`503 <503#normalized-names>`.
+- November 2015: The specification of valid names was approved through
+  :pep:`508 <508#names>`.
diff --git a/source/specifications/platform-compatibility-tags.rst b/source/specifications/platform-compatibility-tags.rst new file mode 100644 index 000000000..d617323d4 --- /dev/null +++ b/source/specifications/platform-compatibility-tags.rst @@ -0,0 +1,450 @@ + +.. _platform-compatibility-tags: + +=========================== +Platform compatibility tags +=========================== + +Platform compatibility tags allow build tools to mark distributions as being +compatible with specific platforms, and allows installers to understand which +distributions are compatible with the system they are running on. + + +Overview +======== + +The tag format is ``{python tag}-{abi tag}-{platform tag}``. + +python tag + 'py27', 'cp33' +abi tag + 'cp32dmu', 'none' +platform tag + 'linux_x86_64', 'any' + +For example, the tag ``py27-none-any`` indicates compatibility with Python 2.7 +(any Python 2.7 implementation) with no abi requirement, on any platform. + +The ``wheel`` built package format includes these tags in its filenames, +of the form +``{distribution}-{version}(-{build tag})?-{python tag}-{abitag}-{platform tag}.whl``. +Other package formats may have their own conventions. + +Any potential spaces in any tag should be replaced with ``_``. + + +Python Tag +========== + +The Python tag indicates the implementation and version required by +a distribution. Major implementations have abbreviated codes, initially: + +* py: Generic Python (does not require implementation-specific features) +* cp: CPython +* ip: IronPython +* pp: PyPy +* jy: Jython + +Other Python implementations should use :py:data:`sys.implementation.name `. + +The version is ``py_version_nodot``. CPython gets away with no dot, +but if one is needed the underscore ``_`` is used instead. PyPy should +probably use its own versions here ``pp18``, ``pp19``. + +The version can be just the major version ``2`` or ``3`` ``py2``, ``py3`` for +many pure-Python distributions. 
+ +Importantly, major-version-only tags like ``py2`` and ``py3`` are not +shorthand for ``py20`` and ``py30``. Instead, these tags mean the packager +intentionally released a cross-version-compatible distribution. + +A single-source Python 2/3 compatible distribution can use the compound +tag ``py2.py3``. See `Compressed Tag Sets`_, below. + + +ABI Tag +======= + +The ABI tag indicates which Python ABI is required by any included +extension modules. For implementation-specific ABIs, the implementation +is abbreviated in the same way as the Python Tag, e.g. ``cp33d`` would be +the CPython 3.3 ABI with debugging. + +The CPython stable ABI is ``abi3`` as in the shared library suffix. + +Implementations with a very unstable ABI may use the first 6 bytes (as +8 base64-encoded characters) of the SHA-256 hash of their source code +revision and compiler flags, etc, but will probably not have a great need +to distribute binary distributions. Each implementation's community may +decide how to best use the ABI tag. + + +Platform Tag +============ + +Basic platform tags +------------------- + +In its simplest form, the platform tag is :py:func:`sysconfig.get_platform()` with +all hyphens ``-`` and periods ``.`` replaced with underscore ``_``. +Until the removal of :ref:`distutils` in Python 3.12, this +was ``distutils.util.get_platform()``. For example: + +* win32 +* linux_i386 +* linux_x86_64 + + +.. _manylinux: + +``manylinux`` +------------- + +The simple scheme above is insufficient for public distribution of wheel files +to Linux platforms, due to the large ecosystem of Linux platforms and subtle +differences between them. + +Instead, for those platforms, the ``manylinux`` standard represents a common +subset of Linux platforms, and allows building wheels tagged with the +``manylinux`` platform tag which can be used across most common Linux +distributions. + +The current standard is the future-proof :file:`manylinux_{x}_{y}` standard. 
It defines +tags of the form :file:`manylinux_{x}_{y}_{arch}`, where ``x`` and ``y`` are glibc major +and minor versions supported (e.g. ``manylinux_2_24_xxx`` should work on any +distro using glibc 2.24+), and ``arch`` is the architecture, matching the value +of :py:func:`sysconfig.get_platform()` on the system as in the "simple" form above. + +The following older tags are still supported for backward compatibility: + +* ``manylinux1`` supports glibc 2.5 on ``x86_64`` and ``i686`` architectures. +* ``manylinux2010`` supports glibc 2.12 on ``x86_64`` and ``i686``. +* ``manylinux2014`` supports glibc 2.17 on ``x86_64``, ``i686``, ``aarch64``, + ``armv7l``, ``ppc64``, ``ppc64le``, and ``s390x``. + +In general, distributions built for older versions of the specification are +forwards-compatible (meaning that ``manylinux1`` distributions should continue +to work on modern systems) but not backwards-compatible (meaning that +``manylinux2010`` distributions are not expected to work on platforms that +existed before 2010). + +Package maintainers should attempt to target the most compatible specification +possible, with the caveat that the provided build environment for +``manylinux1`` and ``manylinux2010`` have reached end-of-life meaning that +these images will no longer receive security updates. + +The following table shows the minimum versions of relevant projects to support +the various ``manylinux`` standards: + +========== ============== ================= ================= ================= +Tool ``manylinux1`` ``manylinux2010`` ``manylinux2014`` ``manylinux_x_y`` +========== ============== ================= ================= ================= +pip ``>=8.1.0`` ``>=19.0`` ``>=19.3`` ``>=20.3`` +auditwheel ``>=1.0.0`` ``>=2.0.0`` ``>=3.0.0`` ``>=3.3.0`` [#]_ +========== ============== ================= ================= ================= + +.. 
[#] Only support for ``manylinux_2_24`` has been added in auditwheel 3.3.0 + + +``musllinux`` +------------- + +The ``musllinux`` family of tags is similar to ``manylinux``, but for Linux +platforms that use the musl_ libc rather than glibc (a prime example being Alpine +Linux). The schema is :file:`musllinux_{x}_{y}_{arch}``, supporting musl ``x.y`` and higher +on the architecture ``arch``. + +The musl version values can be obtained by executing the musl libc shared +library the Python interpreter is currently running on, and parsing the output: + +.. code-block:: python + + import re + import subprocess + + def get_musl_major_minor(so: str) -> tuple[int, int] | None: + """Detect musl runtime version. + + Returns a two-tuple ``(major, minor)`` that indicates musl + library's version, or ``None`` if the given libc .so does not + output expected information. + + The libc library should output something like this to stderr:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + proc = subprocess.run([so], stderr=subprocess.PIPE, text=True) + lines = (line.strip() for line in proc.stderr.splitlines()) + lines = [line for line in lines if line] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + match = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if match: + return (int(match.group(1)), int(match.group(2))) + return None + +There are currently two possible ways to find the musl library’s location that a +Python interpreter is running on, either with the system ldd_ command, or by +parsing the ``PT_INTERP`` section’s value from the executable’s ELF_ header. + +.. _macos: + +macOS +----- + +macOS uses the ``macosx`` family of tags (the ``x`` suffix is a historical +artefact of Apple's official macOS naming scheme). The schema for compatibility +tags is :file:`macosx_{x}_{y}_{arch}`, indicating that the wheel is compatible +with macOS ``x.y`` or later on the architecture ``arch``. 
+ +The values of ``x`` and ``y`` correspond to the major and minor version number of +the macOS release, respectively. They must both be positive integers, with the +``x`` value being ``>= 10``. The version number always includes a major *and* +minor version, even if Apple's official version numbering only refers to +the major value. For example, ``macosx_11_0_arm64`` indicates compatibility +with macOS 11 or later. + +macOS binaries can be compiled for a single architecture, or can include support +for multiple architectures in the same binary (sometimes called "fat" binaries). +To indicate support for a single architecture, the value of ``arch`` must match +the value of :py:func:`platform.machine()` on the system. To indicate +support multiple architectures, the ``arch`` tag should be an identifier from +the following list that describes the set of supported architectures: + +============== ======================================== +``arch`` Architectures supported +============== ======================================== +``universal2`` ``arm64``, ``x86_64`` +``universal`` ``i386``, ``ppc``, ``ppc64``, ``x86_64`` +``intel`` ``i386``, ``x86_64`` +``fat`` ``i386``, ``ppc`` +``fat3`` ``i386``, ``ppc``, ``x86_64`` +``fat64`` ``ppc64``, ``x86_64`` +============== ======================================== + +The minimum supported macOS version may also be constrained by architecture. For +example, macOS 11 (Big Sur) was the first release to support arm64. These +additional constraints are enforced transparently by the macOS compilation +toolchain when building binaries that support multiple architectures. + +.. _android: + +Android +------- + +Android uses the schema :file:`android_{apilevel}_{abi}`, indicating +compatibility with the given Android API level or later, on the given ABI. For +example, ``android_27_arm64_v8a`` indicates support for API level 27 or later, +on ``arm64_v8a`` devices. Android makes no distinction between physical devices +and emulated devices. 
+ +The API level should be a positive integer. This is *not* the same thing as +the user-facing Android version. For example, the release known as Android +12 (code named "Snow Cone") uses API level 31 or 32, depending on the specific +Android version in use. Android's release documentation contains the `full list +of Android versions and their corresponding API levels +`__. + +There are 4 `supported ABIs `__. +Normalized according to the rules above, they are: + +* ``armeabi_v7a`` +* ``arm64_v8a`` +* ``x86`` +* ``x86_64`` + +Virtually all current physical devices use one of the ARM architectures. ``x86`` +and ``x86_64`` are supported for use in the emulator. ``x86`` has not been +supported as a development platform since 2020, and no new emulator images have +been released since then. + +.. _ios: + +iOS +--- + +iOS uses the schema :file:`ios_{x}_{y}_{arch}_{sdk}`, indicating compatibility with +iOS ``x.y`` or later, on the ``arch`` architecture, using the ``sdk`` SDK. + +The value of ``x`` and ``y`` correspond to the major and minor version number of +the iOS release, respectively. They must both be positive integers. The version +number always includes a major *and* minor version, even if Apple's official +version numbering only refers to the major value. For example, a +``ios_13_0_arm64_iphonesimulator`` indicates compatibility with iOS 13 or later. + +The value of ``arch`` must match the value of :py:func:`platform.machine()` on +the system. + +The value of ``sdk`` must be either ``iphoneos`` (for physical devices), or +``iphonesimulator`` (for device simulators). These SDKs have the same API +surface, but are incompatible at the binary level, even if they are running on +the same CPU architecture. Code compiled for an arm64 simulator will not run on +an arm64 device. + +The combination of :file:`{arch}_{sdk}` is referred to as the "multiarch". There +are three possible values for multiarch: + +* ``arm64_iphoneos``, for physical iPhone/iPad devices. 
This includes every + iOS device manufactured since ~2015; +* ``arm64_iphonesimulator``, for simulators running on Apple Silicon macOS + hardware; and +* ``x86_64_iphonesimulator``, for simulators running on x86_64 hardware. + +Use +=== + +The tags are used by installers to decide which built distribution +(if any) to download from a list of potential built distributions. +The installer maintains a list of (pyver, abi, arch) tuples that it +will support. If the built distribution's tag is ``in`` the list, then +it can be installed. + +It is recommended that installers try to choose the most feature complete +built distribution available (the one most specific to the installation +environment) by default before falling back to pure Python versions +published for older Python releases. Installers are also recommended to +provide a way to configure and re-order the list of allowed compatibility +tags; for example, a user might accept only the ``*-none-any`` tags to only +download built packages that advertise themselves as being pure Python. + +Another desirable installer feature might be to include "re-compile from +source if possible" as more preferable than some of the compatible but +legacy pre-built options. + +This example list is for an installer running under CPython 3.3 on a +linux_x86_64 system. It is in order from most-preferred (a distribution +with a compiled extension module, built for the current version of +Python) to least-preferred (a pure-Python distribution built with an +older version of Python): + +1. cp33-cp33m-linux_x86_64 +2. cp33-abi3-linux_x86_64 +3. cp3-abi3-linux_x86_64 +4. cp33-none-linux_x86_64* +5. cp3-none-linux_x86_64* +6. py33-none-linux_x86_64* +7. py3-none-linux_x86_64* +8. cp33-none-any +9. cp3-none-any +10. py33-none-any +11. py3-none-any +12. py32-none-any +13. py31-none-any +14. 
py30-none-any + +* Built distributions may be platform specific for reasons other than C + extensions, such as by including a native executable invoked as + a subprocess. + +Sometimes there will be more than one supported built distribution for a +particular version of a package. For example, a packager could release +a package tagged ``cp33-abi3-linux_x86_64`` that contains an optional C +extension and the same distribution tagged ``py3-none-any`` that does not. +The index of the tag in the supported tags list breaks the tie, and the +package with the C extension is installed in preference to the package +without because that tag appears first in the list. + +Compressed Tag Sets +=================== + +To allow for compact filenames of bdists that work with more than +one compatibility tag triple, each tag in a filename can instead be a +'.'-separated, sorted, set of tags. For example, pip, a pure-Python +package that is written to run under Python 2 and 3 with the same source +code, could distribute a bdist with the tag ``py2.py3-none-any``. +The full list of simple tags is:: + + for x in pytag.split('.'): + for y in abitag.split('.'): + for z in platformtag.split('.'): + yield '-'.join((x, y, z)) + +A bdist format that implements this scheme should include the expanded +tags in bdist-specific metadata. This compression scheme can generate +large numbers of unsupported tags and "impossible" tags that are supported +by no Python implementation e.g. "cp33-cp31u-win64", so use it sparingly. + +FAQ +=== + +What tags are used by default? + Tools should use the most-preferred architecture dependent tag + e.g. ``cp33-cp33m-win32`` or the most-preferred pure python tag + e.g. ``py33-none-any`` by default. If the packager overrides the + default it indicates that they intended to provide cross-Python + compatibility. + +What tag do I use if my distribution uses a feature exclusive to the newest version of Python? 
+ Compatibility tags aid installers in selecting the *most compatible* + build of a *single version* of a distribution. For example, when + there is no Python 3.3 compatible build of ``beaglevote-1.2.0`` + (it uses a Python 3.4 exclusive feature) it may still use the + ``py3-none-any`` tag instead of the ``py34-none-any`` tag. A Python + 3.3 user must combine other qualifiers, such as a requirement for the + older release ``beaglevote-1.1.0`` that does not use the new feature, + to get a compatible build. + +Why isn't there a ``.`` in the Python version number? + CPython has lasted 20+ years without a 3-digit major release. This + should continue for some time. Other implementations may use _ as + a delimiter, since both - and . delimit the surrounding filename. + +Why normalise hyphens and other non-alphanumeric characters to underscores? + To avoid conflicting with the ``.`` and ``-`` characters that separate + components of the filename, and for better compatibility with the + widest range of filesystem limitations for filenames (including + being usable in URL paths without quoting). + +Why not use special character rather than ``.`` or ``-``? + Either because that character is inconvenient or potentially confusing + in some contexts (for example, ``+`` must be quoted in URLs, ``~`` is + used to denote the user's home directory in POSIX), or because the + advantages weren't sufficiently compelling to justify changing the + existing reference implementation for the wheel format defined in :pep:`427` + (for example, using ``,`` rather than ``.`` to separate components + in a compressed tag). + +Who will maintain the registry of abbreviated implementations? + New two-letter abbreviations can be requested on the python-dev + mailing list. As a rule of thumb, abbreviations are reserved for + the current 4 most prominent implementations. + +Does the compatibility tag go into METADATA or PKG-INFO? + No. 
The compatibility tag is part of the built distribution's + metadata. METADATA / PKG-INFO should be valid for an entire + distribution, not a single build of that distribution. + +Why didn't you mention my favorite Python implementation? + The abbreviated tags facilitate sharing compiled Python code in a + public index. Your Python implementation can use this specification + too, but with longer tags. + Recall that all "pure Python" built distributions just use ``py``. + +Why is the ABI tag (the second tag) sometimes "none" in the reference implementation? + Since Python 2 does not have an easy way to get to the SOABI + (the concept comes from newer versions of Python 3) the reference + implementation at the time of writing guesses "none". Ideally it + would detect "py27(d|m|u)" analogous to newer versions of Python, + but in the meantime "none" is a good enough way to say "don't know". + + +History +======= + +- February 2013: The original version of this specification was approved through + :pep:`425`. +- January 2016: The ``manylinux1`` tag was approved through :pep:`513`. +- April 2018: The ``manylinux2010`` tag was approved through :pep:`571`. +- July 2019: The ``manylinux2014`` tag was approved through :pep:`599`. +- November 2019: The ``manylinux_x_y`` perennial tag was approved through + :pep:`600`. +- April 2021: The ``musllinux_x_y`` tag was approved through :pep:`656`. +- December 2023: The tags for iOS were approved through :pep:`730`. +- March 2024: The tags for Android were approved through :pep:`738`. + + +.. _musl: https://musl.libc.org +.. _ldd: https://www.man7.org/linux/man-pages/man1/ldd.1.html +.. _elf: https://refspecs.linuxfoundation.org/elf/elf.pdf diff --git a/source/specifications/pylock-toml.rst b/source/specifications/pylock-toml.rst new file mode 100644 index 000000000..d21294cf9 --- /dev/null +++ b/source/specifications/pylock-toml.rst @@ -0,0 +1,842 @@ +.. _pylock-toml-spec: +.. 
_lock-file-spec: + +============================= +``pylock.toml`` Specification +============================= + +The ``pylock.toml`` file format is for specifying dependencies to enable +reproducible installation in a Python environment. + +.. note:: This specification was originally defined in :pep:`751`. + + +--------- +File Name +--------- + +A lock file MUST be named :file:`pylock.toml` or match the regular expression +``r"^pylock\.([^.]+)\.toml$"`` if a name for the lock file is desired or if +multiple lock files exist (i.e. the regular expression +``r"^pylock\.([^.]+\.)?toml$"`` for any file name). The prefix and suffix of a +named file MUST be lowercase when possible, for easy detection and removal, +e.g.: + +.. code-block:: Python + + if len(filename) > 11 and filename.startswith("pylock.") and filename.endswith(".toml"): + name = filename.removeprefix("pylock.").removesuffix(".toml") + +The expectation is that services that automatically install from lock files will +search for: + +1. The lock file with the service's name and doing the default install +2. A multi-use :file:`pylock.toml` with a dependency group with the name of the service +3. The default install of :file:`pylock.toml` + +E.g. a cloud host service named "spam" would first look for +:file:`pylock.spam.toml` to install from, and if that file didn't exist then install +from :file:`pylock.toml` and look for a dependency group named "spam" to use if +present. + +The lock file(s) SHOULD be located in the directory as appropriate for the scope +of the lock file. Locking against a single :file:`pyproject.toml`, for instance, +would place the :file:`pylock.toml` in the same directory. If the lock file covered +multiple projects in a monorepo, then the expectation is the :file:`pylock.toml` +file would be in the directory that held all the projects being locked. + + +----------- +File Format +----------- + +The format of the file is TOML_. 
+ +Tools SHOULD write their lock files in a consistent way to minimize noise in +diff output. Keys in tables -- including the top-level table -- SHOULD be +recorded in a consistent order (if inspiration is desired, this specification has tried to +write down keys in a logical order). As well, tools SHOULD sort arrays in +consistent order. Usage of inline tables SHOULD also be kept consistent. + + +.. _pylock-lock-version: + +``lock-version`` +================ + +- **Type**: string; value of ``"1.0"`` +- **Required?**: yes +- **Inspiration**: :ref:`core-metadata-metadata-version` +- Record the file format version that the file adheres to. +- This PEP specifies the initial version -- and only valid value until future + updates to the standard change it -- as ``"1.0"``. +- If a tool supports the major version but not the minor version, a tool + SHOULD warn when an unknown key is seen. +- If a tool doesn't support a major version, it MUST raise an error. + + +.. _pylock-environments: + +``environments`` +================ + +- **Type**: Array of strings +- **Required?**: no +- **Inspiration**: uv_ +- A list of :ref:`dependency-specifiers-environment-markers` for + which the lock file is considered compatible with. +- Tools SHOULD write exclusive/non-overlapping environment markers to ease in + understanding. + + +.. _pylock-requires-python: + +``requires-python`` +=================== + +- **Type**: string +- **Required?**: no +- **Inspiration**: PDM_, Poetry_, uv_ +- Specifies the :ref:`core-metadata-requires-python` for the minimum + Python version compatible for any environment supported by the lock file + (i.e. the minimum viable Python version for the lock file). + + +.. _pylock-extras: + +``extras`` +========== + +- **Type**: Array of strings +- **Required?**: no; defaults to ``[]`` +- **Inspiration**: :ref:`core-metadata-provides-extra` +- The list of :ref:`extras ` supported + by this lock file. 
+- Lockers MAY choose to not support writing lock files that support extras and + dependency groups (i.e. tools may only support exporting a single-use lock + file). +- Tools supporting extras MUST also support dependency groups. +- Tools should explicitly set this key to an empty array to signal that the + inputs used to generate the lock file had no extras (e.g. a + :ref:`pyproject.toml ` file had no + :ref:`[project.optional-dependencies] ` + table), signalling that the lock file is, in effect, multi-use even if it only + looks to be single-use. + + +.. _pylock-dependency-groups: + +``dependency-groups`` +===================== + +- **Type**: Array of strings +- **Required?**: no; defaults to ``[]`` +- **Inspiration**: :ref:`pyproject-tool-table` +- The list of :ref:`dependency-groups` publicly supported by this lock + file (i.e. dependency groups users are expected to be able to specify via a + tool's UI). +- Lockers MAY choose to not support writing lock files that support extras and + dependency groups (i.e. tools may only support exporting a single-use lock + file). +- Tools supporting dependency groups MUST also support extras. +- Tools SHOULD explicitly set this key to an empty array to signal that the + inputs used to generate the lock file had no dependency groups (e.g. a + :ref:`pyproject.toml ` file had no + :ref:`[dependency-groups] ` table), signalling that the + lock file is, in effect, multi-use even if it only looks to be single-use. + + +.. _pylock-default-groups: + +``default-groups`` +================== + +- **Type**: Array of strings +- **Required?**: no; defaults to ``[]`` +- **Inspiration**: Poetry_, PDM_ +- The name of synthetic dependency groups to represent what should be installed + by default (e.g. what + :ref:`[project.dependencies] ` implicitly + represents). +- Meant to be used in situations where :ref:`pylock-packages-marker` + necessitates such a group to exist. 
+- The groups listed by this key SHOULD NOT be listed in + :ref:`pylock-dependency-groups` as the groups are not meant to be directly + exposed to users by name but instead via an installer's UI. + + +.. _pylock-created-by: + +``created-by`` +============== + +- **Type**: string +- **Required?**: yes +- **Inspiration**: Tools with their name in their lock file name +- Records the name of the tool used to create the lock file. +- Tools MAY use the :ref:`pylock-tool` table to record enough details that it + can be inferred what inputs were used to create the lock file. +- Tools SHOULD record the normalized name of the tool if it is available as a + Python package to facilitate finding the tool. + + +.. _pylock-packages: + +``[[packages]]`` +================ + +- **Type**: array of tables +- **Required?**: yes +- **Inspiration**: PDM_, Poetry_, uv_ +- An array containing all packages that *may* be installed. +- Packages MAY be listed multiple times with varying data, but all packages to + be installed MUST narrow down to a single entry at install time. + + +.. _pylock-packages-name: + +``packages.name`` +----------------- + +- **Type**: string +- **Required?**: yes +- **Inspiration**: :ref:`core-metadata-name` +- The name of the package :ref:`normalized `. + + +.. _pylock-packages-version: + +``packages.version`` +-------------------- + +- **Type**: string +- **Required?**: no +- **Inspiration**: :ref:`core-metadata-version` +- The version of the package. +- The version SHOULD be specified when the version is known to be stable + (i.e. when an :ref:`sdist ` or + :ref:`wheels ` are specified). +- The version MUST NOT be included when it cannot be guaranteed to be consistent + with the code used (i.e. when a + :ref:`source tree ` is used). + + +.. _pylock-packages-marker: + +``packages.marker`` +------------------- + +- **Type**: string +- **Required?**: no +- **Inspiration**: PDM_ +- The + :ref:`environment marker ` + which specify when the package should be installed. 
+ + +.. _pylock-packages-requires-python: + +``packages.requires-python`` +---------------------------- + +- **Type**: string +- **Required?**: no +- **Inspiration**: :ref:`core-metadata-requires-python` +- Holds the :ref:`version-specifiers` for Python version compatibility + for the package. + + +.. _pylock-packages-dependencies: + +``[[packages.dependencies]]`` +----------------------------- + +- **Type**: array of tables +- **Required?**: no +- **Inspiration**: PDM_, Poetry_, uv_ +- Records the other entries in :ref:`pylock-packages` which are direct + dependencies of this package. +- Each entry is a table which contains the minimum information required to tell + which other package entry it corresponds to where doing a key-by-key + comparison would find the appropriate package with no ambiguity (e.g. if there + are two entries for the ``spam`` package, then you can include the version + number like ``{name = "spam", version = "1.0.0"}``, or by source like + ``{name = "spam", vcs = { url = "..."}``). +- Tools MUST NOT use this information when doing installation; it is purely + informational for auditing purposes. + + +.. _pylock-packages-vcs: + +``[packages.vcs]`` +------------------ + +- **Type**: table +- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-directory`, + :ref:`pylock-packages-archive`, :ref:`pylock-packages-sdist`, and + :ref:`pylock-packages-wheels` +- **Inspiration**: :ref:`direct-url-data-structure` +- Record the version control system details for the + :ref:`source tree ` it + contains. +- Tools MAY choose to not support version control systems, both from a locking + and/or installation perspective. +- Tools MAY choose to only support a subset of the available VCS types. +- Tools SHOULD provide a way for users to opt in/out of using version control + systems. +- Installation from a version control system is considered originating from a + :ref:`direct URL reference `. + + +.. 
_pylock-packages-vcs-type: + +``packages.vcs.type`` +''''''''''''''''''''' + +- **Type**: string; supported values specified in + :ref:`direct-url-data-structure-registered-vcs` +- **Required?**: yes +- **Inspiration**: :ref:`direct-url-data-structure-vcs` +- The type of version control system used. + + +.. _pylock-packages-vcs-url: + +``packages.vcs.url`` +'''''''''''''''''''' + +- **Type**: string +- **Required?**: if :ref:`pylock-packages-vcs-path` is not specified +- **Inspiration**: :ref:`direct-url-data-structure-vcs` +- The URL_ to the source tree. + + +.. _pylock-packages-vcs-path: + +``packages.vcs.path`` +''''''''''''''''''''' + +- **Type**: string +- **Required?**: if :ref:`pylock-packages-vcs-url` is not specified +- **Inspiration**: :ref:`direct-url-data-structure-vcs` +- The path to the local directory of the source tree. +- If a relative path is used it MUST be relative to the location of this file. +- If the path is relative it MAY use POSIX-style path separators explicitly + for portability. + + +.. _pylock-packages-vcs-requested-revision: + +``packages.vcs.requested-revision`` +''''''''''''''''''''''''''''''''''' + +- **Type**: string +- **Required?**: no +- **Inspiration**: :ref:`direct-url-data-structure-vcs` +- The branch/tag/ref/commit/revision/etc. that the user requested. +- This is purely informational and to facilitate writing the + :ref:`direct-url-data-structure`; it MUST NOT be used to checkout + the repository. + + +.. _pylock-packages-vcs-commit-id: + +``packages.vcs.commit-id`` +'''''''''''''''''''''''''' + +- **Type**: string +- **Required?**: yes +- **Inspiration**: :ref:`direct-url-data-structure-vcs` +- The exact commit/revision number that is to be installed. +- If the VCS supports commit-hash based revision identifiers, such a + commit-hash, it MUST be used as the commit ID in order to reference an + immutable version of the source code. + + +.. 
_pylock-packages-vcs-subdirectory: + +``packages.vcs.subdirectory`` +''''''''''''''''''''''''''''' + +- **Type**: string +- **Required?**: no +- **Inspiration**: :ref:`direct-url-data-structure-subdirectories` +- The subdirectory within the + :ref:`source tree ` where + the project root of the project is (e.g. the location of the + :ref:`pyproject.toml ` file). +- The path MUST be relative to the root of the source tree structure. + + +.. _pylock-packages-directory: + +``[packages.directory]`` +------------------------ + +- **Type**: table +- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-vcs`, + :ref:`pylock-packages-archive`, :ref:`pylock-packages-sdist`, and + :ref:`pylock-packages-wheels` +- **Inspiration**: :ref:`direct-url-data-structure-local-directory` +- Record the local directory details for the + :ref:`source tree ` it + contains. +- Tools MAY choose to not support local directories, both from a locking + and/or installation perspective. +- Tools SHOULD provide a way for users to opt in/out of using local directories. +- Installation from a directory is considered originating from a + :ref:`direct URL reference `. + + +.. _pylock-packages-directory-path: + +``packages.directory.path`` +''''''''''''''''''''''''''' + +- **Type**: string +- **Required?**: yes +- **Inspiration**: :ref:`direct-url-data-structure-local-directory` +- The local directory where the source tree is. +- If the path is relative it MUST be relative to the location of the lock file. +- If the path is relative it MAY use POSIX-style path separators for + portability. + + +.. _pylock-packages-directory-editable: + +``packages.directory.editable`` +''''''''''''''''''''''''''''''' + +- **Type**: boolean +- **Required?**: no; defaults to ``false`` +- **Inspiration**: :ref:`direct-url-data-structure-local-directory` +- A flag representing whether the source tree was an editable install at lock + time. 
+- An installer MAY choose to ignore this flag if user actions or context would + make an editable install unnecessary or undesirable (e.g. a container image + that will not be mounted for development purposes but instead deployed to + production where it would be treated at read-only). + + +.. _pylock-packages-directory-subdirectory: + +``packages.directory.subdirectory`` +''''''''''''''''''''''''''''''''''' + +See :ref:`pylock-packages-vcs-subdirectory`. + + +.. _pylock-packages-archive: + +``[packages.archive]`` +---------------------- + +- **Type**: table +- **Required?**: no +- **Inspiration**: :ref:`direct-url-data-structure-archive` +- A direct reference to an archive file to install from + (this can include wheels and sdists, as well as other archive formats + containing a source tree). +- Tools MAY choose to not support archive files, both from a locking + and/or installation perspective. +- Tools SHOULD provide a way for users to opt in/out of using archive files. +- Installation from an archive file is considered originating from a + :ref:`direct URL reference `. + + +.. _pylock-packages-archive-url: + +``packages.archive.url`` +'''''''''''''''''''''''' + +See :ref:`pylock-packages-vcs-url`. + + +.. _pylock-packages-archive-path: + +``packages.archive.path`` +''''''''''''''''''''''''' + +See :ref:`pylock-packages-vcs-path`. + + +.. _pylock-packages-archive-size: + +``packages.archive.size`` +''''''''''''''''''''''''' + +- **Type**: integer +- **Required?**: no +- **Inspiration**: uv_, :ref:`simple-repository-api` +- The size of the archive file. +- Tools SHOULD provide the file size when reasonably possible (e.g. the file + size is available via the Content-Length_ header from a HEAD_ HTTP request). + + +.. _pylock-packages-archive-upload-time: + +``packages.archive.upload-time`` +'''''''''''''''''''''''''''''''' + +- **Type**: datetime +- **Required?**: no +- **Inspiration**: :ref:`simple-repository-api` +- The time the file was uploaded. 
+- The date and time MUST be recorded in UTC. + + +.. _pylock-packages-archive-hashes: + +``[packages.archive.hashes]`` +''''''''''''''''''''''''''''' + +- **Type**: Table of strings +- **Required?**: yes +- **Inspiration**: PDM_, Poetry_, uv_, :ref:`simple-repository-api` +- A table listing known hash values of the file where the key is the hash + algorithm and the value is the hash value. +- The table MUST contain at least one entry. +- Hash algorithm keys SHOULD be lowercase. +- At least one secure algorithm from :py:data:`hashlib.algorithms_guaranteed` + SHOULD always be included (at time of writing, sha256 specifically is + recommended. + + +.. _pylock-packages-archive-subdirectory: + +``packages.archive.subdirectory`` +'''''''''''''''''''''''''''''''''' + +See :ref:`pylock-packages-vcs-subdirectory`. + + +.. _pylock-packages-index: + +``packages.index`` +------------------ + +- **Type**: string +- **Required?**: no +- **Inspiration**: uv_ +- The base URL for the package index from :ref:`simple-repository-api` + where the sdist and/or wheels were found (e.g. ``https://pypi.org/simple/``). +- When possible, this SHOULD be specified to assist with generating + `software bill of materials`_ -- aka SBOMs -- and to assist in finding a file + if a URL ceases to be valid. +- Tools MAY support installing from an index if the URL recorded for a specific + file is no longer valid (e.g. returns a 404 HTTP error code). + + +.. _pylock-packages-sdist: + +``[packages.sdist]`` +-------------------- + +- **Type**: table +- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-vcs`, + :ref:`pylock-packages-directory`, and :ref:`pylock-packages-archive` +- **Inspiration**: uv_ +- Details of a :ref:`source-distribution-format-sdist` for the + package. +- Tools MAY choose to not support sdist files, both from a locking + and/or installation perspective. +- Tools SHOULD provide a way for users to opt in/out of using sdist files. + + +.. 
_pylock-packages-sdist-name: + +``packages.sdist.name`` +''''''''''''''''''''''' + +- **Type**: string +- **Required?**: no, not when the last component of + :ref:`pylock-packages-sdist-path`/ :ref:`pylock-packages-sdist-url` would be + the same value +- **Inspiration**: PDM_, Poetry_, uv_ +- The file name of the :ref:`source-distribution-format-sdist` file. + + +.. _pylock-packages-sdist-upload-time: + +``packages.sdist.upload-time`` +'''''''''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-upload-time`. + + +.. _pylock-packages-sdist-url: + +``packages.sdist.url`` +'''''''''''''''''''''' + +See :ref:`pylock-packages-archive-url`. + + +.. _pylock-packages-sdist-path: + +``packages.sdist.path`` +''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-path`. + + +.. _pylock-packages-sdist-size: + +``packages.sdist.size`` +''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-size`. + + +.. _pylock-packages-sdist-hashes: + +``packages.sdist.hashes`` +''''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-hashes`. + + + +.. _pylock-packages-wheels: + +``[[packages.wheels]]`` +----------------------- + +- **Type**: array of tables +- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-vcs`, + :ref:`pylock-packages-directory`, and :ref:`pylock-packages-archive` +- **Inspiration**: PDM_, Poetry_, uv_ +- For recording the wheel files as specified by + :ref:`binary-distribution-format` for the package. +- Tools MUST support wheel files, both from a locking and installation + perspective. + + +.. _pylock-packages-wheels-name: + +``packages.wheels.name`` +'''''''''''''''''''''''' + +- **Type**: string +- **Required?**: no, not when the last component of + :ref:`pylock-packages-wheels-path`/ :ref:`pylock-packages-wheels-url` would be + the same value +- **Inspiration**: PDM_, Poetry_, uv_ +- The file name of the :ref:`binary-distribution-format` file. + + +.. 
_pylock-packages-wheels-upload-time:
+
+``packages.wheels.upload-time``
+'''''''''''''''''''''''''''''''
+
+See :ref:`pylock-packages-archive-upload-time`.
+
+
+.. _pylock-packages-wheels-url:
+
+``packages.wheels.url``
+'''''''''''''''''''''''
+
+See :ref:`pylock-packages-archive-url`.
+
+
+.. _pylock-packages-wheels-path:
+
+``packages.wheels.path``
+''''''''''''''''''''''''
+
+See :ref:`pylock-packages-archive-path`.
+
+
+.. _pylock-packages-wheels-size:
+
+``packages.wheels.size``
+''''''''''''''''''''''''
+
+See :ref:`pylock-packages-archive-size`.
+
+
+.. _pylock-packages-wheels-hashes:
+
+``packages.wheels.hashes``
+''''''''''''''''''''''''''
+
+See :ref:`pylock-packages-archive-hashes`.
+
+
+.. _pylock-packages-attestation-identities:
+
+``[[packages.attestation-identities]]``
+---------------------------------------
+
+- **Type**: array of tables
+- **Required?**: no
+- **Inspiration**: :ref:`provenance-object`
+- A recording of the attestations for **any** file recorded for this package.
+- If available, tools SHOULD include the attestation identities found.
+- Publisher-specific keys are to be included in the table as-is
+  (i.e. top-level), following the spec at
+  :ref:`index-hosted-attestations`.
+
+
+.. _pylock-packages-attestation-identities-kind:
+
+``packages.attestation-identities.kind``
+''''''''''''''''''''''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: yes
+- **Inspiration**: :ref:`provenance-object`
+- The unique identity of the Trusted Publisher.
+
+
+.. _pylock-packages-tool:
+
+``[packages.tool]``
+-------------------
+
+- **Type**: table
+- **Required?**: no
+- **Inspiration**: :ref:`pyproject-tool-table`
+- Similar usage as that of the :ref:`pylock-tool` table from the
+  :ref:`pyproject-toml-spec`, but at the package version level instead
+  of at the lock file level (which is also available via :ref:`pylock-tool`).
+- Data recorded in the table MUST be disposable (i.e. it MUST NOT affect
+  installation).
+
+
+..
_pylock-tool: + +``[tool]`` +========== + +- **Type**: table +- **Required?**: no +- **Inspiration**: :ref:`pyproject-tool-table` +- See :ref:`pylock-packages-tool`. + + +------- +Example +------- + +.. literalinclude:: pylock-toml/pylock.example.toml + + +------------ +Installation +------------ + +The following outlines the steps to be taken to install from a lock file +(while the requirements are prescriptive, the general steps and order are +a suggestion): + +#. Gather the extras and dependency groups to install and set ``extras`` and + ``dependency_groups`` for marker evaluation, respectively. + + #. ``extras`` SHOULD be set to the empty set by default. + #. ``dependency_groups`` SHOULD be the set created from + :ref:`pylock-default-groups` by default. + +#. Check if the metadata version specified by :ref:`pylock-lock-version` is + supported; an error or warning MUST be raised as appropriate. +#. If :ref:`pylock-requires-python` is specified, check that the environment + being installed for meets the requirement; an error MUST be raised if it is + not met. +#. If :ref:`pylock-environments` is specified, check that at least one of the + environment marker expressions is satisfied; an error MUST be raised if no + expression is satisfied. +#. For each package listed in :ref:`pylock-packages`: + + #. If :ref:`pylock-packages-marker` is specified, check if it is satisfied; + if it isn't, skip to the next package. + #. If :ref:`pylock-packages-requires-python` is specified, check if it is + satisfied; an error MUST be raised if it isn't. + #. Check that no other conflicting instance of the package has been slated to + be installed; an error about the ambiguity MUST be raised otherwise. + #. Check that the source of the package is specified appropriately (i.e. + there are no conflicting sources in the package entry); + an error MUST be raised if any issues are found. + #. Add the package to the set of packages to install. + +#. 
For each package to be installed: + + - If :ref:`pylock-packages-vcs` is set: + + #. Clone the repository to the commit ID specified in + :ref:`pylock-packages-vcs-commit-id`. + #. :ref:`Build ` the package, + respecting :ref:`pylock-packages-vcs-subdirectory`. + #. :ref:`Install `. + + - Else if :ref:`pylock-packages-directory` is set: + + #. :ref:`Build ` the package, + respecting :ref:`pylock-packages-directory-subdirectory`. + #. :ref:`Install `. + + - Else if :ref:`pylock-packages-archive` is set: + + #. Get the file. + #. Validate using :ref:`pylock-packages-archive-size` and + :ref:`pylock-packages-archive-hashes`. + #. :ref:`Build ` the package, + respecting :ref:`pylock-packages-archive-subdirectory`. + #. :ref:`Install `. + + - Else if there are entries for :ref:`pylock-packages-wheels`: + + #. Look for the appropriate wheel file based on + :ref:`pylock-packages-wheels-name`; if one is not found then move on to + :ref:`pylock-packages-sdist` or an error MUST be raised about a + lack of source for the project. + #. Get the file: + + - If :ref:`pylock-packages-wheels-path` is set, use it. + - Else if :ref:`pylock-packages-wheels-url` is set, try to use it; + optionally tools MAY use :ref:`pylock-packages-index` or some + tool-specific mechanism to download the selected wheel file (tools + MUST NOT try to change what wheel file to download based on what's + available; what file to install should be determined in an offline + fashion for reproducibility). + + #. Validate using :ref:`pylock-packages-wheels-size` and + :ref:`pylock-packages-wheels-hashes`. + #. :ref:`Install `. + + - Else if no :ref:`pylock-packages-wheels` file is found or + :ref:`pylock-packages-sdist` is solely set: + + #. Get the file. + + - If :ref:`pylock-packages-sdist-path` is set, use it. + - Else if :ref:`pylock-packages-sdist-url` is set, try to use it; tools + MAY use :ref:`pylock-packages-index` or some tool-specific mechanism + to download the file. + + #. 
Validate using :ref:`pylock-packages-sdist-size` and + :ref:`pylock-packages-sdist-hashes`. + #. :ref:`Build ` the package. + #. :ref:`Install `. + + +------- +History +------- + +- April 2025: Initial version, approved via :pep:`751`. + + +.. _Content-Length: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Length +.. _Dependabot: https://docs.github.com/en/code-security/dependabot +.. _HEAD: https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD +.. _PDM: https://pypi.org/project/pdm/ +.. _pip-tools: https://pypi.org/project/pip-tools/ +.. _Poetry: https://pypi.org/project/poetry/ +.. _requirements file: +.. _requirements files: https://pip.pypa.io/en/stable/reference/requirements-file-format/ +.. _software bill of materials: https://www.cisa.gov/sbom +.. _TOML: https://toml.io/ +.. _uv: https://pypi.org/project/uv/ +.. _URL: https://url.spec.whatwg.org/ diff --git a/source/specifications/pylock-toml/pylock.example.toml b/source/specifications/pylock-toml/pylock.example.toml new file mode 100644 index 000000000..45e8731b2 --- /dev/null +++ b/source/specifications/pylock-toml/pylock.example.toml @@ -0,0 +1,60 @@ +lock-version = '1.0' +environments = ["sys_platform == 'win32'", "sys_platform == 'linux'"] +requires-python = '== 3.12' +created-by = 'mousebender' + +[[packages]] +name = 'attrs' +version = '25.1.0' +requires-python = '>= 3.8' + + [[packages.wheels]] + name = 'attrs-25.1.0-py3-none-any.whl' + upload-time = 2025-01-25T11:30:10.164985+00:00 + url = 'https://files.pythonhosted.org/packages/fc/30/d4986a882011f9df997a55e6becd864812ccfcd821d64aac8570ee39f719/attrs-25.1.0-py3-none-any.whl' + size = 63152 + hashes = {sha256 = 'c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a'} + + [[packages.attestation-identities]] + environment = 'release-pypi' + kind = 'GitHub' + repository = 'python-attrs/attrs' + workflow = 'pypi-package.yml' + +[[packages]] +name = 'cattrs' +version = '24.1.2' +requires-python = '>= 3.8' 
+dependencies = [ + {name = 'attrs'}, +] + + [[packages.wheels]] + name = 'cattrs-24.1.2-py3-none-any.whl' + upload-time = 2024-09-22T14:58:34.812643+00:00 + url = 'https://files.pythonhosted.org/packages/c8/d5/867e75361fc45f6de75fe277dd085627a9db5ebb511a87f27dc1396b5351/cattrs-24.1.2-py3-none-any.whl' + size = 66446 + hashes = {sha256 = '67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0'} + +[[packages]] +name = 'numpy' +version = '2.2.3' +requires-python = '>= 3.10' + + [[packages.wheels]] + name = 'numpy-2.2.3-cp312-cp312-win_amd64.whl' + upload-time = 2025-02-13T16:51:21.821880+00:00 + url = 'https://files.pythonhosted.org/packages/42/6e/55580a538116d16ae7c9aa17d4edd56e83f42126cb1dfe7a684da7925d2c/numpy-2.2.3-cp312-cp312-win_amd64.whl' + size = 12626357 + hashes = {sha256 = '83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d'} + + [[packages.wheels]] + name = 'numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl' + upload-time = 2025-02-13T16:50:00.079662+00:00 + url = 'https://files.pythonhosted.org/packages/39/04/78d2e7402fb479d893953fb78fa7045f7deb635ec095b6b4f0260223091a/numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl' + size = 16116679 + hashes = {sha256 = '3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe'} + +[tool.mousebender] +command = ['.', 'lock', '--platform', 'cpython3.12-windows-x64', '--platform', 'cpython3.12-manylinux2014-x64', 'cattrs', 'numpy'] +run-on = 2025-03-06T12:28:57.760769 diff --git a/source/specifications/pypirc.rst b/source/specifications/pypirc.rst new file mode 100644 index 000000000..aeba72b0d --- /dev/null +++ b/source/specifications/pypirc.rst @@ -0,0 +1,138 @@ + +.. 
_pypirc:
+
+========================
+The :file:`.pypirc` file
+========================
+
+A :file:`.pypirc` file allows you to define the configuration for :term:`package
+indexes <Package Index>` (referred to here as "repositories"), so that you don't
+have to enter the URL, username, or password whenever you upload a package with
+:ref:`twine` or :ref:`flit`.
+
+The format (originally defined by the :ref:`distutils` package) is:
+
+.. code-block:: ini
+
+   [distutils]
+   index-servers =
+       first-repository
+       second-repository
+
+   [first-repository]
+   repository =
+   username =
+   password =
+
+   [second-repository]
+   repository =
+   username =
+   password =
+
+The ``distutils`` section defines an ``index-servers`` field that lists the
+name of all sections describing a repository.
+
+Each section describing a repository defines three fields:
+
+- ``repository``: The URL of the repository.
+- ``username``: The registered username on the repository.
+- ``password``: The password that will be used to authenticate the username.
+
+.. warning::
+
+   Be aware that this stores your password in plain text. For better security,
+   consider an alternative like `keyring`_, setting environment variables, or
+   providing the password on the command line.
+
+   Otherwise, set the permissions on :file:`.pypirc` so that only you can view
+   or modify it. For example, on Linux or macOS, run:
+
+   .. code-block:: bash
+
+       chmod 600 ~/.pypirc
+
+.. _keyring: https://pypi.org/project/keyring/
+
+Common configurations
+=====================
+
+.. note::
+
+   These examples apply to :ref:`twine`. Other projects (e.g. :ref:`flit`) also use
+   :file:`.pypirc`, but with different defaults. Please refer to each project's
+   documentation for more details and usage instructions.
+
+Twine's default configuration mimics a :file:`.pypirc` with repository sections
+for PyPI and TestPyPI:
+
+..
code-block:: ini + + [distutils] + index-servers = + pypi + testpypi + + [pypi] + repository = https://upload.pypi.org/legacy/ + + [testpypi] + repository = https://test.pypi.org/legacy/ + +Twine will add additional configuration from :file:`$HOME/.pypirc`, the command +line, and environment variables to this default configuration. + +Using a PyPI token +------------------ + +To set your `API token`_ for PyPI, you can create a :file:`$HOME/.pypirc` +similar to: + +.. code-block:: ini + + [pypi] + username = __token__ + password = + +For :ref:`TestPyPI `, add a ``[testpypi]`` section, using the +API token from your TestPyPI account. + +.. _API token: https://pypi.org/help/#apitoken + +Using another package index +--------------------------- + +To configure an additional repository, you'll need to redefine the +``index-servers`` field to include the repository name. Here is a complete +example of a :file:`$HOME/.pypirc` for PyPI, TestPyPI, and a private repository: + +.. code-block:: ini + + [distutils] + index-servers = + pypi + testpypi + private-repository + + [pypi] + username = __token__ + password = + + [testpypi] + username = __token__ + password = + + [private-repository] + repository = + username = + password = + +.. warning:: + + Instead of using the ``password`` field, consider saving your API tokens + and passwords securely using `keyring`_ (which is installed by Twine): + + .. code-block:: bash + + keyring set https://upload.pypi.org/legacy/ __token__ + keyring set https://test.pypi.org/legacy/ __token__ + keyring set diff --git a/source/specifications/pyproject-toml.rst b/source/specifications/pyproject-toml.rst new file mode 100644 index 000000000..4ce9b7484 --- /dev/null +++ b/source/specifications/pyproject-toml.rst @@ -0,0 +1,544 @@ +.. _declaring-project-metadata: +.. _pyproject-toml-spec: + +================================ +``pyproject.toml`` specification +================================ + +.. 
warning:: + + This is a **technical, formal specification**. For a gentle, + user-friendly guide to ``pyproject.toml``, see + :ref:`writing-pyproject-toml`. + +The ``pyproject.toml`` file acts as a configuration file for packaging-related +tools (as well as other tools). + +.. note:: This specification was originally defined in :pep:`518` and :pep:`621`. + +The ``pyproject.toml`` file is written in `TOML `_. Three +tables are currently specified, namely +:ref:`[build-system] `, +:ref:`[project] ` and +:ref:`[tool] `. Other tables are reserved for future +use (tool-specific configuration should use the ``[tool]`` table). + +.. _pyproject-build-system-table: + +Declaring build system dependencies: the ``[build-system]`` table +================================================================= + +The ``[build-system]`` table declares any Python level dependencies that +must be installed in order to run the project's build system +successfully. + +.. TODO: merge with PEP 517 + +The ``[build-system]`` table is used to store build-related data. +Initially, only one key of the table is valid and is mandatory +for the table: ``requires``. This key must have a value of a list +of strings representing dependencies required to execute the +build system. The strings in this list follow the :ref:`version specifier +specification `. + +An example ``[build-system]`` table for a project built with +``setuptools`` is: + +.. code-block:: toml + + [build-system] + # Minimum requirements for the build system to execute. + requires = ["setuptools"] + +Build tools are expected to use the example configuration file above as +their default semantics when a ``pyproject.toml`` file is not present. + +Tools should not require the existence of the ``[build-system]`` table. +A ``pyproject.toml`` file may be used to store configuration details +other than build-related data and thus lack a ``[build-system]`` table +legitimately. 
If the file exists but is lacking the ``[build-system]`` +table then the default values as specified above should be used. +If the table is specified but is missing required fields then the tool +should consider it an error. + + +To provide a type-specific representation of the resulting data from +the TOML file for illustrative purposes only, the following +`JSON Schema `_ would match the data format: + +.. code-block:: json + + { + "$schema": "http://json-schema.org/schema#", + + "type": "object", + "additionalProperties": false, + + "properties": { + "build-system": { + "type": "object", + "additionalProperties": false, + + "properties": { + "requires": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": ["requires"] + }, + + "tool": { + "type": "object" + } + } + } + + +.. _pyproject-project-table: + +Declaring project metadata: the ``[project]`` table +=================================================== + +The ``[project]`` table specifies the project's :ref:`core metadata `. + +There are two kinds of metadata: *static* and *dynamic*. Static +metadata is specified in the ``pyproject.toml`` file directly and +cannot be specified or changed by a tool (this includes data +*referred* to by the metadata, e.g. the contents of files referenced +by the metadata). Dynamic metadata is listed via the ``dynamic`` key +(defined later in this specification) and represents metadata that a +tool will later provide. + +The lack of a ``[project]`` table implicitly means the :term:`build backend ` +will dynamically provide all keys. + +The only keys required to be statically defined are: + +- ``name`` + +The keys which are required but may be specified *either* statically +or listed as dynamic are: + +- ``version`` + +All other keys are considered optional and may be specified +statically, listed as dynamic, or left unspecified. 
+ +The complete list of keys allowed in the ``[project]`` table are: + +- ``authors`` +- ``classifiers`` +- ``dependencies`` +- ``description`` +- ``dynamic`` +- ``entry-points`` +- ``gui-scripts`` +- ``keywords`` +- ``license`` +- ``license-files`` +- ``maintainers`` +- ``name`` +- ``optional-dependencies`` +- ``readme`` +- ``requires-python`` +- ``scripts`` +- ``urls`` +- ``version`` + + +.. _pyproject-toml-name: + +``name`` +-------- + +- TOML_ type: string +- Corresponding :ref:`core metadata ` field: + :ref:`Name ` + +The name of the project. + +Tools SHOULD :ref:`normalize ` this name, as soon +as it is read for internal consistency. + + +.. _pyproject-toml-version: + +``version`` +----------- + +- TOML_ type: string +- Corresponding :ref:`core metadata ` field: + :ref:`Version ` + +The version of the project, as defined in the +:ref:`Version specifier specification `. + +Users SHOULD prefer to specify already-normalized versions. + + +.. _pyproject-toml-description: + +``description`` +--------------- + +- TOML_ type: string +- Corresponding :ref:`core metadata ` field: + :ref:`Summary ` + +The summary description of the project in one line. Tools MAY error +if this includes multiple lines. + + +.. _pyproject-toml-readme: + +``readme`` +---------- + +- TOML_ type: string or table +- Corresponding :ref:`core metadata ` field: + :ref:`Description ` and + :ref:`Description-Content-Type ` + +The full description of the project (i.e. the README). + +The key accepts either a string or a table. If it is a string then +it is a path relative to ``pyproject.toml`` to a text file containing +the full description. Tools MUST assume the file's encoding is UTF-8. +If the file path ends in a case-insensitive ``.md`` suffix, then tools +MUST assume the content-type is ``text/markdown``. If the file path +ends in a case-insensitive ``.rst``, then tools MUST assume the +content-type is ``text/x-rst``. 
If a tool recognizes more extensions +than this PEP, they MAY infer the content-type for the user without +specifying this key as ``dynamic``. For all unrecognized suffixes +when a content-type is not provided, tools MUST raise an error. + +The ``readme`` key may also take a table. The ``file`` key has a +string value representing a path relative to ``pyproject.toml`` to a +file containing the full description. The ``text`` key has a string +value which is the full description. These keys are +mutually-exclusive, thus tools MUST raise an error if the metadata +specifies both keys. + +A table specified in the ``readme`` key also has a ``content-type`` +key which takes a string specifying the content-type of the full +description. A tool MUST raise an error if the metadata does not +specify this key in the table. If the metadata does not specify the +``charset`` parameter, then it is assumed to be UTF-8. Tools MAY +support other encodings if they choose to. Tools MAY support +alternative content-types which they can transform to a content-type +as supported by the :ref:`core metadata `. Otherwise +tools MUST raise an error for unsupported content-types. + + +.. _pyproject-toml-requires-python: + +``requires-python`` +------------------- + +- TOML_ type: string +- Corresponding :ref:`core metadata ` field: + :ref:`Requires-Python ` + +The Python version requirements of the project. + + +.. _pyproject-toml-license: + +``license`` +----------- + +- TOML_ type: string +- Corresponding :ref:`core metadata ` field: + :ref:`License-Expression ` + +Text string that is a valid SPDX +:term:`license expression `, +as specified in :doc:`/specifications/license-expression`. +Tools SHOULD validate and perform case normalization of the expression. + +Legacy specification +'''''''''''''''''''' + +- TOML_ type: table +- Corresponding :ref:`core metadata ` field: + :ref:`License ` + +The table may have one of two keys. 
The ``file`` key has a string +value that is a file path relative to :file:`pyproject.toml` to the file +which contains the license for the project. Tools MUST assume the +file's encoding is UTF-8. The ``text`` key has a string value which is +the license of the project. These keys are mutually exclusive, so a +tool MUST raise an error if the metadata specifies both keys. + +The table subkeys were deprecated by :pep:`639` in favor of the string value. + +.. _pyproject-toml-license-files: + +``license-files`` +----------------- + +- TOML_ type: array of strings +- Corresponding :ref:`core metadata ` field: + :ref:`License-File ` + +An array specifying paths in the project source tree relative to the project +root directory (i.e. directory containing :file:`pyproject.toml` or legacy project +configuration files, e.g. :file:`setup.py`, :file:`setup.cfg`, etc.) +to file(s) containing licenses and other legal notices to be +distributed with the package. + +The strings MUST contain valid glob patterns, as specified in +:doc:`/specifications/glob-patterns`. + +Patterns are relative to the directory containing :file:`pyproject.toml`, + +Tools MUST assume that license file content is valid UTF-8 encoded text, +and SHOULD validate this and raise an error if it is not. + +Build tools: + +- MUST include all files matched by a listed pattern in all distribution + archives. +- MUST list each matched file path under a License-File field in the + Core Metadata. + +If the ``license-files`` key is present and +is set to a value of an empty array, then tools MUST NOT include any +license files and MUST NOT raise an error. +If the ``license-files`` key is not defined, tools can decide how to handle +license files. For example they can choose not to include any files or use +their own logic to discover the appropriate files in the distribution. + + +.. _pyproject-toml-authors: +.. 
_pyproject-toml-maintainers: + +``authors``/``maintainers`` +--------------------------- + +- TOML_ type: Array of inline tables with string keys and values +- Corresponding :ref:`core metadata ` field: + :ref:`Author `, + :ref:`Author-email `, + :ref:`Maintainer `, and + :ref:`Maintainer-email ` + +The people or organizations considered to be the "authors" of the +project. The exact meaning is open to interpretation — it may list the +original or primary authors, current maintainers, or owners of the +package. + +The "maintainers" key is similar to "authors" in that its exact +meaning is open to interpretation. + +These keys accept an array of tables with 2 keys: ``name`` and +``email``. Both values must be strings. The ``name`` value MUST be a +valid email name (i.e. whatever can be put as a name, before an email, +in :rfc:`822`) and not contain commas. The ``email`` value MUST be a +valid email address. Both keys are optional, but at least one of the +keys must be specified in the table. + +Using the data to fill in :ref:`core metadata ` is as +follows: + +1. If only ``name`` is provided, the value goes in + :ref:`Author ` or + :ref:`Maintainer ` as appropriate. +2. If only ``email`` is provided, the value goes in + :ref:`Author-email ` or + :ref:`Maintainer-email ` + as appropriate. +3. If both ``email`` and ``name`` are provided, the value goes in + :ref:`Author-email ` or + :ref:`Maintainer-email ` + as appropriate, with the format ``{name} <{email}>``. +4. Multiple values should be separated by commas. + + +.. _pyproject-toml-keywords: + +``keywords`` +------------ + +- TOML_ type: array of strings +- Corresponding :ref:`core metadata ` field: + :ref:`Keywords ` + +The keywords for the project. + + +.. _pyproject-toml-classifiers: + +``classifiers`` +--------------- + +- TOML_ type: array of strings +- Corresponding :ref:`core metadata ` field: + :ref:`Classifier ` + +Trove classifiers which apply to the project. 
+ +The use of ``License ::`` classifiers is deprecated and tools MAY issue a +warning informing users about that. +Build tools MAY raise an error if both the ``license`` string value +(translating to ``License-Expression`` metadata field) and the ``License ::`` +classifiers are used. + + +.. _pyproject-toml-urls: + +``urls`` +-------- + +- TOML_ type: table with keys and values of strings +- Corresponding :ref:`core metadata ` field: + :ref:`Project-URL ` + +A table of URLs where the key is the URL label and the value is the +URL itself. See :ref:`well-known-project-urls` for normalization rules +and well-known rules when processing metadata for presentation. + + +.. _pyproject-toml-scripts: +.. _pyproject-toml-gui-scripts: +.. _pyproject-toml-entry-points: + +Entry points +------------ + +- TOML_ type: table (``[project.scripts]``, ``[project.gui-scripts]``, + and ``[project.entry-points]``) +- :ref:`Entry points specification ` + +There are three tables related to entry points. The +``[project.scripts]`` table corresponds to the ``console_scripts`` +group in the :ref:`entry points specification `. The key +of the table is the name of the entry point and the value is the +object reference. + +The ``[project.gui-scripts]`` table corresponds to the ``gui_scripts`` +group in the :ref:`entry points specification `. Its +format is the same as ``[project.scripts]``. + +The ``[project.entry-points]`` table is a collection of tables. Each +sub-table's name is an entry point group. The key and value semantics +are the same as ``[project.scripts]``. Users MUST NOT create +nested sub-tables but instead keep the entry point groups to only one +level deep. + +Build back-ends MUST raise an error if the metadata defines a +``[project.entry-points.console_scripts]`` or +``[project.entry-points.gui_scripts]`` table, as they would +be ambiguous in the face of ``[project.scripts]`` and +``[project.gui-scripts]``, respectively. + + +.. _pyproject-toml-dependencies: +.. 
_pyproject-toml-optional-dependencies: + +``dependencies``/``optional-dependencies`` +------------------------------------------ + +- TOML_ type: Array of :pep:`508` strings (``dependencies``), and a + table with values of arrays of :pep:`508` strings + (``optional-dependencies``) +- Corresponding :ref:`core metadata ` field: + :ref:`Requires-Dist ` and + :ref:`Provides-Extra ` + +The (optional) dependencies of the project. + +For ``dependencies``, it is a key whose value is an array of strings. +Each string represents a dependency of the project and MUST be +formatted as a valid :pep:`508` string. Each string maps directly to +a :ref:`Requires-Dist ` entry. + +For ``optional-dependencies``, it is a table where each key specifies +an extra and whose value is an array of strings. The strings of the +arrays must be valid :pep:`508` strings. The keys MUST be valid values +for :ref:`Provides-Extra `. Each value +in the array thus becomes a corresponding +:ref:`Requires-Dist ` entry for the +matching :ref:`Provides-Extra ` +metadata. + + +.. _pyproject-toml-dynamic: +.. _declaring-project-metadata-dynamic: + +``dynamic`` +----------- + +- TOML_ type: array of string +- Corresponding :ref:`core metadata ` field: + :ref:`Dynamic ` + +Specifies which keys listed by this PEP were intentionally +unspecified so another tool can/will provide such metadata +dynamically. This clearly delineates which metadata is purposefully +unspecified and expected to stay unspecified compared to being +provided via tooling later on. + +- A build back-end MUST honour statically-specified metadata (which + means the metadata did not list the key in ``dynamic``). +- A build back-end MUST raise an error if the metadata specifies + ``name`` in ``dynamic``. +- If the :ref:`core metadata ` specification lists a + field as "Required", then the metadata MUST specify the key + statically or list it in ``dynamic`` (build back-ends MUST raise an + error otherwise, i.e. 
it should not be possible for a required key + to not be listed somehow in the ``[project]`` table). +- If the :ref:`core metadata ` specification lists a + field as "Optional", the metadata MAY list it in ``dynamic`` if the + expectation is a build back-end will provide the data for the key + later. +- Build back-ends MUST raise an error if the metadata specifies a + key statically as well as being listed in ``dynamic``. +- If the metadata does not list a key in ``dynamic``, then a build + back-end CANNOT fill in the requisite metadata on behalf of the user + (i.e. ``dynamic`` is the only way to allow a tool to fill in + metadata and the user must opt into the filling in). +- Build back-ends MUST raise an error if the metadata specifies a + key in ``dynamic`` but the build back-end was unable to determine + the data for it (omitting the data, if determined to be the accurate + value, is acceptable). + + + +.. _pyproject-tool-table: + +Arbitrary tool configuration: the ``[tool]`` table +================================================== + +The ``[tool]`` table is where any tool related to your Python +project, not just build tools, can have users specify configuration +data as long as they use a sub-table within ``[tool]``, e.g. the +`flit `_ tool would store its +configuration in ``[tool.flit]``. + +A mechanism is needed to allocate names within the ``tool.*`` +namespace, to make sure that different projects do not attempt to use +the same sub-table and collide. Our rule is that a project can use +the subtable ``tool.$NAME`` if, and only if, they own the entry for +``$NAME`` in the Cheeseshop/PyPI. + + + +History +======= + +- May 2016: The initial specification of the ``pyproject.toml`` file, with just + a ``[build-system]`` containing a ``requires`` key and a ``[tool]`` table, was + approved through :pep:`518`. + +- November 2020: The specification of the ``[project]`` table was approved + through :pep:`621`. 
+ +- December 2024: The ``license`` key was redefined, the ``license-files`` key was + added and ``License::`` classifiers were deprecated through :pep:`639`. + + +.. _TOML: https://toml.io diff --git a/source/specifications/recording-installed-packages.rst b/source/specifications/recording-installed-packages.rst new file mode 100644 index 000000000..a689fa7fd --- /dev/null +++ b/source/specifications/recording-installed-packages.rst @@ -0,0 +1,289 @@ +.. highlight:: text + +.. _recording-installed-packages: + +============================ +Recording installed projects +============================ + +This document specifies a common format of recording information +about Python :term:`projects ` installed in an environment. +A common metadata format allows tools to query, manage or uninstall projects, +regardless of how they were installed. + +Almost all information is optional. +This allows tools outside the Python ecosystem, such as Linux package managers, +to integrate with Python tooling as much as possible. +For example, even if an installer cannot easily provide a list of installed +files in a format specific to Python tooling, it should still record the name +and version of the installed project. + + +The .dist-info directory +======================== + +Each project installed from a distribution must, in addition to files, +install a "``.dist-info``" directory located alongside importable modules and +packages (commonly, the ``site-packages`` directory). + +This directory is named as ``{name}-{version}.dist-info``, with ``name`` and +``version`` fields corresponding to :ref:`core-metadata`. Both fields must be +normalized (see the :ref:`name normalization specification ` +and the :ref:`version normalization specification `), +and replace dash (``-``) characters with underscore (``_``) characters, +so the ``.dist-info`` directory always has exactly one dash (``-``) character in +its stem, separating the ``name`` and ``version`` fields. 
+ +Historically, tools have failed to replace dot characters or normalize case in +the ``name`` field, or not perform normalization in the ``version`` field. +Tools consuming ``.dist-info`` directories should expect those fields to be +unnormalized, and treat them as equivalent to their normalized counterparts. +New tools that write ``.dist-info`` directories MUST normalize both ``name`` +and ``version`` fields using the rules described above, and existing tools are +encouraged to start normalizing those fields. + +.. note:: + + The ``.dist-info`` directory's name is formatted to unambiguously represent + a distribution as a filesystem path. Tools presenting a distribution name + to a user should avoid using the normalized name, and instead present the + specified name (when needed prior to resolution to an installed package), + or read the respective fields in Core Metadata, since values listed there + are unescaped and accurately reflect the distribution. Libraries should + provide API for such tools to consume, so tools can have access to the + unnormalized name when displaying distribution information. + +This ``.dist-info`` directory may contain the following files, described in +detail below: + +* ``METADATA``: contains project metadata +* ``RECORD``: records the list of installed files. +* ``INSTALLER``: records the name of the tool used to install the project. +* ``entry_points.txt``: see :ref:`entry-points` for details +* ``direct_url.json``: see :ref:`direct-url` for details + +The ``METADATA`` file is mandatory. +All other files may be omitted at the installing tool's discretion. +Additional installer-specific files may be present. + +This :file:`.dist-info/` directory may contain the following directories, described in +detail below: + +* :file:`licenses/`: contains license files. +* :file:`sboms/`: contains Software Bill-of-Materials files (SBOMs). + +.. 
note:: + + The :ref:`binary-distribution-format` specification describes additional + files that may appear in the ``.dist-info`` directory of a :term:`Wheel`. + Such files may be copied to the ``.dist-info`` directory of an + installed project. + +The previous versions of this specification also specified a ``REQUESTED`` +file. This file is now considered a tool-specific extension, but may be +standardized again in the future. See `PEP 376 `_ +for its original meaning. + + +The METADATA file +================= + +The ``METADATA`` file contains metadata as described in the :ref:`core-metadata` +specification, version 1.1 or greater. + +The ``METADATA`` file is mandatory. +If it cannot be created, or if required core metadata is not available, +installers must report an error and fail to install the project. + + +The RECORD file +=============== + +The ``RECORD`` file holds the list of installed files. +It is a CSV file containing one record (line) per installed file. + +The CSV dialect must be readable with the default ``reader`` of Python's +``csv`` module: + +* field delimiter: ``,`` (comma), +* quoting char: ``"`` (straight double quote), +* line terminator: either ``\r\n`` or ``\n``. + +Each record is composed of three elements: the file's **path**, the **hash** +of the contents, and its **size**. + +The *path* may be either absolute, or relative to the directory containing +the ``.dist-info`` directory (commonly, the ``site-packages`` directory). +On Windows, directories may be separated either by forward- or backslashes +(``/`` or ``\``). + +The *hash* is either an empty string or the name of a hash algorithm from +:py:data:`hashlib.algorithms_guaranteed`, followed by the equals character ``=`` and +the digest of the file's contents, encoded with the urlsafe-base64-nopad +encoding (:py:func:`base64.urlsafe_b64encode(digest) ` with trailing ``=`` removed). + +The *size* is either the empty string, or file's size in bytes, +as a base 10 integer. 
+ +For any file, either or both of the *hash* and *size* fields may be left empty. +Commonly, entries for ``.pyc`` files and the ``RECORD`` file itself have empty +*hash* and *size*. +For other files, leaving the information out is discouraged, as it +prevents verifying the integrity of the installed project. + +If the ``RECORD`` file is present, it must list all installed files of the +project, except ``.pyc`` files corresponding to ``.py`` files listed in +``RECORD``, which are optional. +Notably, the contents of the ``.dist-info`` directory (including the ``RECORD`` +file itself) must be listed. +Directories should not be listed. + +To completely uninstall a package, a tool needs to remove all +files listed in ``RECORD``, all ``.pyc`` files (of all optimization levels) +corresponding to removed ``.py`` files, and any directories emptied by +the uninstallation. + +Here is an example snippet of a possible ``RECORD`` file:: + + /usr/bin/black,sha256=iFlOnL32lIa-RKk-MDihcbJ37wxmRbE4xk6eVYVTTeU,220 + ../../../bin/blackd,sha256=lCadt4mcU-B67O1gkQVh7-vsKgLpx6ny1le34Jz6UVo,221 + __pycache__/black.cpython-38.pyc,, + __pycache__/blackd.cpython-38.pyc,, + black-19.10b0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 + black-19.10b0.dist-info/licenses/LICENSE,sha256=nAQo8MO0d5hQz1vZbhGqqK_HLUqG1KNiI9erouWNbgA,1080 + black-19.10b0.dist-info/METADATA,sha256=UN40nGoVVTSpvLrTBwNsXgZdZIwoKFSrrDDHP6B7-A0,58841 + black-19.10b0.dist-info/RECORD,, + black.py,sha256=45IF72OgNfF8WpeNHnxV2QGfbCLubV5Xjl55cI65kYs,140161 + blackd.py,sha256=JCxaK4hLkMRwVfZMj8FRpRRYC0172-juKqbN22bISLE,6672 + blib2to3/__init__.py,sha256=9_8wL9Scv8_Cs8HJyJHGvx1vwXErsuvlsAqNZLcJQR0,8 + blib2to3/__pycache__/__init__.cpython-38.pyc,, + blib2to3/__pycache__/pygram.cpython-38.pyc,sha256=zpXgX4FHDuoeIQKO_v0sRsB-RzQFsuoKoBYvraAdoJw,1512 + blib2to3/__pycache__/pytree.cpython-38.pyc,sha256=LYLplXtG578ZjaFeoVuoX8rmxHn-BMAamCOsJMU1b9I,24910 + 
blib2to3/pygram.py,sha256=mXpQPqHcamFwch0RkyJsb92Wd0kUP3TW7d-u9dWhCGY,2085 + blib2to3/pytree.py,sha256=RWj3IL4U-Ljhkn4laN0C3p7IRdfvT3aIRjTV-x9hK1c,28530 + +If the ``RECORD`` file is missing, tools that rely on ``.dist-info`` must not +attempt to uninstall or upgrade the package. +(This restriction does not apply to tools that rely on other sources of information, +such as system package managers in Linux distros.) + +.. note:: + + It is *strongly discouraged* for an installed package to modify itself + (e.g., store cache files under its namespace in ``site-packages``). + Changes inside ``site-packages`` should be left to specialized installer + tools such as pip. If a package is nevertheless modified in this way, + then the ``RECORD`` must be updated, otherwise uninstalling the package + will leave unlisted files in place (possibly resulting in a zombie + namespace package). + +The INSTALLER file +================== + +If present, ``INSTALLER`` is a single-line text file naming the tool used to +install the project. +If the installer is executable from the command line, ``INSTALLER`` +should contain the command name. +Otherwise, it should contain a printable ASCII string. + +The file can be terminated by zero or more ASCII whitespace characters. + +Here are examples of two possible ``INSTALLER`` files:: + + pip + +:: + + MegaCorp Cloud Install-O-Matic + +This value should be used for informational purposes only. +For example, if a tool is asked to uninstall a project but finds no ``RECORD`` +file, it may suggest that the tool named in ``INSTALLER`` may be able to do the +uninstallation. + + +The entry_points.txt file +========================= + +This file MAY be created by installers to indicate when packages contain +components intended for discovery and use by other code, including console +scripts and other applications that the installer has made available for +execution. + +Its detailed specification is at :ref:`entry-points`. 
+ + +The direct_url.json file +======================== + +This file MUST be created by installers when installing a distribution from a +requirement specifying a direct URL reference (including a VCS URL). + +This file MUST NOT be created when installing a distribution from another type +of requirement (i.e. name plus version specifier). + +Its detailed specification is at :ref:`direct-url`. + + +The :file:`licenses/` subdirectory +================================== + +If the metadata version is 2.4 or greater and one or more ``License-File`` +fields is specified, the :file:`.dist-info/` directory MUST contain a :file:`licenses/` +subdirectory which MUST contain the files listed in the ``License-File`` fields in +the :file:`METADATA` file at their respective paths relative to the +:file:`licenses/` directory. +Any files in this directory MUST be copied from wheels by the install tools. + + +The :file:`sboms/` subdirectory +================================== + +All files contained within the :file:`.dist-info/sboms/` directory MUST +be Software Bill-of-Materials (SBOM) files that describe software contained +within the installed package. +Any files in this directory MUST be copied from wheels by the install tools. + + +Intentionally preventing changes to installed packages +====================================================== + +In some cases (such as when needing to manage external dependencies in addition +to Python ecosystem dependencies), it is desirable for a tool that installs +packages into a Python environment to ensure that other tools are not used to +uninstall or otherwise modify that installed package, as doing so may cause +compatibility problems with the wider environment. + +To achieve this, affected tools should take the following steps: + +* Rename or remove the ``RECORD`` file to prevent changes via other tools (e.g. 
+ appending a suffix to create a non-standard ``RECORD.tool`` file if the tool + itself needs the information, or omitting the file entirely if the package + contents are tracked and managed via other means) +* Write an ``INSTALLER`` file indicating the name of the tool that should be used + to manage the package (this allows ``RECORD``-aware tools to provide better + error notices when asked to modify affected packages) + +Python runtime providers may also prevent inadvertent modification of platform +provided packages by modifying the default Python package installation scheme +to use a location other than that used by platform provided packages (while also +ensuring both locations appear on the default Python import path). + +In some circumstances, it may be desirable to block even installation of +additional packages via Python-specific tools. For these cases refer to +:ref:`externally-managed-environments` + + +History +======= + +- June 2009: The original version of this specification was approved through + :pep:`376`. At the time, it was known as the *Database of Installed Python + Distributions*. +- March 2020: The specification of the ``direct_url.json`` file was approved + through :pep:`610`. It is only mentioned on this page; see :ref:`direct-url` + for the full definition. +- September 2020: Various amendments and clarifications were approved through + :pep:`627`. +- December 2024: The :file:`.dist-info/licenses/` directory was specified through + :pep:`639`. diff --git a/source/specifications/section-distribution-formats.rst b/source/specifications/section-distribution-formats.rst new file mode 100644 index 000000000..b2c09f8db --- /dev/null +++ b/source/specifications/section-distribution-formats.rst @@ -0,0 +1,9 @@ +================================= +Package Distribution File Formats +================================= + +.. 
toctree:: + :titlesonly: + + source-distribution-format + binary-distribution-format diff --git a/source/specifications/section-distribution-metadata.rst b/source/specifications/section-distribution-metadata.rst new file mode 100644 index 000000000..551e6b730 --- /dev/null +++ b/source/specifications/section-distribution-metadata.rst @@ -0,0 +1,18 @@ +============================= +Package Distribution Metadata +============================= + +.. toctree:: + :titlesonly: + + name-normalization + core-metadata + version-specifiers + dependency-specifiers + pyproject-toml + dependency-groups + inline-script-metadata + platform-compatibility-tags + well-known-project-urls + glob-patterns + license-expression diff --git a/source/specifications/section-installation-metadata.rst b/source/specifications/section-installation-metadata.rst new file mode 100644 index 000000000..685a5aac4 --- /dev/null +++ b/source/specifications/section-installation-metadata.rst @@ -0,0 +1,13 @@ +============================= +Package Installation Metadata +============================= + +.. toctree:: + :titlesonly: + + recording-installed-packages + entry-points + direct-url + direct-url-data-structure + virtual-environments + externally-managed-environments diff --git a/source/specifications/section-package-indices.rst b/source/specifications/section-package-indices.rst new file mode 100644 index 000000000..73004b4d3 --- /dev/null +++ b/source/specifications/section-package-indices.rst @@ -0,0 +1,10 @@ +======================== +Package Index Interfaces +======================== + +.. 
toctree:: + :titlesonly: + + pypirc + simple-repository-api + index-hosted-attestations diff --git a/source/specifications/section-python-description-formats.rst b/source/specifications/section-python-description-formats.rst new file mode 100644 index 000000000..5a56c3968 --- /dev/null +++ b/source/specifications/section-python-description-formats.rst @@ -0,0 +1,8 @@ +========================== +Python Description Formats +========================== + +.. toctree:: + :titlesonly: + + build-details/index diff --git a/source/specifications/section-reproducible-environments.rst b/source/specifications/section-reproducible-environments.rst new file mode 100644 index 000000000..1f83f3673 --- /dev/null +++ b/source/specifications/section-reproducible-environments.rst @@ -0,0 +1,8 @@ +========================= +Reproducible Environments +========================= + +.. toctree:: + :titlesonly: + + pylock-toml diff --git a/source/specifications/simple-repository-api.rst b/source/specifications/simple-repository-api.rst new file mode 100644 index 000000000..d18c425db --- /dev/null +++ b/source/specifications/simple-repository-api.rst @@ -0,0 +1,1008 @@ + +.. _simple-repository-api: + +===================== +Simple repository API +===================== + +The keywords "**MUST**", "**MUST NOT**", "**REQUIRED**", "**SHALL**", +"**SHALL NOT**", "**SHOULD**", "**SHOULD NOT**", "**RECOMMENDED**", "**MAY**", +and "**OPTIONAL**" in this document are to be interpreted as described in +:rfc:`2119`. + +The interface for querying available package versions and +retrieving packages from an index server comes in two forms: +HTML and JSON. + +.. _simple-repository-api-base: + +Base HTML API +============= + +A repository that implements the simple API is defined by its base URL, this is +the top level URL that all additional URLs are below. The API is named the +"simple" repository due to the fact that PyPI's base URL is +``https://pypi.org/simple/``. + +.. 
note:: All subsequent URLs in this document will be relative to this base + URL (so given PyPI's URL, a URL of ``/foo/`` would be + ``https://pypi.org/simple/foo/``). + + +Within a repository, the root URL (``/`` for this spec which represents the base +URL) **MUST** be a valid HTML5 page with a single anchor element per project in +the repository. The text of the anchor tag **MUST** be the name of +the project and the href attribute **MUST** link to the URL for that particular +project. As an example: + +.. code-block:: html + + + + + frob + spamspamspam + + + +Below the root URL is another URL for each individual project contained within +a repository. The format of this URL is ``//`` where the ```` +is replaced by the normalized name for that project, so a project named +"HolyGrail" would have a URL like ``/holygrail/``. This URL must respond with +a valid HTML5 page with a single anchor element per file for the project. The +href attribute **MUST** be a URL that links to the location of the file for +download, and the text of the anchor tag **MUST** match the final path +component (the filename) of the URL. The URL **SHOULD** include a hash in the +form of a URL fragment with the following syntax: ``#=``, +where ```` is the lowercase name of the hash function (such as +``sha256``) and ```` is the hex encoded digest. + +In addition to the above, the following constraints are placed on the API: + +* All URLs which respond with an HTML5 page **MUST** end with a ``/`` and the + repository **SHOULD** redirect the URLs without a ``/`` to add a ``/`` to the + end. + +* URLs may be either absolute or relative as long as they point to the correct + location. + +* There are no constraints on where the files must be hosted relative to the + repository. + +* There may be any other HTML elements on the API pages as long as the required + anchor elements exist. + +* Repositories **MAY** redirect unnormalized URLs to the canonical normalized + URL (e.g. 
``/Foobar/`` may redirect to ``/foobar/``), however clients + **MUST NOT** rely on this redirection and **MUST** request the normalized + URL. + +* Repositories **SHOULD** choose a hash function from one of the ones + guaranteed to be available via the :py:mod:`hashlib` module in the Python standard + library (currently ``md5``, ``sha1``, ``sha224``, ``sha256``, ``sha384``, + ``sha512``). The current recommendation is to use ``sha256``. + +* If there is a GPG signature for a particular distribution file it **MUST** + live alongside that file with the same name with a ``.asc`` appended to it. + So if the file ``/packages/HolyGrail-1.0.tar.gz`` existed and had an + associated signature, the signature would be located at + ``/packages/HolyGrail-1.0.tar.gz.asc``. + +* A repository **MAY** include a ``data-core-metadata`` attribute on a file + link. + + The repository **SHOULD** provide the hash of the Core Metadata file as the + ``data-core-metadata`` attribute's value using the syntax + ``=``, where ```` is the lower cased name of + the hash function used, and ```` is the hex encoded digest. The + repository **MAY** use ``true`` as the attribute's value if a hash is unavailable. + +* A repository **MAY** include a ``data-dist-info-metadata`` attribute on a + file link. + + Index clients **MAY** consume this key if present, as a legacy fallback + for ``data-core-metadata``. + + .. important:: + + ``data-dist-info-metadata`` was standardized with :pep:`658` and renamed to + ``data-core-metadata`` with :pep:`714`. + +* A repository **MAY** include a ``data-gpg-sig`` attribute on a file link with + a value of either ``true`` or ``false`` to indicate whether or not there is a + GPG signature. Repositories that do this **SHOULD** include it on every link. + +* A repository **MAY** include a ``data-requires-python`` attribute on a file + link. This exposes the :ref:`core-metadata-requires-python` metadata field + for the corresponding release. 
Where this is present, installer tools + **SHOULD** ignore the download when installing to a Python version that + doesn't satisfy the requirement. For example: + + .. code-block:: html + + ... + + In the attribute value, < and > have to be HTML encoded as ``<`` and + ``>``, respectively. + +* A repository **MAY** include a ``data-provenance`` attribute on a file link. + The value of this attribute **MUST** be a fully qualified URL, signaling that + the file's provenance can be found at that URL. This URL **MUST** represent + a `secure origin `_. + + .. note:: + + The ``data-provenance`` attribute was added with API version 1.3. + + .. note:: + + The format of the linked provenance is defined in :ref:`index-hosted-attestations`. + +Normalized Names +---------------- + +This spec references the concept of a "normalized" project name. As per +:ref:`the name normalization specification ` +the only valid characters in a name are the ASCII alphabet, ASCII numbers, +``.``, ``-``, and ``_``. The name should be lowercased with all runs of the +characters ``.``, ``-``, or ``_`` replaced with a single ``-`` character. This +can be implemented in Python with the ``re`` module:: + + import re + + def normalize(name): + return re.sub(r"[-_.]+", "-", name).lower() + +.. _simple-repository-api-yank: + +Adding "Yank" Support to the Simple API +======================================= + +Links in the simple repository **MAY** have a ``data-yanked`` attribute +which may have no value, or may have an arbitrary string as a value. The +presence of a ``data-yanked`` attribute **SHOULD** be interpreted as +indicating that the file pointed to by this particular link has been +"Yanked", and should not generally be selected by an installer, except +under specific scenarios. + +The value of the ``data-yanked`` attribute, if present, is an arbitrary +string that represents the reason for why the file has been yanked. 
Tools +that process the simple repository API **MAY** surface this string to +end users. + +The yanked attribute is not immutable once set, and may be rescinded in +the future (and once rescinded, may be reset as well). Thus API users +**MUST** be able to cope with a yanked file being "unyanked" (and even +yanked again). + + +Installers +---------- + +The desirable experience for users is that once a file is yanked, when +a human being is currently trying to directly install a yanked file, that +it fails as if that file had been deleted. However, when a human did that +awhile ago, and now a computer is just continuing to mechanically follow +the original order to install the now yanked file, then it acts as if it +had not been yanked. + +An installer **MUST** ignore yanked releases, if the selection constraints +can be satisfied with a non-yanked version, and **MAY** refuse to use a +yanked release even if it means that the request cannot be satisfied at all. +An implementation **SHOULD** choose a policy that follows the spirit of the +intention above, and that prevents "new" dependencies on yanked +releases/files. + +What this means is left up to the specific installer, to decide how to best +fit into the overall usage of their installer. However, there are two +suggested approaches to take: + +1. Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version using + either ``==`` (without any modifiers that make it a range, such as + ``.*``) or ``===``. Matching this version specifier should otherwise + be done as per :ref:`the version specifiers specification + ` for things like local versions, zero padding, + etc. +2. Yanked files are always ignored, unless they are the only file that + matches what a lock file (such as ``Pipfile.lock`` or ``poetry.lock``) + specifies to be installed. 
In this case, a yanked file **SHOULD** not + be used when creating or updating a lock file from some input file or + command. + +Regardless of the specific strategy that an installer chooses for deciding +when to install yanked files, an installer **SHOULD** emit a warning when +it does decide to install a yanked file. That warning **MAY** utilize the +value of the ``data-yanked`` attribute (if it has a value) to provide more +specific feedback to the user about why that file had been yanked. + + +Mirrors +------- + +Mirrors can generally treat yanked files one of two ways: + +1. They may choose to omit them from their simple repository API completely, + providing a view over the repository that shows only "active", unyanked + files. +2. They may choose to include yanked files, and additionally mirror the + ``data-yanked`` attribute as well. + +Mirrors **MUST NOT** mirror a yanked file without also mirroring the +``data-yanked`` attribute for it. + +.. _simple-repository-api-versioning: + +Versioning PyPI's Simple API +============================ + +This spec proposes the inclusion of a meta tag on the responses of every +successful request to a simple API page, which contains a name attribute +of ``pypi:repository-version``, and a content that is a :ref:`version specifiers +specification ` compatible +version number, which is further constrained to ONLY be Major.Minor, and +none of the additional features supported by :ref:`the version specifiers +specification `. + +This would end up looking like: + +.. code-block:: html + + + +When interpreting the repository version: + +* Incrementing the major version is used to signal a backwards + incompatible change such that existing clients would no longer be + expected to be able to meaningfully use the API. +* Incrementing the minor version is used to signal a backwards + compatible change such that existing clients would still be + expected to be able to meaningfully use the API. 
+ +It is left up to the discretion of any future specs as to what +specifically constitutes a backwards incompatible vs compatible change +beyond the broad suggestion that existing clients will be able to +"meaningfully" continue to use the API, and can include adding, +modifying, or removing existing features. + +It is expectation of this spec that the major version will never be +incremented, and any future major API evolutions would utilize a +different mechanism for API evolution. However the major version +is included to disambiguate with future versions (e.g. a hypothetical +simple api v2 that lived at /v2/, but which would be confusing if the +repository-version was set to a version >= 2). + +API Version History +------------------- + +This section contains only an abbreviated history of changes, +as marked by the API version number. For a full history of changes including +changes made before API versioning, see :ref:`History `. + +- API version 1.0: Initial version of the API, declared with :pep:`629`. +- API version 1.1: Added ``versions``, ``files[].size``, and ``files[].upload-time`` metadata + to the JSON serialization, declared with :pep:`700`. +- API version 1.2: Added repository "tracks" metadata, declared with :pep:`708`. +- API version 1.3: Added provenance metadata, declared with :pep:`740`. + +Clients +------- + +Clients interacting with the simple API **SHOULD** introspect each +response for the repository version, and if that data does not exist +**MUST** assume that it is version 1.0. + +When encountering a major version greater than expected, clients +**MUST** hard fail with an appropriate error message for the user. + +When encountering a minor version greater than expected, clients +**SHOULD** warn users with an appropriate message. + +Clients **MAY** still continue to use feature detection in order to +determine what features a repository uses. + +.. 
_simple-repository-api-metadata-file: + +Serve Distribution Metadata in the Simple Repository API +======================================================== + +In a simple repository's project page, each anchor tag pointing to a +distribution **MAY** have a ``data-dist-info-metadata`` attribute. The +presence of the attribute indicates the distribution represented by +the anchor tag **MUST** contain a Core Metadata file that will not be +modified when the distribution is processed and/or installed. + +If a ``data-dist-info-metadata`` attribute is present, the repository +**MUST** serve the distribution's Core Metadata file alongside the +distribution with a ``.metadata`` appended to the distribution's file +name. For example, the Core Metadata of a distribution served at +``/files/distribution-1.0-py3.none.any.whl`` would be located at +``/files/distribution-1.0-py3.none.any.whl.metadata``. This is similar +to how :ref:`the base HTML API specification ` +specifies the GPG signature file's location. + +The repository **SHOULD** provide the hash of the Core Metadata file +as the ``data-dist-info-metadata`` attribute's value using the syntax +``=``, where ```` is the lower cased +name of the hash function used, and ```` is the hex encoded +digest. The repository **MAY** use ``true`` as the attribute's value +if a hash is unavailable. + +Backwards Compatibility +----------------------- + +If an anchor tag lacks the ``data-dist-info-metadata`` attribute, +tools are expected to revert to their current behaviour of downloading +the distribution to inspect the metadata. + +Older tools not supporting the new ``data-dist-info-metadata`` +attribute are expected to ignore the attribute and maintain their +current behaviour of downloading the distribution to inspect the +metadata. This is similar to how prior ``data-`` attribute additions +expect existing tools to operate. + +.. 
_simple-repository-api-json: + +JSON-based Simple API for Python Package Indexes +================================================ + +To enable response parsing with only the standard library, this spec specifies that +all responses (besides the files themselves, and the HTML responses from +:ref:`the base HTML API specification `) should be +serialized using `JSON `_. + +To enable zero configuration discovery and to minimize the amount of additional HTTP +requests, this spec extends :ref:`the base HTML API specification +` such that all of the API endpoints (other than the +files themselves) will utilize HTTP content negotiation to allow client and server to +select the correct serialization format to serve, i.e. either HTML or JSON. + + +Versioning +---------- + +Versioning will adhere to :ref:`the API versioning specification +` format (``Major.Minor``), which has defined the +existing HTML responses to be ``1.0``. Since this spec does not introduce new features +into the API, rather it describes a different serialization format for the existing +features, this spec does not change the existing ``1.0`` version, and instead just +describes how to serialize that into JSON. + +Similar to :ref:`the API versioning specification +`, the major version number **MUST** be +incremented if any +changes to the new format would result in no longer being able to expect existing +clients to meaningfully understand the format. + +Likewise, the minor version **MUST** be incremented if features are +added or removed from the format, but existing clients would be expected to continue +to meaningfully understand the format. + +Changes that would not result in existing clients being unable to meaningfully +understand the format and which do not represent features being added or removed +may occur without changing the version number. 
+ +This is intentionally vague, as this spec believes it is best left up to future specs +that make any changes to the API to investigate and decide whether or not that +change should increment the major or minor version. + +Future versions of the API may add things that can only be represented in a subset +of the available serializations of that version. All serializations version numbers, +within a major version, **SHOULD** be kept in sync, but the specifics of how a +feature serializes into each format may differ, including whether or not that feature +is present at all. + +It is the intent of this spec that the API should be thought of as URL endpoints that +return data, whose interpretation is defined by the version of that data, and then +serialized into the target serialization format. + + +.. _json-serialization: + +JSON Serialization +------------------ + +The URL structure from :ref:`the base HTML API specification +` still applies, as this spec only adds an additional +serialization format for the already existing API. + +The following constraints apply to all JSON serialized responses described in this +spec: + +* All JSON responses will *always* be a JSON object rather than an array or other + type. + +* While JSON doesn't natively support a URL type, any value that represents an + URL in this API may be either absolute or relative as long as they point to + the correct location. If relative, they are relative to the current URL as if + it were HTML. + +* Additional keys may be added to any dictionary objects in the API responses + and clients **MUST** ignore keys that they don't understand. + +* All JSON responses will have a ``meta`` key, which contains information related to + the response itself, rather than the content of the response. 
+
+* All JSON responses will have a ``meta.api-version`` key, which will be a string that
+  contains the :ref:`API versioning specification
+  <simple-repository-api-versioning>` ``Major.Minor`` version number, with the
+  same fail/warn semantics as defined in :ref:`the API versioning specification
+  <simple-repository-api-versioning>`.
+
+* All requirements of :ref:`the base HTML API specification
+  <simple-repository-api-base>` that are not HTML specific still apply.
+
+* Keys (at any level) with a leading underscore are reserved as private for
+  index server use. No future standard will assign a meaning to any such key.
+
+Project List
+~~~~~~~~~~~~
+
+The root URL ``/`` for this spec (which represents the base URL) will be a JSON encoded
+dictionary which has two keys:
+
+- ``projects``: An array where each entry is a dictionary with a single key, ``name``, which represents the project name as a string.
+- ``meta``: The general response metadata as `described earlier <#json-serialization>`__.
+
+As an example:
+
+.. code-block:: json
+
+   {
+     "meta": {
+       "api-version": "1.3"
+     },
+     "projects": [
+       {"name": "Frob"},
+       {"name": "spamspamspam"}
+     ]
+   }
+
+
+.. note::
+
+   The ``name`` field is the same as the one from :ref:`the base HTML API
+   specification <simple-repository-api-base>`, which does not specify
+   whether it is the non-normalized display name or the normalized name. In practice
+   different implementations of these specs are choosing differently here, so relying
+   on it being either non-normalized or normalized is relying on an implementation
+   detail of the repository in question.
+
+
+.. note::
+
+   While the ``projects`` key is an array, and thus is required to be in some kind
+   of an order, neither :ref:`the base HTML API specification
+   <simple-repository-api-base>` nor this spec requires any specific ordering nor
+   that the ordering is consistent from one request to the next. Mentally this is
+   best thought of as a set, but both JSON and HTML lack the functionality to have
+   sets.
+
+
+Project Detail
+~~~~~~~~~~~~~~
+
+The format of this URL is ``/<project>/`` where the ``<project>`` is replaced by the
+:ref:`base HTML API specification <simple-repository-api-base>` normalized
+name for that project, so a project named "Silly_Walk" would
+have a URL like ``/silly-walk/``.
+
+This URL must respond with a JSON encoded dictionary that has four keys:
+
+- ``name``: The normalized name of the project.
+- ``files``: A list of dictionaries, each one representing an individual file.
+- ``meta``: The general response metadata as `described earlier <#json-serialization>`__.
+- ``versions``: A list of version strings specifying all of the project versions
+  uploaded for this project. The value of ``versions`` is logically a set,
+  and as such may not contain duplicates, and the order of the versions is
+  not significant.
+
+  .. note::
+
+     All of the files listed in the ``files`` key MUST be associated with one of the
+     versions in the ``versions`` key. The ``versions`` key MAY contain versions with
+     no associated files (to represent versions with no files uploaded, if the server
+     has such a concept).
+
+  .. note::
+
+     Because servers may hold "legacy" data from before the adoption of
+     :ref:`the version specifiers specification (VSS) <version-specifiers>`, version
+     strings currently cannot be required to be valid VSS versions, and therefore
+     cannot be assumed to be orderable using the VSS rules. However, servers **SHOULD**
+     use normalized VSS versions where possible.
+
+  .. note::
+
+     The ``versions`` key was added with API version 1.1.
+
+Each individual file dictionary has the following keys:
+
+- ``filename``: The filename that is being represented.
+- ``url``: The URL that the file can be fetched from.
+- ``hashes``: A dictionary mapping a hash name to a hex encoded digest of the file.
+  Multiple hashes can be included, and it is up to the client to decide what to do
+  with multiple hashes (it may validate all of them or a subset of them, or nothing
+  at all). These hash names **SHOULD** always be normalized to be lowercase.
+ + The ``hashes`` dictionary **MUST** be present, even if no hashes are available + for the file, however it is **HIGHLY** recommended that at least one secure, + guaranteed-to-be-available hash is always included. + + By default, any hash algorithm available via :py:mod:`hashlib` (specifically any that can + be passed to :py:func:`hashlib.new()` and do not require additional parameters) can + be used as a key for the hashes dictionary. At least one secure algorithm from + :py:data:`hashlib.algorithms_guaranteed` **SHOULD** always be included. At the time + of this spec, ``sha256`` specifically is recommended. +- ``requires-python``: An **optional** key that exposes the + :ref:`core-metadata-requires-python` + metadata field. Where this is present, installer tools + **SHOULD** ignore the download when installing to a Python version that + doesn't satisfy the requirement. + + Unlike ``data-requires-python`` in :ref:`the base HTML API specification + `, the ``requires-python`` key does not + require any special escaping other than anything JSON does naturally. +- ``core-metadata``: An **optional** key that indicates + that metadata for this file is available, via the same location as specified in + :ref:`the API metadata file specification + ` (``{file_url}.metadata``). Where this + is present, it **MUST** be + either a boolean to indicate if the file has an associated metadata file, or a + dictionary mapping hash names to a hex encoded digest of the metadata's hash. + + When this is a dictionary of hashes instead of a boolean, then all the same + requirements and recommendations as the ``hashes`` key hold true for this key as + well. + + If this key is missing then the metadata file may or may not exist. If the key + value is truthy, then the metadata file is present, and if it is falsey then it + is not. + + It is recommended that servers make the hashes of the metadata file available if + possible. 
+
+- ``dist-info-metadata``: An **optional**, deprecated alias for ``core-metadata``.
+
+  Index clients **MAY** consume this key if present, as a legacy fallback
+  for ``core-metadata``.
+
+  .. important::
+
+     ``dist-info-metadata`` was standardized with :pep:`658` and renamed to
+     ``core-metadata`` with :pep:`714`.
+
+- ``gpg-sig``: An **optional** key that acts as a boolean to indicate if the file has
+  an associated GPG signature or not. The URL for the signature file follows what
+  is specified in :ref:`the base HTML API specification
+  <simple-repository-api-base>` (``{file_url}.asc``). If this key does not exist, then
+  the signature may or may not exist.
+
+- ``yanked``: An **optional** key which may be either a boolean to indicate if the
+  file has been yanked, or a non empty, but otherwise arbitrary, string to indicate
+  that a file has been yanked with a specific reason. If the ``yanked`` key is present
+  and is a truthy value, then it **SHOULD** be interpreted as indicating that the
+  file pointed to by the ``url`` field has been "Yanked" as per :ref:`the API
+  yank specification <simple-repository-api-yank>`.
+- ``size``: A **mandatory** key. It **MUST** contain an integer which is the file size in bytes.
+
+  .. note::
+
+     The ``size`` key was added with API version 1.1.
+
+- ``upload-time``: An **optional** key that, if present, **MUST** contain a valid
+  ISO 8601 date/time string in the format ``yyyy-mm-ddThh:mm:ss.ffffffZ``
+  which represents the time the file was uploaded to the index.
+
+  As indicated by the ``Z`` suffix, the upload time **MUST** use the UTC timezone.
+  The fractional seconds part of the timestamp (the ``.ffffff`` part) is optional,
+  and if present may contain up to 6 digits of precision. If a server does not record
+  upload time information for a file, it **MAY** omit the ``upload-time`` key.
+
+  .. note::
+
+     The ``upload-time`` key was added with API version 1.1.
+
+- ``provenance``: An **optional** key which, if present, **MUST** be either a JSON
+  string or ``null``.
If not ``null``, it **MUST** be a URL to the file's + associated provenance, with the same rules as ``data-provenance`` in the + :ref:`base HTML API specification `. + + .. note:: + + The ``provenance`` field was added with API version 1.3. + +As an example: + +.. code-block:: json + + { + "meta": { + "api-version": "1.3" + }, + "name": "holygrail", + "files": [ + { + "filename": "holygrail-1.0.tar.gz", + "url": "https://example.com/files/holygrail-1.0.tar.gz", + "hashes": {"sha256": "...", "blake2b": "..."}, + "requires-python": ">=3.7", + "yanked": "Had a vulnerability", + "size": 123456 + }, + { + "filename": "holygrail-1.0-py3-none-any.whl", + "url": "https://example.com/files/holygrail-1.0-py3-none-any.whl", + "hashes": {"sha256": "...", "blake2b": "..."}, + "requires-python": ">=3.7", + "dist-info-metadata": true, + "provenance": "https://example.com/files/holygrail-1.0-py3-none-any.whl.provenance", + "size": 1337 + } + ], + "versions": ["1.0"] + } + + +.. note:: + + While the ``files`` key is an array, and thus is required to be in some kind + of an order, neither :ref:`the base HTML API specification + ` nor this spec requires any specific ordering nor + that the ordering is consistent from one request to the next. Mentally this is + best thought of as a set, but both JSON and HTML lack the functionality to have + sets. + + +Content-Types +------------- + +This spec proposes that all responses from the Simple API will have a standard +content type that describes what the response is (a Simple API response), what +version of the API it represents, and what serialization format has been used. + +The structure of this content type will be: + +.. 
code-block:: text
+
+   application/vnd.pypi.simple.$version+format
+
+Since only major versions should be disruptive to clients attempting to
+understand one of these API responses, only the major version will be included
+in the content type, and will be prefixed with a ``v`` to clarify that it is a
+version number.
+
+Which means that for the existing 1.0 API, the content types would be:
+
+- **JSON:** ``application/vnd.pypi.simple.v1+json``
+- **HTML:** ``application/vnd.pypi.simple.v1+html``
+
+In addition to the above, a special "meta" version is supported named ``latest``,
+whose purpose is to allow clients to request the absolute latest version, without
+having to know ahead of time what that version is. It is recommended however,
+that clients be explicit about what versions they support.
+
+To support existing clients which expect the existing :ref:`the base HTML API
+specification <simple-repository-api-base>` API responses to
+use the ``text/html`` content type, this spec further defines ``text/html`` as an alias
+for the ``application/vnd.pypi.simple.v1+html`` content type.
+
+
+Version + Format Selection
+--------------------------
+
+Now that there are multiple possible serializations, we need a mechanism to allow
+clients to indicate what serialization formats they're able to understand. In
+addition, it would be beneficial if any possible new major version to the API can
+be added without disrupting existing clients expecting the previous API version.
+
+To enable this, this spec standardizes on the use of HTTP's
+`Server-Driven Content Negotiation <https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation#server-driven_content_negotiation>`_.
+
+While this spec won't fully describe the entirety of server-driven content
+negotiation, the flow is roughly:
+
+1. The client makes an HTTP request containing an ``Accept`` header listing all
+   of the version+format content types that they are able to understand.
+2.
The server inspects that header, selects one of the listed content types, + then returns a response using that content type (treating the absence of + an ``Accept`` header as ``Accept: */*``). +3. If the server does not support any of the content types in the ``Accept`` + header then they are able to choose between 3 different options for how to + respond: + + a. Select a default content type other than what the client has requested + and return a response with that. + b. Return a HTTP ``406 Not Acceptable`` response to indicate that none of + the requested content types were available, and the server was unable + or unwilling to select a default content type to respond with. + c. Return a HTTP ``300 Multiple Choices`` response that contains a list of + all of the possible responses that could have been chosen. +4. The client interprets the response, handling the different types of responses + that the server may have responded with. + +This spec does not specify which choices the server makes in regards to handling +a content type that it isn't able to return, and clients **SHOULD** be prepared +to handle all of the possible responses in whatever way makes the most sense for +that client. + +However, as there is no standard format for how a ``300 Multiple Choices`` +response can be interpreted, this spec highly discourages servers from utilizing +that option, as clients will have no way to understand and select a different +content-type to request. In addition, it's unlikely that the client *could* +understand a different content type anyways, so at best this response would +likely just be treated the same as a ``406 Not Acceptable`` error. + +This spec **does** require that if the meta version ``latest`` is being used, the +server **MUST** respond with the content type for the actual version that is +contained in the response +(i.e. 
an ``Accept: application/vnd.pypi.simple.latest+json`` request that returns +a ``v1.x`` response should have a ``Content-Type`` of +``application/vnd.pypi.simple.v1+json``). + +The ``Accept`` header is a comma separated list of content types that the client +understands and is able to process. It supports three different formats for each +content type that is being requested: + +- ``$type/$subtype`` +- ``$type/*`` +- ``*/*`` + +For the use of selecting a version+format, the most useful of these is +``$type/$subtype``, as that is the only way to actually specify the version +and format you want. + +The order of the content types listed in the ``Accept`` header does not have any +specific meaning, and the server **SHOULD** consider all of them to be equally +valid to respond with. If a client wishes to specify that they prefer a specific +content type over another, they may use the ``Accept`` header's +`quality value `_ +syntax. + +This allows a client to specify a priority for a specific entry in their +``Accept`` header, by appending a ``;q=`` followed by a value between ``0`` and +``1`` inclusive, with up to 3 decimal digits. When interpreting this value, +an entry with a higher quality has priority over an entry with a lower quality, +and any entry without a quality present will default to a quality of ``1``. + +However, clients should keep in mind that a server is free to select **any** of +the content types they've asked for, regardless of their requested priority, and +it may even return a content type that they did **not** ask for. + +To aid clients in determining the content type of the response that they have +received from an API request, this spec requires that servers always include a +``Content-Type`` header indicating the content type of the response. This is +technically a backwards incompatible change, however in practice +`pip has been enforcing this requirement `_ +so the risks for actual breakages is low. 
+ +An example of how a client can operate would look like: + +.. code-block:: python + + import email.message + import requests + + def parse_content_type(header: str) -> str: + m = email.message.Message() + m["content-type"] = header + return m.get_content_type() + + # Construct our list of acceptable content types, we want to prefer + # that we get a v1 response serialized using JSON, however we also + # can support a v1 response serialized using HTML. For compatibility + # we also request text/html, but we prefer it least of all since we + # don't know if it's actually a Simple API response, or just some + # random HTML page that we've gotten due to a misconfiguration. + CONTENT_TYPES = [ + "application/vnd.pypi.simple.v1+json", + "application/vnd.pypi.simple.v1+html;q=0.2", + "text/html;q=0.01", # For legacy compatibility + ] + ACCEPT = ", ".join(CONTENT_TYPES) + + + # Actually make our request to the API, requesting all of the content + # types that we find acceptable, and letting the server select one of + # them out of the list. + resp = requests.get("https://pypi.org/simple/", headers={"Accept": ACCEPT}) + + # If the server does not support any of the content types you requested, + # AND it has chosen to return a HTTP 406 error instead of a default + # response then this will raise an exception for the 406 error. + resp.raise_for_status() + + + # Determine what kind of response we've gotten to ensure that it is one + # that we can support, and if it is, dispatch to a function that will + # understand how to interpret that particular version+serialization. If + # we don't understand the content type we've gotten, then we'll raise + # an exception. 
+    content_type = parse_content_type(resp.headers.get("content-type", ""))
+    match content_type:
+        case "application/vnd.pypi.simple.v1+json":
+            handle_v1_json(resp)
+        case "application/vnd.pypi.simple.v1+html" | "text/html":
+            handle_v1_html(resp)
+        case _:
+            raise Exception(f"Unknown content type: {content_type}")
+
+If a client wishes to only support HTML or only support JSON, then they would
+just remove the content types that they do not want from the ``Accept`` header,
+and turn receiving them into an error.
+
+
+Alternative Negotiation Mechanisms
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While using HTTP's Content negotiation is considered the standard way for a client
+and server to coordinate to ensure that the client is getting an HTTP response that
+it is able to understand, there are situations where that mechanism may not be
+sufficient. For those cases this spec has alternative negotiation mechanisms that
+may *optionally* be used instead.
+
+
+URL Parameter
+^^^^^^^^^^^^^
+
+Servers that implement the Simple API may choose to support a URL parameter named
+``format`` to allow the clients to request a specific version of the URL.
+
+The value of the ``format`` parameter should be **one** of the valid content types.
+Passing multiple content types, wild cards, quality values, etc... is **not**
+supported.
+
+Supporting this parameter is optional, and clients **SHOULD NOT** rely on it for
+interacting with the API. This negotiation mechanism is intended to allow for easier
+human based exploration of the API within a browser, or to allow documentation or
+notes to link to a specific version+format.
+
+Servers that do not support this parameter may choose to return an error when it is
+present, or they may simply ignore its presence.
+
+When a server does implement this parameter, it **SHOULD** take precedence over any
+values in the client's ``Accept`` header, and if the server does not support the
+requested format, it may choose to fall back to the ``Accept`` header, or choose any
+of the error conditions that standard server-driven content negotiation typically
+has (e.g. ``406 Not Acceptable``, ``300 Multiple Choices``, or selecting a default
+type to return).
+
+
+Endpoint Configuration
+^^^^^^^^^^^^^^^^^^^^^^
+
+This option technically is not a special option at all, it is just a natural
+consequence of using content negotiation and allowing servers to select which of the
+available content types is their default.
+
+If a server is unwilling or unable to implement the server-driven content negotiation,
+and would instead rather require users to explicitly configure their client to select
+the version they want, then that is a supported configuration.
+
+To enable this, a server should make multiple endpoints (for instance,
+``/simple/v1+html/`` and/or ``/simple/v1+json/``) for each version+format that they
+wish to support. Under that endpoint, they can host a copy of their repository that
+only supports one (or a subset) of the content-types. When a client makes a request
+using the ``Accept`` header, the server can ignore it and return the content type
+that corresponds to that endpoint.
+
+For clients that wish to require specific configuration, they can keep track of
+which version+format a specific repository URL was configured for, and when making
+a request to that server, emit an ``Accept`` header that *only* includes the correct
+content type.
+
+
+TUF Support - PEP 458
+---------------------
+
+:pep:`458` requires that all API responses are hashable and that they can be uniquely
+identified by a path relative to the repository root. For a Simple API repository, the
+target path is the Root of our API (e.g. ``/simple/`` on PyPI).
This creates +challenges when accessing the API using a TUF client instead of directly using a +standard HTTP client, as the TUF client cannot handle the fact that a target could +have multiple different representations that all hash differently. + +:pep:`458` does not specify what the target path should be for the Simple API, but +TUF requires that the target paths be "file-like", in other words, a path like +``simple/PROJECT/`` is not acceptable, because it technically points to a +directory. + +The saving grace is that the target path does not *have* to actually match the URL +being fetched from the Simple API, and it can just be a sigil that the fetching code +knows how to transform into the actual URL that needs to be fetched. This same thing +can hold true for other aspects of the actual HTTP request, such as the ``Accept`` +header. + +Ultimately figuring out how to map a directory to a filename is out of scope for this +spec (but it would be in scope for :pep:`458`), and this spec defers making a decision +about how exactly to represent this inside of :pep:`458` metadata. + +However, it appears that the current WIP branch against pip that attempts to implement +:pep:`458` is using a target path like ``simple/PROJECT/index.html``. This could be +modified to include the API version and serialization format using something like +``simple/PROJECT/vnd.pypi.simple.vN.FORMAT``. So the v1 HTML format would be +``simple/PROJECT/vnd.pypi.simple.v1.html`` and the v1 JSON format would be +``simple/PROJECT/vnd.pypi.simple.v1.json``. + +In this case, since ``text/html`` is an alias to ``application/vnd.pypi.simple.v1+html`` +when interacting through TUF, it likely will make the most sense to normalize to the +more explicit name. + +Likewise the ``latest`` metaversion should not be included in the targets, only +explicitly declared versions should be supported. 
+
+Recommendations
+---------------
+
+This section is non-normative, and represents what the spec authors believe to be
+the best default implementation decisions for something implementing this spec, but
+it does **not** represent any sort of requirement to match these decisions.
+
+These decisions have been chosen to maximize the number of requests that can be
+moved onto the newest version of an API, while maintaining the greatest amount
+of compatibility. In addition, they've also tried to make using the API provide
+guardrails that attempt to push clients into making the best choices they can.
+
+It is recommended that servers:
+
+- Support all 3 content types described in this spec, using server-driven
+  content negotiation, for as long as they reasonably can, or at least as
+  long as they're receiving non trivial traffic that uses the HTML responses.
+
+- When encountering an ``Accept`` header that does not contain any content types
+  that it knows how to work with, the server should not ever return a
+  ``300 Multiple Choices`` response, and instead return a ``406 Not Acceptable``
+  response.
+
+  - However, if choosing to use the endpoint configuration, you should prefer to
+    return a ``200 OK`` response in the expected content type for that endpoint.
+
+- When selecting an acceptable version, the server should choose the highest version
+  that the client supports, with the most expressive/featureful serialization format,
+  taking into account the specificity of the client requests as well as any
+  quality priority values they have expressed, and it should only use the
+  ``text/html`` content type as a last resort.
+
+It is recommended that clients:
+
+- Support all 3 content types described in this spec, using server-driven
+  content negotiation, for as long as they reasonably can.
+
+- When constructing an ``Accept`` header, include all of the content types
+  that you support.
+ + You should generally *not* include a quality priority value for your content + types, unless you have implementation specific reasons that you want the + server to take into account (for example, if you're using the standard library + HTML parser and you're worried that there may be some kinds of HTML responses + that you're unable to parse in some edge cases). + + The one exception to this recommendation is that it is recommended that you + *should* include a ``;q=0.01`` value on the legacy ``text/html`` content type, + unless it is the only content type that you are requesting. + +- Explicitly select what versions they are looking for, rather than using the + ``latest`` meta version during normal operation. + +- Check the ``Content-Type`` of the response and ensure it matches something + that you were expecting. + +.. _simple-repository-history: + +History +======= + +* September 2015: initial form of the HTML format, in :pep:`503` +* July 2016: Requires-Python metadata, in an update to :pep:`503` +* May 2019: "yank" support, in :pep:`592` +* July 2020: API versioning convention and metadata, and declaring the HTML + format as API v1, in :pep:`629` +* May 2021: providing package metadata independently from a package, in + :pep:`658` +* May 2022: initial form of the JSON format, with a mechanism for clients to + choose between them, and declaring both formats as API v1, in :pep:`691` +* October 2022: project versions and file size and upload-time in the JSON + format, in :pep:`700` +* June 2023: renaming the field which provides package metadata independently + from a package, in :pep:`714` +* November 2024: provenance metadata in the HTML and JSON formats, in :pep:`740` diff --git a/source/specifications/source-distribution-format.rst b/source/specifications/source-distribution-format.rst new file mode 100644 index 000000000..9ac93be7b --- /dev/null +++ b/source/specifications/source-distribution-format.rst @@ -0,0 +1,163 @@ + +.. 
_source-distribution-format: + +========================== +Source distribution format +========================== + +The current standard source distribution format is identified by the +presence of a :file:`pyproject.toml` file in the distribution archive. The layout +of such a distribution was originally specified in :pep:`517` and is formally +documented here. + +There is also the legacy source distribution format, implicitly defined by the +behaviour of ``distutils`` module in the standard library, when executing +:command:`setup.py sdist`. This document does not attempt to standardise this +format, except to note that if a legacy source distribution contains a +``PKG-INFO`` file using metadata version 2.2 or later, then it MUST follow +the rules applicable to source distributions defined in the metadata +specification. + +Source distributions are also known as *sdists* for short. + +.. _source-distribution-format-source-tree: + +Source trees +============ + +A *source tree* is a collection of files and directories -- like a version +control system checkout -- which contains a :file:`pyproject.toml` file that +can be used to build a source distribution from the contained files and +directories. :pep:`517` and :pep:`518` specify what is required to meet the +definition of what :file:`pyproject.toml` must contain for something to be +deemed a source tree. + +.. _source-distribution-format-sdist: + +Source distribution file name +============================= + +The file name of a sdist was standardised in :pep:`625`. The file name must be in +the form ``{name}-{version}.tar.gz``, where ``{name}`` is normalised according to +the same rules as for binary distributions (see :ref:`binary-distribution-format`), +and ``{version}`` is the canonicalized form of the project version (see +:ref:`version-specifiers`). + +The name and version components of the filename MUST match the values stored +in the metadata contained in the file. 
+ +Code that produces a source distribution file MUST give the file a name that matches +this specification. This includes the ``build_sdist`` hook of a +:term:`build backend `. + +Code that processes source distribution files MAY recognise source distribution files +by the ``.tar.gz`` suffix and the presence of precisely *one* hyphen in the filename. +Code that does this may then use the distribution name and version from the filename +without further verification. + +Source distribution file format +=============================== + +A ``.tar.gz`` source distribution (sdist) contains a single top-level directory +called ``{name}-{version}`` (e.g. ``foo-1.0``), containing the source files of +the package. The name and version MUST match the metadata stored in the file. +This directory must also contain a :file:`pyproject.toml` in the format defined in +:ref:`pyproject-toml-spec`, and a :file:`PKG-INFO` file containing +metadata in the format described in the :ref:`core-metadata` specification. The +metadata MUST conform to at least version 2.2 of the metadata specification. + +If the metadata version is 2.4 or greater, the source distribution MUST contain +any license files specified by the ``License-File`` field in the :file:`PKG-INFO` +at their respective paths relative to the root directory of the sdist +(containing the :file:`pyproject.toml` and the :file:`PKG-INFO` metadata). + +No other content of a sdist is required or defined. Build systems can store +whatever information they need in the sdist to build the project. + +The tarball should use the modern POSIX.1-2001 pax tar format, which specifies +UTF-8 based file names. In particular, source distribution files must be readable +using the standard library tarfile module with the open flag 'r:gz'. + + +.. 
_sdist-archive-features:
+
+Source distribution archive features
+====================================
+
+Because extracting tar files as-is is dangerous, and the results are
+platform-specific, archive features of source distributions are limited.
+
+Unpacking with the data filter
+------------------------------
+
+When extracting a source distribution, tools MUST either use
+:py:func:`tarfile.data_filter` (e.g. :py:meth:`TarFile.extractall(..., filter='data') <tarfile.TarFile.extractall>`), OR
+follow the *Unpacking without the data filter* section below.
+
+As an exception, on Python interpreters without :py:func:`hasattr(tarfile, 'data_filter') <hasattr>`
+(:pep:`706`), tools that normally use that filter (directly or indirectly)
+MAY warn the user and ignore this specification.
+The trade-off between usability (e.g. fully trusting the archive) and
+security (e.g. refusing to unpack) is left up to the tool in this case.
+
+
+Unpacking without the data filter
+---------------------------------
+
+Tools that do not use the ``data`` filter directly (e.g. for backwards
+compatibility, allowing additional features, or not using Python) MUST follow
+this section.
+(At the time of this writing, the ``data`` filter also follows this section,
+but it may get out of sync in the future.)
+
+The following files are invalid in an *sdist* archive.
+Upon encountering such an entry, tools SHOULD notify the user,
+MUST NOT unpack the entry, and MAY abort with a failure:
+
+- Files that would be placed outside the destination directory.
+- Links (symbolic or hard) pointing outside the destination directory.
+- Device files (including pipes).
+
+The following are also invalid. Tools MAY treat them as above,
+but are NOT REQUIRED to do so:
+
+- Files with a ``..`` component in the filename or link target.
+- Links pointing to a file that is not part of the archive.
+
+Tools MAY unpack links (symbolic or hard) as regular files,
+using content from the archive.
+ +When extracting *sdist* archives: + +- Leading slashes in file names MUST be dropped. + (This is nowadays standard behaviour for ``tar`` unpacking.) +- For each ``mode`` (Unix permission) bit, tools MUST either: + + - use the platform's default for a new file/directory (respectively), + - set the bit according to the archive, or + - use the bit from ``rw-r--r--`` (``0o644``) for non-executable files or + ``rwxr-xr-x`` (``0o755``) for executable files and directories. + +- High ``mode`` bits (setuid, setgid, sticky) MUST be cleared. +- It is RECOMMENDED to preserve the user *executable* bit. + + +Further hints +------------- + +Tool authors are encouraged to consider how *hints for further +verification* in ``tarfile`` documentation apply to their tool. + + +History +======= + +* November 2020: The original version of this specification was approved through + :pep:`643`. +* July 2021: Defined what a source tree is. +* September 2022: The filename of a source distribution was standardized through + :pep:`625`. +* August 2023: Source distribution archive features were standardized through + :pep:`721`. +* December 2024: License files inclusion into source distribution was standardized + through :pep:`639`. diff --git a/source/specifications/version-specifiers.rst b/source/specifications/version-specifiers.rst new file mode 100644 index 000000000..c0b544160 --- /dev/null +++ b/source/specifications/version-specifiers.rst @@ -0,0 +1,1273 @@ +.. highlight:: text + +.. _version-specifiers: + +================== +Version specifiers +================== + + +This specification describes a scheme for identifying versions of Python software +distributions, and declaring dependencies on particular versions. + + +Definitions +=========== + +The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", +"SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this +document are to be interpreted as described in :rfc:`2119`. 
+
+"Build tools" are automated tools intended to run on development systems,
+producing source and binary distribution archives. Build tools may also be
+invoked by integration tools in order to build software distributed as
+sdists rather than prebuilt binary archives.
+
+"Index servers" are active distribution registries which publish version and
+dependency metadata and place constraints on the permitted metadata.
+
+"Publication tools" are automated tools intended to run on development
+systems and upload source and binary distribution archives to index servers.
+
+"Installation tools" are integration tools specifically intended to run on
+deployment targets, consuming source and binary distribution archives from
+an index server or other designated location and deploying them to the target
+system.
+
+"Automated tools" is a collective term covering build tools, index servers,
+publication tools, integration tools and any other software that produces
+or consumes distribution version and dependency metadata.
+
+
+Version scheme
+==============
+
+Distributions are identified by a public version identifier which
+supports all defined version comparison operations.
+
+The version scheme is used both to describe the distribution version
+provided by a particular distribution archive, as well as to place
+constraints on the version of dependencies needed in order to build or
+run the software.
+
+
+.. _public-version-identifiers:
+
+Public version identifiers
+--------------------------
+
+The canonical public version identifiers MUST comply with the following
+scheme::
+
+    [N!]N(.N)*[{a|b|rc}N][.postN][.devN]
+
+Public version identifiers MUST NOT include leading or trailing whitespace.
+
+Public version identifiers MUST be unique within a given distribution.
+
+Installation tools SHOULD ignore any public versions which do not comply with
+this scheme but MUST also include the normalizations specified below.
+Installation tools MAY warn the user when non-compliant or ambiguous versions
+are detected.
+
+See also :ref:`version-specifiers-regex` which provides a regular
+expression to check strict conformance with the canonical format, as
+well as a more permissive regular expression accepting inputs that may
+require subsequent normalization.
+
+Public version identifiers are separated into up to five segments:
+
+* Epoch segment: ``N!``
+* Release segment: ``N(.N)*``
+* Pre-release segment: ``{a|b|rc}N``
+* Post-release segment: ``.postN``
+* Development release segment: ``.devN``
+
+Any given release will be a "final release", "pre-release", "post-release" or
+"developmental release" as defined in the following sections.
+
+All numeric components MUST be non-negative integers represented as sequences
+of ASCII digits.
+
+All numeric components MUST be interpreted and ordered according to their
+numeric value, not as text strings.
+
+All numeric components MAY be zero. Except as described below for the
+release segment, a numeric component of zero has no special significance
+aside from always being the lowest possible value in the version ordering.
+
+.. note::
+
+   Some hard to read version identifiers are permitted by this scheme in
+   order to better accommodate the wide range of versioning practices
+   across existing public and private Python projects.
+
+   Accordingly, some of the versioning practices which are technically
+   permitted by the specification are strongly discouraged for new projects. Where
+   this is the case, the relevant details are noted in the following
+   sections.
+
+
+.. _local-version-identifiers:
+
+Local version identifiers
+-------------------------
+
+Local version identifiers MUST comply with the following scheme::
+
+    <public version identifier>[+<local version label>]
+
+They consist of a normal public version identifier (as defined in the
+previous section), along with an arbitrary "local version label", separated
+from the public version identifier by a plus. 
Local version labels have +no specific semantics assigned, but some syntactic restrictions are imposed. + +Local version identifiers are used to denote fully API (and, if applicable, +ABI) compatible patched versions of upstream projects. For example, these +may be created by application developers and system integrators by applying +specific backported bug fixes when upgrading to a new upstream release would +be too disruptive to the application or other integrated system (such as a +Linux distribution). + +The inclusion of the local version label makes it possible to differentiate +upstream releases from potentially altered rebuilds by downstream +integrators. The use of a local version identifier does not affect the kind +of a release but, when applied to a source distribution, does indicate that +it may not contain the exact same code as the corresponding upstream release. + +To ensure local version identifiers can be readily incorporated as part of +filenames and URLs, and to avoid formatting inconsistencies in hexadecimal +hash representations, local version labels MUST be limited to the following +set of permitted characters: + +* ASCII letters (``[a-zA-Z]``) +* ASCII digits (``[0-9]``) +* periods (``.``) + +Local version labels MUST start and end with an ASCII letter or digit. + +Comparison and ordering of local versions considers each segment of the local +version (divided by a ``.``) separately. If a segment consists entirely of +ASCII digits then that section should be considered an integer for comparison +purposes and if a segment contains any ASCII letters then that segment is +compared lexicographically with case insensitivity. When comparing a numeric +and lexicographic segment, the numeric section always compares as greater than +the lexicographic segment. 
Additionally a local version with a great number of +segments will always compare as greater than a local version with fewer +segments, as long as the shorter local version's segments match the beginning +of the longer local version's segments exactly. + +An "upstream project" is a project that defines its own public versions. A +"downstream project" is one which tracks and redistributes an upstream project, +potentially backporting security and bug fixes from later versions of the +upstream project. + +Local version identifiers SHOULD NOT be used when publishing upstream +projects to a public index server, but MAY be used to identify private +builds created directly from the project source. Local +version identifiers SHOULD be used by downstream projects when releasing a +version that is API compatible with the version of the upstream project +identified by the public version identifier, but contains additional changes +(such as bug fixes). As the Python Package Index is intended solely for +indexing and hosting upstream projects, it MUST NOT allow the use of local +version identifiers. + +Source distributions using a local version identifier SHOULD provide the +``python.integrator`` extension metadata (as defined in :pep:`459`). + + +Final releases +-------------- + +A version identifier that consists solely of a release segment and optionally +an epoch identifier is termed a "final release". + +The release segment consists of one or more non-negative integer +values, separated by dots:: + + N(.N)* + +Final releases within a project MUST be numbered in a consistently +increasing fashion, otherwise automated tools will not be able to upgrade +them correctly. + +Comparison and ordering of release segments considers the numeric value +of each component of the release segment in turn. When comparing release +segments with different numbers of components, the shorter segment is +padded out with additional zeros as necessary. 
+
+While any number of additional components after the first are permitted
+under this scheme, the most common variants are to use two components
+("major.minor") or three components ("major.minor.micro").
+
+For example::
+
+    0.9
+    0.9.1
+    0.9.2
+    ...
+    0.9.10
+    0.9.11
+    1.0
+    1.0.1
+    1.1
+    2.0
+    2.0.1
+    ...
+
+A release series is any set of final release numbers that start with a
+common prefix. For example, ``3.3.1``, ``3.3.5`` and ``3.3.9.45`` are all
+part of the ``3.3`` release series.
+
+.. note::
+
+   ``X.Y`` and ``X.Y.0`` are not considered distinct release numbers, as
+   the release segment comparison rules implicitly expand the two component
+   form to ``X.Y.0`` when comparing it to any release segment that includes
+   three components.
+
+Date-based release segments are also permitted. An example of a date-based
+release scheme using the year and month of the release::
+
+    2012.4
+    2012.7
+    2012.10
+    2013.1
+    2013.6
+    ...
+
+
+.. _pre-release-versions:
+
+Pre-releases
+------------
+
+Some projects use an "alpha, beta, release candidate" pre-release cycle to
+support testing by their users prior to a final release.
+
+If used as part of a project's development cycle, these pre-releases are
+indicated by including a pre-release segment in the version identifier::
+
+    X.YaN   # Alpha release
+    X.YbN   # Beta release
+    X.YrcN  # Release Candidate
+    X.Y     # Final release
+
+A version identifier that consists solely of a release segment and a
+pre-release segment is termed a "pre-release".
+
+The pre-release segment consists of an alphabetical identifier for the
+pre-release phase, along with a non-negative integer value. Pre-releases for
+a given release are ordered first by phase (alpha, beta, release candidate)
+and then by the numerical component within that phase.
+
+Installation tools MAY accept both ``c`` and ``rc`` releases for a common
+release segment in order to handle some existing legacy distributions.
+ +Installation tools SHOULD interpret ``c`` versions as being equivalent to +``rc`` versions (that is, ``c1`` indicates the same version as ``rc1``). + +Build tools, publication tools and index servers SHOULD disallow the creation +of both ``rc`` and ``c`` releases for a common release segment. + + +Post-releases +------------- + +Some projects use post-releases to address minor errors in a final release +that do not affect the distributed software (for example, correcting an error +in the release notes). + +If used as part of a project's development cycle, these post-releases are +indicated by including a post-release segment in the version identifier:: + + X.Y.postN # Post-release + +A version identifier that includes a post-release segment without a +developmental release segment is termed a "post-release". + +The post-release segment consists of the string ``.post``, followed by a +non-negative integer value. Post-releases are ordered by their +numerical component, immediately following the corresponding release, +and ahead of any subsequent release. + +.. note:: + + The use of post-releases to publish maintenance releases containing + actual bug fixes is strongly discouraged. In general, it is better + to use a longer release number and increment the final component + for each maintenance release. + +Post-releases are also permitted for pre-releases:: + + X.YaN.postM # Post-release of an alpha release + X.YbN.postM # Post-release of a beta release + X.YrcN.postM # Post-release of a release candidate + +.. note:: + + Creating post-releases of pre-releases is strongly discouraged, as + it makes the version identifier difficult to parse for human readers. + In general, it is substantially clearer to simply create a new + pre-release by incrementing the numeric component. 
+ + +Developmental releases +---------------------- + +Some projects make regular developmental releases, and system packagers +(especially for Linux distributions) may wish to create early releases +directly from source control which do not conflict with later project +releases. + +If used as part of a project's development cycle, these developmental +releases are indicated by including a developmental release segment in the +version identifier:: + + X.Y.devN # Developmental release + +A version identifier that includes a developmental release segment is +termed a "developmental release". + +The developmental release segment consists of the string ``.dev``, +followed by a non-negative integer value. Developmental releases are ordered +by their numerical component, immediately before the corresponding release +(and before any pre-releases with the same release segment), and following +any previous release (including any post-releases). + +Developmental releases are also permitted for pre-releases and +post-releases:: + + X.YaN.devM # Developmental release of an alpha release + X.YbN.devM # Developmental release of a beta release + X.YrcN.devM # Developmental release of a release candidate + X.Y.postN.devM # Developmental release of a post-release + +Do note that development releases are considered a type of pre-release when +handling them. + +.. note:: + + While they may be useful for continuous integration purposes, publishing + developmental releases of pre-releases to general purpose public index + servers is strongly discouraged, as it makes the version identifier + difficult to parse for human readers. If such a release needs to be + published, it is substantially clearer to instead create a new + pre-release by incrementing the numeric component. 
+ + Developmental releases of post-releases are also strongly discouraged, + but they may be appropriate for projects which use the post-release + notation for full maintenance releases which may include code changes. + + +Version epochs +-------------- + +If included in a version identifier, the epoch appears before all other +components, separated from the release segment by an exclamation mark:: + + E!X.Y # Version identifier with epoch + +If no explicit epoch is given, the implicit epoch is ``0``. + +Most version identifiers will not include an epoch, as an explicit epoch is +only needed if a project *changes* the way it handles version numbering in +a way that means the normal version ordering rules will give the wrong +answer. For example, if a project is using date based versions like +``2014.04`` and would like to switch to semantic versions like ``1.0``, then +the new releases would be identified as *older* than the date based releases +when using the normal sorting scheme:: + + 1.0 + 1.1 + 2.0 + 2013.10 + 2014.04 + +However, by specifying an explicit epoch, the sort order can be changed +appropriately, as all versions from a later epoch are sorted after versions +from an earlier epoch:: + + 2013.10 + 2014.04 + 1!1.0 + 1!1.1 + 1!2.0 + + +.. _version-specifiers-normalization: + +Normalization +------------- + +In order to maintain better compatibility with existing versions there are a +number of "alternative" syntaxes that MUST be taken into account when parsing +versions. These syntaxes MUST be considered when parsing a version, however +they should be "normalized" to the standard syntax defined above. + + +Case sensitivity +~~~~~~~~~~~~~~~~ + +All ascii letters should be interpreted case insensitively within a version and +the normal form is lowercase. This allows versions such as ``1.1RC1`` which +would be normalized to ``1.1rc1``. 
+ + +Integer Normalization +~~~~~~~~~~~~~~~~~~~~~ + +All integers are interpreted via the ``int()`` built in and normalize to the +string form of the output. This means that an integer version of ``00`` would +normalize to ``0`` while ``09000`` would normalize to ``9000``. This does not +hold true for integers inside of an alphanumeric segment of a local version +such as ``1.0+foo0100`` which is already in its normalized form. + + +Pre-release separators +~~~~~~~~~~~~~~~~~~~~~~ + +Pre-releases should allow a ``.``, ``-``, or ``_`` separator between the +release segment and the pre-release segment. The normal form for this is +without a separator. This allows versions such as ``1.1.a1`` or ``1.1-a1`` +which would be normalized to ``1.1a1``. It should also allow a separator to +be used between the pre-release signifier and the numeral. This allows versions +such as ``1.0a.1`` which would be normalized to ``1.0a1``. + + +Pre-release spelling +~~~~~~~~~~~~~~~~~~~~ + +Pre-releases allow the additional spellings of ``alpha``, ``beta``, ``c``, +``pre``, and ``preview`` for ``a``, ``b``, ``rc``, ``rc``, and ``rc`` +respectively. This allows versions such as ``1.1alpha1``, ``1.1beta2``, or +``1.1c3`` which normalize to ``1.1a1``, ``1.1b2``, and ``1.1rc3``. In every +case the additional spelling should be considered equivalent to their normal +forms. + + +Implicit pre-release number +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Pre releases allow omitting the numeral in which case it is implicitly assumed +to be ``0``. The normal form for this is to include the ``0`` explicitly. This +allows versions such as ``1.2a`` which is normalized to ``1.2a0``. + + +Post release separators +~~~~~~~~~~~~~~~~~~~~~~~ + +Post releases allow a ``.``, ``-``, or ``_`` separator as well as omitting the +separator all together. The normal form of this is with the ``.`` separator. +This allows versions such as ``1.2-post2`` or ``1.2post2`` which normalize to +``1.2.post2``. 
Like the pre-release separator this also allows an optional +separator between the post release signifier and the numeral. This allows +versions like ``1.2.post-2`` which would normalize to ``1.2.post2``. + + +Post release spelling +~~~~~~~~~~~~~~~~~~~~~ + +Post-releases allow the additional spellings of ``rev`` and ``r``. This allows +versions such as ``1.0-r4`` which normalizes to ``1.0.post4``. As with the +pre-releases the additional spellings should be considered equivalent to their +normal forms. + + +Implicit post release number +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Post releases allow omitting the numeral in which case it is implicitly assumed +to be ``0``. The normal form for this is to include the ``0`` explicitly. This +allows versions such as ``1.2.post`` which is normalized to ``1.2.post0``. + + +Implicit post releases +~~~~~~~~~~~~~~~~~~~~~~ + +Post releases allow omitting the ``post`` signifier all together. When using +this form the separator MUST be ``-`` and no other form is allowed. This allows +versions such as ``1.0-1`` to be normalized to ``1.0.post1``. This particular +normalization MUST NOT be used in conjunction with the implicit post release +number rule. In other words, ``1.0-`` is *not* a valid version and it does *not* +normalize to ``1.0.post0``. + + +Development release separators +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Development releases allow a ``.``, ``-``, or a ``_`` separator as well as +omitting the separator all together. The normal form of this is with the ``.`` +separator. This allows versions such as ``1.2-dev2`` or ``1.2dev2`` which +normalize to ``1.2.dev2``. + + +Implicit development release number +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Development releases allow omitting the numeral in which case it is implicitly +assumed to be ``0``. The normal form for this is to include the ``0`` +explicitly. This allows versions such as ``1.2.dev`` which is normalized to +``1.2.dev0``. 
+ + +Local version segments +~~~~~~~~~~~~~~~~~~~~~~ + +With a local version, in addition to the use of ``.`` as a separator of +segments, the use of ``-`` and ``_`` is also acceptable. The normal form is +using the ``.`` character. This allows versions such as ``1.0+ubuntu-1`` to be +normalized to ``1.0+ubuntu.1``. + + +Preceding v character +~~~~~~~~~~~~~~~~~~~~~ + +In order to support the common version notation of ``v1.0`` versions may be +preceded by a single literal ``v`` character. This character MUST be ignored +for all purposes and should be omitted from all normalized forms of the +version. The same version with and without the ``v`` is considered equivalent. + + +Leading and Trailing Whitespace +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Leading and trailing whitespace must be silently ignored and removed from all +normalized forms of a version. This includes ``" "``, ``\t``, ``\n``, ``\r``, +``\f``, and ``\v``. This allows accidental whitespace to be handled sensibly, +such as a version like ``1.0\n`` which normalizes to ``1.0``. + + +Examples of compliant version schemes +------------------------------------- + +The standard version scheme is designed to encompass a wide range of +identification practices across public and private Python projects. In +practice, a single project attempting to use the full flexibility offered +by the scheme would create a situation where human users had difficulty +figuring out the relative order of versions, even though the rules above +ensure all compliant tools will order them consistently. + +The following examples illustrate a small selection of the different +approaches projects may choose to identify their releases, while still +ensuring that the "latest release" and the "latest stable release" can +be easily determined, both by human users and automated tools. + +Simple "major.minor" versioning:: + + 0.1 + 0.2 + 0.3 + 1.0 + 1.1 + ... + +Simple "major.minor.micro" versioning:: + + 1.1.0 + 1.1.1 + 1.1.2 + 1.2.0 + ... 
+
+"major.minor" versioning with alpha, beta and candidate
+pre-releases::
+
+    0.9
+    1.0a1
+    1.0a2
+    1.0b1
+    1.0rc1
+    1.0
+    1.1a1
+    ...
+
+"major.minor" versioning with developmental releases, release candidates
+and post-releases for minor corrections::
+
+    0.9
+    1.0.dev1
+    1.0.dev2
+    1.0.dev3
+    1.0.dev4
+    1.0c1
+    1.0c2
+    1.0
+    1.0.post1
+    1.1.dev1
+    ...
+
+Date based releases, using an incrementing serial within each year, skipping
+zero::
+
+    2012.1
+    2012.2
+    2012.3
+    ...
+    2012.15
+    2013.1
+    2013.2
+    ...
+
+
+Summary of permitted suffixes and relative ordering
+---------------------------------------------------
+
+.. note::
+
+   This section is intended primarily for authors of tools that
+   automatically process distribution metadata, rather than developers
+   of Python distributions deciding on a versioning scheme.
+
+The epoch segment of version identifiers MUST be sorted according to the
+numeric value of the given epoch. If no epoch segment is present, the
+implicit numeric value is ``0``.
+
+The release segment of version identifiers MUST be sorted in
+the same order as Python's tuple sorting when the normalized release segment is
+parsed as follows::
+
+    tuple(map(int, release_segment.split(".")))
+
+All release segments involved in the comparison MUST be converted to a
+consistent length by padding shorter segments with zeros as needed.
+
+Within a numeric release (``1.0``, ``2.7.3``), the following suffixes
+are permitted and MUST be ordered as shown::
+
+    .devN, aN, bN, rcN, <no suffix>, .postN
+
+Note that ``c`` is considered to be semantically equivalent to ``rc`` and must
+be sorted as if it were ``rc``. Tools MAY reject the case of having the same
+``N`` for both a ``c`` and a ``rc`` in the same release segment as ambiguous
+and remain in compliance with the specification.
+
+Within an alpha (``1.0a1``), beta (``1.0b1``), or release candidate
+(``1.0rc1``, ``1.0c1``), the following suffixes are permitted and MUST be
+ordered as shown::
+
+    .devN, <no suffix>, .postN
+
+Within a post-release (``1.0.post1``), the following suffixes are permitted
+and MUST be ordered as shown::
+
+    .devN, <no suffix>
+
+Note that ``devN`` and ``postN`` MUST always be preceded by a dot, even
+when used immediately following a numeric version (e.g. ``1.0.dev456``,
+``1.0.post1``).
+
+Within a pre-release, post-release or development release segment with a
+shared prefix, ordering MUST be by the value of the numeric component.
+
+The following example covers many of the possible combinations::
+
+    1.dev0
+    1.0.dev456
+    1.0a1
+    1.0a2.dev456
+    1.0a12.dev456
+    1.0a12
+    1.0b1.dev456
+    1.0b2
+    1.0b2.post345.dev456
+    1.0b2.post345
+    1.0rc1.dev456
+    1.0rc1
+    1.0
+    1.0+abc.5
+    1.0+abc.7
+    1.0+5
+    1.0.post456.dev34
+    1.0.post456
+    1.0.15
+    1.1.dev1
+
+
+Version ordering across different metadata versions
+---------------------------------------------------
+
+Metadata v1.0 (:pep:`241`) and metadata v1.1 (:pep:`314`) do not specify a standard
+version identification or ordering scheme. However metadata v1.2 (:pep:`345`)
+does specify a scheme which is defined in :pep:`386`.
+
+Due to the nature of the simple installer API it is not possible for an
+installer to be aware of which metadata version a particular distribution was
+using. Additionally installers required the ability to create a reasonably
+prioritized list that includes all, or as many as possible, versions of
+a project to determine which versions it should install. These requirements
+necessitate a standardization across one parsing mechanism to be used for all
+versions of a project.
+
+Due to the above, this specification MUST be used for all versions of metadata and
+supersedes :pep:`386` even for metadata v1.2. 
Tools SHOULD ignore any versions +which cannot be parsed by the rules in this specification, but MAY fall back to +implementation defined version parsing and ordering schemes if no versions +complying with this specification are available. + +Distribution users may wish to explicitly remove non-compliant versions from +any private package indexes they control. + + +Compatibility with other version schemes +---------------------------------------- + +Some projects may choose to use a version scheme which requires +translation in order to comply with the public version scheme defined in +this specification. In such cases, the project specific version can be stored in the +metadata while the translated public version is published in the version field. + +This allows automated distribution tools to provide consistently correct +ordering of published releases, while still allowing developers to use +the internal versioning scheme they prefer for their projects. + + +Semantic versioning +~~~~~~~~~~~~~~~~~~~ + +`Semantic versioning`_ is a popular version identification scheme that is +more prescriptive than this specification regarding the significance of different +elements of a release number. Even if a project chooses not to abide by +the details of semantic versioning, the scheme is worth understanding as +it covers many of the issues that can arise when depending on other +distributions, and when publishing a distribution that others rely on. + +The "Major.Minor.Patch" (described in this specification as "major.minor.micro") +aspects of semantic versioning (clauses 1-8 in the 2.0.0 specification) +are fully compatible with the version scheme defined in this specification, and abiding +by these aspects is encouraged. + +Semantic versions containing a hyphen (pre-releases - clause 10) or a +plus sign (builds - clause 11) are *not* compatible with this specification +and are not permitted in the public version field. 
+
+One possible mechanism to translate such semantic versioning based source
+labels to compatible public versions is to use the ``.devN`` suffix to
+specify the appropriate version order.
+
+Specific build information may also be included in local version labels.
+
+.. _Semantic versioning: https://semver.org/
+
+
+DVCS based version labels
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Many build tools integrate with distributed version control systems like
+Git and Mercurial in order to add an identifying hash to the version
+identifier. As hashes cannot be ordered reliably such versions are not
+permitted in the public version field.
+
+As with semantic versioning, the public ``.devN`` suffix may be used to
+uniquely identify such releases for publication, while the original DVCS based
+label can be stored in the project metadata.
+
+Identifying hash information may also be included in local version labels.
+
+
+Olson database versioning
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``pytz`` project inherits its versioning scheme from the corresponding
+Olson timezone database versioning scheme: the year followed by a lowercase
+character indicating the version of the database within that year.
+
+This can be translated to a compliant public version identifier as
+``<year>.<serial>``, where the serial starts at zero or one (for the
+'a' release) and is incremented with each subsequent database
+update within the year.
+
+As with other translated version identifiers, the corresponding Olson
+database version could be recorded in the project metadata.
+
+
+Version specifiers
+==================
+
+A version specifier consists of a series of version clauses, separated by
+commas. 
For example:: + + ~= 0.9, >= 1.0, != 1.3.4.*, < 2.0 + +The comparison operator determines the kind of version clause: + +* ``~=``: `Compatible release`_ clause +* ``==``: `Version matching`_ clause +* ``!=``: `Version exclusion`_ clause +* ``<=``, ``>=``: `Inclusive ordered comparison`_ clause +* ``<``, ``>``: `Exclusive ordered comparison`_ clause +* ``===``: `Arbitrary equality`_ clause. + +The comma (",") is equivalent to a logical **and** operator: a candidate +version must match all given version clauses in order to match the +specifier as a whole. + +Whitespace between a conditional operator and the following version +identifier is optional, as is the whitespace around the commas. + +When multiple candidate versions match a version specifier, the preferred +version SHOULD be the latest version as determined by the consistent +ordering defined by the standard `Version scheme`_. Whether or not +pre-releases are considered as candidate versions SHOULD be handled as +described in `Handling of pre-releases`_. + +Except where specifically noted below, local version identifiers MUST NOT be +permitted in version specifiers, and local version labels MUST be ignored +entirely when checking if candidate versions match a given version +specifier. + + +.. _version-specifiers-compatible-release: + +Compatible release +------------------ + +A compatible release clause consists of the compatible release operator ``~=`` +and a version identifier. It matches any candidate version that is expected +to be compatible with the specified version. + +The specified version identifier must be in the standard format described in +`Version scheme`_. Local version identifiers are NOT permitted in this +version specifier. + +For a given release identifier ``V.N``, the compatible release clause is +approximately equivalent to the pair of comparison clauses:: + + >= V.N, == V.* + +This operator MUST NOT be used with a single segment version number such as +``~=1``. 
+ +For example, the following groups of version clauses are equivalent:: + + ~= 2.2 + >= 2.2, == 2.* + + ~= 1.4.5 + >= 1.4.5, == 1.4.* + +If a pre-release, post-release or developmental release is named in a +compatible release clause as ``V.N.suffix``, then the suffix is ignored +when determining the required prefix match:: + + ~= 2.2.post3 + >= 2.2.post3, == 2.* + + ~= 1.4.5a4 + >= 1.4.5a4, == 1.4.* + +The padding rules for release segment comparisons means that the assumed +degree of forward compatibility in a compatible release clause can be +controlled by appending additional zeros to the version specifier:: + + ~= 2.2.0 + >= 2.2.0, == 2.2.* + + ~= 1.4.5.0 + >= 1.4.5.0, == 1.4.5.* + + +Version matching +---------------- + +A version matching clause includes the version matching operator ``==`` +and a version identifier. + +The specified version identifier must be in the standard format described in +`Version scheme`_, but a trailing ``.*`` is permitted on public version +identifiers as described below. + +By default, the version matching operator is based on a strict equality +comparison: the specified version must be exactly the same as the requested +version. The *only* substitution performed is the zero padding of the +release segment to ensure the release segments are compared with the same +length. + +Whether or not strict version matching is appropriate depends on the specific +use case for the version specifier. Automated tools SHOULD at least issue +warnings and MAY reject them entirely when strict version matches are used +inappropriately. + +Prefix matching may be requested instead of strict comparison, by appending +a trailing ``.*`` to the version identifier in the version matching clause. +This means that additional trailing segments will be ignored when +determining whether or not a version identifier matches the clause. 
If the +specified version includes only a release segment, then trailing components +(or the lack thereof) in the release segment are also ignored. + +For example, given the version ``1.1.post1``, the following clauses would +match or not as shown:: + + == 1.1 # Not equal, so 1.1.post1 does not match clause + == 1.1.post1 # Equal, so 1.1.post1 matches clause + == 1.1.* # Same prefix, so 1.1.post1 matches clause + +For purposes of prefix matching, the pre-release segment is considered to +have an implied preceding ``.``, so given the version ``1.1a1``, the +following clauses would match or not as shown:: + + == 1.1 # Not equal, so 1.1a1 does not match clause + == 1.1a1 # Equal, so 1.1a1 matches clause + == 1.1.* # Same prefix, so 1.1a1 matches clause if pre-releases are requested + +An exact match is also considered a prefix match (this interpretation is +implied by the usual zero padding rules for the release segment of version +identifiers). Given the version ``1.1``, the following clauses would +match or not as shown:: + + == 1.1 # Equal, so 1.1 matches clause + == 1.1.0 # Zero padding expands 1.1 to 1.1.0, so it matches clause + == 1.1.dev1 # Not equal (dev-release), so 1.1 does not match clause + == 1.1a1 # Not equal (pre-release), so 1.1 does not match clause + == 1.1.post1 # Not equal (post-release), so 1.1 does not match clause + == 1.1.* # Same prefix, so 1.1 matches clause + +It is invalid to have a prefix match containing a development or local release +such as ``1.0.dev1.*`` or ``1.0+foo1.*``. If present, the development release +segment is always the final segment in the public version, and the local version +is ignored for comparison purposes, so using either in a prefix match wouldn't +make any sense. + +The use of ``==`` (without at least the wildcard suffix) when defining +dependencies for published distributions is strongly discouraged as it +greatly complicates the deployment of security fixes. 
The strict version +comparison operator is intended primarily for use when defining +dependencies for repeatable *deployments of applications* while using +a shared distribution index. + +If the specified version identifier is a public version identifier (no +local version label), then the local version label of any candidate versions +MUST be ignored when matching versions. + +If the specified version identifier is a local version identifier, then the +local version labels of candidate versions MUST be considered when matching +versions, with the public version identifier being matched as described +above, and the local version label being checked for equivalence using a +strict string equality comparison. + + +Version exclusion +----------------- + +A version exclusion clause includes the version exclusion operator ``!=`` +and a version identifier. + +The allowed version identifiers and comparison semantics are the same as +those of the `Version matching`_ operator, except that the sense of any +match is inverted. + +For example, given the version ``1.1.post1``, the following clauses would +match or not as shown:: + + != 1.1 # Not equal, so 1.1.post1 matches clause + != 1.1.post1 # Equal, so 1.1.post1 does not match clause + != 1.1.* # Same prefix, so 1.1.post1 does not match clause + + +Inclusive ordered comparison +---------------------------- + +An inclusive ordered comparison clause includes a comparison operator and a +version identifier, and will match any version where the comparison is correct +based on the relative position of the candidate version and the specified +version given the consistent ordering defined by the standard +`Version scheme`_. + +The inclusive ordered comparison operators are ``<=`` and ``>=``. + +As with version matching, the release segment is zero padded as necessary to +ensure the release segments are compared with the same length. + +Local version identifiers are NOT permitted in this version specifier. 
+ + +Exclusive ordered comparison +---------------------------- + +The exclusive ordered comparisons ``>`` and ``<`` are similar to the inclusive +ordered comparisons in that they rely on the relative position of the candidate +version and the specified version given the consistent ordering defined by the +standard `Version scheme`_. However, they specifically exclude pre-releases, +post-releases, and local versions of the specified version. + +The exclusive ordered comparison ``>V`` **MUST NOT** allow a post-release +of the given version unless ``V`` itself is a post release. You may mandate +that releases are later than a particular post release, including additional +post releases, by using ``>V.postN``. For example, ``>1.7`` will allow +``1.7.1`` but not ``1.7.0.post1`` and ``>1.7.post2`` will allow ``1.7.1`` +and ``1.7.0.post3`` but not ``1.7.0``. + +The exclusive ordered comparison ``>V`` **MUST NOT** match a local version of +the specified version. + +The exclusive ordered comparison ``=`` entry as +part of the URL fragment. + +For version control references, the ``VCS+protocol`` scheme SHOULD be +used to identify both the version control system and the secure transport, +and a version control system with hash based commit identifiers SHOULD be +used. Automated tools MAY omit warnings about missing hashes for version +control systems that do not provide hash based commit identifiers. + +To handle version control systems that do not support including commit or +tag references directly in the URL, that information may be appended to the +end of the URL using the ``@`` or the ``@#`` +notation. + +.. note:: + + This isn't *quite* the same as the existing VCS reference notation + supported by pip. Firstly, the distribution name is moved in front rather + than embedded as part of the URL. 
Secondly, the commit hash is included + even when retrieving based on a tag, in order to meet the requirement + above that *every* link should include a hash to make things harder to + forge (creating a malicious repo with a particular tag is easy, creating + one with a specific *hash*, less so). + +Remote URL examples:: + + pip @ https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686 + pip @ git+https://github.com/pypa/pip.git@7921be1537eac1e97bc40179a57f0349c2aee67d + pip @ git+https://github.com/pypa/pip.git@1.3.1#7921be1537eac1e97bc40179a57f0349c2aee67d + + +File URLs +--------- + +File URLs take the form of ``file:///``. If the ```` is +omitted it is assumed to be ``localhost`` and even if the ```` is omitted +the third slash MUST still exist. The ```` defines what the file path on +the filesystem that is to be accessed. + +On the various \*nix operating systems the only allowed values for ```` +is for it to be omitted, ``localhost``, or another FQDN that the current +machine believes matches its own host. In other words, on \*nix the ``file://`` +scheme can only be used to access paths on the local machine. + +On Windows the file format should include the drive letter if applicable as +part of the ```` (e.g. ``file:///c:/path/to/a/file``). Unlike \*nix on +Windows the ```` parameter may be used to specify a file residing on a +network share. In other words, in order to translate ``\\machine\volume\file`` +to a ``file://`` url, it would end up as ``file://machine/volume/file``. For +more information on ``file://`` URLs on Windows see +`MSDN `_. + + + +Summary of differences from pkg_resources.parse_version +======================================================= + +* Note: this comparison is to ``pkg_resources.parse_version`` as it existed at + the time :pep:`440` was written. After the PEP was accepted, setuptools 6.0 and + later versions adopted the behaviour described here. 
+ +* Local versions sort differently, this specification requires that they sort as greater + than the same version without a local version, whereas + ``pkg_resources.parse_version`` considers it a pre-release marker. + +* This specification purposely restricts the syntax which constitutes a valid version + while ``pkg_resources.parse_version`` attempts to provide some meaning from + *any* arbitrary string. + +* ``pkg_resources.parse_version`` allows arbitrarily deeply nested version + signifiers like ``1.0.dev1.post1.dev5``. This specification however allows only a + single use of each type and they must exist in a certain order. + + + +.. _version-specifiers-regex: + +Appendix: Parsing version strings with regular expressions +========================================================== + +As noted earlier in the :ref:`public-version-identifiers` section, +published version identifiers SHOULD use the canonical format. This +section provides regular expressions that can be used to test whether a +version is already in that form, and if it's not, extract the various +components for subsequent normalization. + +To test whether a version identifier is in the canonical format, you can use +the following function: + +.. code-block:: python + + import re + def is_canonical(version): + return re.match(r'^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$', version) is not None + +To extract the components of a version identifier, use the following regular +expression (as defined by the `packaging `_ +project): + +.. code-block:: python + + VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
<pre>                                          # pre-release
+                [-_\.]?
+                (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+                [-_\.]?
+                (?P<pre_n>[0-9]+)?
+            )?
+            (?P<post>                                         # post release
+                (?:-(?P<post_n1>[0-9]+))
+                |
+                (?:
+                    [-_\.]?
+                    (?P<post_l>post|rev|r)
+                    [-_\.]?
+                    (?P<post_n2>[0-9]+)?
+                )
+            )?
+            (?P<dev>                                          # dev release
+                [-_\.]?
+                (?P<dev_l>dev)
+                [-_\.]?
+                (?P<dev_n>[0-9]+)?
+            )?
+        )
+        (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+    """
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+
+
+History
+=======
+
+- August 2014: This specification was approved through :pep:`440`.
+- May 2025: Clarify that development releases are a form of pre-release when
+  they are handled.
diff --git a/source/specifications/virtual-environments.rst b/source/specifications/virtual-environments.rst
new file mode 100644
index 000000000..c73a28c27
--- /dev/null
+++ b/source/specifications/virtual-environments.rst
@@ -0,0 +1,56 @@
+
+.. _virtual-environments:
+
+===========================
+Python Virtual Environments
+===========================
+
+For Python 3.3 and later versions, :pep:`405` introduced interpreter level support
+for the concept of "Python Virtual Environments". Each virtual environment has
+its own Python binary (allowing creation of environments with various Python
+versions) and can have its own independent set of installed Python packages in
+its site directories, but shares the standard library with the base installed
+Python. While the concept of virtual environments existed prior to this update,
+there was no previously standardised mechanism for declaring or discovering them.
+
+
+Runtime detection of virtual environments
+=========================================
+
+At runtime, virtual environments can be identified by virtue of
+:py:data:`sys.prefix` (the filesystem location of the running interpreter)
+having a different value from :py:data:`sys.base_prefix` (the default filesystem
+location of the standard library directories).
+
+:ref:`venv-explanation` in the Python standard library documentation for the
+:py:mod:`venv` module covers this along with the concept of "activating" a
+virtual environment in an interactive operating system shell (this activation
+step is optional and hence the changes it makes can't be reliably used to
+detect whether a Python program is running in a virtual environment or not).
+
+
+Declaring installation environments as Python virtual environments
+==================================================================
+
+As described in :pep:`405`, a Python virtual environment in its simplest form
+consists of nothing more than a copy or symlink of the Python binary accompanied
+by a ``site-packages`` directory and a ``pyvenv.cfg`` file with a ``home`` key
+that indicates where to find the Python standard library modules.
+
+While designed to meet the needs of the standard :py:mod:`venv` module, this
+split installation and ``pyvenv.cfg`` file approach can be used by *any*
+Python installation provider that desires Python-specific tools to be aware that
+they are already operating in a virtual environment and no further environment
+nesting is required or desired.
+
+Even in the absence of a ``pyvenv.cfg`` file, any approach (e.g.
+``sitecustomize.py``, patching the installed Python runtime) that results in
+:py:data:`sys.prefix` and :py:data:`sys.base_prefix` having different values,
+while still providing a matching default package installation scheme in
+:py:mod:`sysconfig`, will be detected and behave as a Python virtual environment.
+
+
+History
+=======
+
+- May 2012: This specification was approved through :pep:`405`.
diff --git a/source/specifications/well-known-project-urls.rst b/source/specifications/well-known-project-urls.rst
new file mode 100644
index 000000000..30fefd12b
--- /dev/null
+++ b/source/specifications/well-known-project-urls.rst
@@ -0,0 +1,176 @@
+.. _`well-known-project-urls`:
+
+===================================
+Well-known Project URLs in Metadata
+===================================
+
+.. important::
+
+    This document is primarily of interest to metadata *consumers*,
+    who should use the normalization rules and well-known list below
+    to make their presentation of project URLs consistent across the
+    Python ecosystem.
+
+    Metadata *producers* (such as build tools and individual package
+    maintainers) may continue to use any labels they please, within the
+    overall ``Project-URL`` length restrictions. However, when possible, users are
+    *encouraged* to pick meaningful labels that normalize to well-known
+    labels.
+
+.. note::
+
+    See :ref:`Writing your pyproject.toml - urls `
+    for user-oriented guidance on choosing project URL labels in your package's
+    metadata.
+
+.. note:: This specification was originally defined in :pep:`753`.
+
+:pep:`753` deprecates the :ref:`core-metadata-home-page` and
+:ref:`core-metadata-download-url` metadata fields in favor of
+:ref:`core-metadata-project-url`, and defines a normalization and
+lookup procedure for determining whether a ``Project-URL`` is
+"well-known," i.e. has the semantics assigned to ``Home-page``,
+``Download-URL``, or other common project URLs.
+
+This allows indices (such as the Python Package Index) and other downstream
+metadata consumers to present project URLs in a
+consistent manner.
+
+.. _project-url-label-normalization:
+
+Label normalization
+===================
+
+.. note::
+
+    Label normalization is performed by metadata *consumers*, not metadata
+    producers.
+
+To determine whether a ``Project-URL`` label is "well-known," metadata
+consumers should normalize the label before comparing it to the
+:ref:`list of well-known labels `.
+
+The normalization procedure for ``Project-URL`` labels is defined
+by the following Python function:
+
+.. code-block:: python
+
+    import string
+
+    def normalize_label(label: str) -> str:
+        chars_to_remove = string.punctuation + string.whitespace
+        removal_map = str.maketrans("", "", chars_to_remove)
+        return label.translate(removal_map).lower()
+
+In plain language: a label is *normalized* by deleting all ASCII punctuation
+and whitespace, and then converting the result to lowercase.
+
+The following table shows examples of labels before (raw) and after
+normalization:
+
+.. list-table::
+    :header-rows: 1
+
+    * - Raw
+      - Normalized
+    * - ``Homepage``
+      - ``homepage``
+    * - ``Home-page``
+      - ``homepage``
+    * - ``Home page``
+      - ``homepage``
+    * - ``Change_Log``
+      - ``changelog``
+    * - ``What's New?``
+      - ``whatsnew``
+    * - ``github``
+      - ``github``
+
+.. _well-known-labels:
+
+Well-known labels
+=================
+
+.. note::
+
+    The list of well-known labels is a living standard, maintained as part of
+    this document.
+
+The following table lists labels that are well-known for the purpose of
+specializing the presentation of ``Project-URL`` metadata:
+
+.. list-table::
+   :header-rows: 1
+
+   * - Label (Human-readable equivalent)
+     - Description
+     - Aliases
+   * - ``homepage`` (Homepage)
+     - The project's home page
+     - *(none)*
+   * - ``source`` (Source Code)
+     - The project's hosted source code or repository
+     - ``repository``, ``sourcecode``, ``github``
+   * - ``download`` (Download)
+     - A download URL for the current distribution, equivalent to ``Download-URL``
+     - *(none)*
+   * - ``changelog`` (Changelog)
+     - The project's comprehensive changelog
+     - ``changes``, ``whatsnew``, ``history``
+   * - ``releasenotes`` (Release Notes)
+     - The project's curated release notes
+     - *(none)*
+   * - ``documentation`` (Documentation)
+     - The project's online documentation
+     - ``docs``
+   * - ``issues`` (Issue Tracker)
+     - The project's bug tracker
+     - ``bugs``, ``issue``, ``tracker``, ``issuetracker``, ``bugtracker``
+   * - ``funding`` (Funding)
+     - Funding Information
+     - ``sponsor``, ``donate``, ``donation``
+
+Package metadata consumers may choose to render aliased labels the same as
+their "parent" well known label, or further specialize them.
+
+Example behavior
+================
+
+The following shows the flow of project URL metadata from
+``pyproject.toml`` to core metadata to a potential index presentation:
+
+.. code-block:: toml
+    :caption: Example project URLs in standard configuration
+
+    [project.urls]
+    "Home Page" = "https://example.com/"
+    DOCUMENTATION = "https://readthedocs.org/"
+    Repository = "https://upstream.example.com/me/spam.git"
+    GitHub = "https://github.com/example/spam"
+
+.. code-block:: email
+    :caption: Core metadata representation
+
+    Project-URL: Home page, https://example.com
+    Project-URL: DOCUMENTATION, https://readthedocs.org
+    Project-URL: Repository, https://upstream.example.com/me/spam.git
+    Project-URL: GitHub, https://github.com/example/spam
+
+.. code-block:: text
+    :caption: Potential rendering
+
+    Homepage: https://example.com
+    Documentation: https://readthedocs.org
+    Source Code: https://upstream.example.com/me/spam.git
+    Source Code (GitHub): https://github.com/example/spam
+
+Observe that the core metadata appears in the form provided by the user
+(since metadata *producers* do not perform normalization), but the
+metadata *consumer* normalizes and identifies appropriate
+human-readable equivalents based on the normalized form:
+
+* ``Home page`` becomes ``homepage``, which is rendered as ``Homepage``
+* ``DOCUMENTATION`` becomes ``documentation``, which is rendered as ``Documentation``
+* ``Repository`` becomes ``repository``, which is rendered as ``Source Code``
+* ``GitHub`` becomes ``github``, which is rendered as ``Source Code (GitHub)``
+  (as a specialization of ``Source Code``)
diff --git a/source/support.rst b/source/support.rst
index 5f81705f5..7dc945eef 100644
--- a/source/support.rst
+++ b/source/support.rst
@@ -2,11 +2,10 @@
 How to Get Support
 ==================
 
-:Page Status: Complete
-:Last Reviewed: 2015-09-08
-
 For support related to a specific project, see the links on the :doc:`Projects
 ` page.
 
-For something more general, or when you're just not sure, use the `distutils-sig
-`_ list.
+For something more general, or when you're just not sure, please
+`open an issue `_ on
+the `packaging-problems `_
+repository on GitHub.
diff --git a/source/themes/pug/static/pug.css b/source/themes/pug/static/pug.css
deleted file mode 100644
index 6672f2ea7..000000000
--- a/source/themes/pug/static/pug.css
+++ /dev/null
@@ -1,10 +0,0 @@
-.rst-content table.field-list .field-name {
-    background-color: #eee;
-    border: 1px solid #ddd;
-    padding: 5px;
-}
-
-.rst-content table.field-list .field-body {
-    border: 1px solid #ddd;
-    padding: 5px;
-}
\ No newline at end of file
diff --git a/source/themes/pug/templates/page.html b/source/themes/pug/templates/page.html
deleted file mode 100644
index e992de02d..000000000
--- a/source/themes/pug/templates/page.html
+++ /dev/null
@@ -1,2 +0,0 @@
-{% extends "!page.html" %}
-{% set css_files = css_files + ["_static/pug.css"] %}
diff --git a/source/themes/pydoctheme/_static/sidebar.js b/source/themes/pydoctheme/_static/sidebar.js
deleted file mode 100644
index e8d58f4bf..000000000
--- a/source/themes/pydoctheme/_static/sidebar.js
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * sidebar.js
- * ~~~~~~~~~~
- *
- * This script makes the Sphinx sidebar collapsible and implements intelligent
- * scrolling.
- *
- * .sphinxsidebar contains .sphinxsidebarwrapper.  This script adds in
- * .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton used to
- * collapse and expand the sidebar.
- *
- * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden and the
- * width of the sidebar and the margin-left of the document are decreased.
- * When the sidebar is expanded the opposite happens.  This script saves a
- * per-browser/per-session cookie used to remember the position of the sidebar
- * among the pages.  Once the browser is closed the cookie is deleted and the
- * position reset to the default (expanded).
- *
- * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
- * :license: BSD, see LICENSE for details.
- *
- */
-
-$(function() {
-  // global elements used by the functions.
-  // the 'sidebarbutton' element is defined as global after its
-  // creation, in the add_sidebar_button function
-  var jwindow = $(window);
-  var jdocument = $(document);
-  var bodywrapper = $('.bodywrapper');
-  var sidebar = $('.sphinxsidebar');
-  var sidebarwrapper = $('.sphinxsidebarwrapper');
-
-  // original margin-left of the bodywrapper and width of the sidebar
-  // with the sidebar expanded
-  var bw_margin_expanded = bodywrapper.css('margin-left');
-  var ssb_width_expanded = sidebar.width();
-
-  // margin-left of the bodywrapper and width of the sidebar
-  // with the sidebar collapsed
-  var bw_margin_collapsed = '.8em';
-  var ssb_width_collapsed = '.8em';
-
-  // colors used by the current theme
-  var dark_color = '#AAAAAA';
-  var light_color = '#CCCCCC';
-
-  function get_viewport_height() {
-    if (window.innerHeight)
-      return window.innerHeight;
-    else
-      return jwindow.height();
-  }
-
-  function sidebar_is_collapsed() {
-    return sidebarwrapper.is(':not(:visible)');
-  }
-
-  function toggle_sidebar() {
-    if (sidebar_is_collapsed())
-      expand_sidebar();
-    else
-      collapse_sidebar();
-    // adjust the scrolling of the sidebar
-    scroll_sidebar();
-  }
-
-  function collapse_sidebar() {
-    sidebarwrapper.hide();
-    sidebar.css('width', ssb_width_collapsed);
-    bodywrapper.css('margin-left', bw_margin_collapsed);
-    sidebarbutton.css({
-        'margin-left': '0',
-        'height': bodywrapper.height(),
-        'border-radius': '5px'
-    });
-    sidebarbutton.find('span').text('»');
-    sidebarbutton.attr('title', _('Expand sidebar'));
-    document.cookie = 'sidebar=collapsed';
-  }
-
-  function expand_sidebar() {
-    bodywrapper.css('margin-left', bw_margin_expanded);
-    sidebar.css('width', ssb_width_expanded);
-    sidebarwrapper.show();
-    sidebarbutton.css({
-        'margin-left': ssb_width_expanded-12,
-        'height': bodywrapper.height(),
-        'border-radius': '0 5px 5px 0'
-    });
-    sidebarbutton.find('span').text('«');
-    sidebarbutton.attr('title', _('Collapse sidebar'));
-    //sidebarwrapper.css({'padding-top':
-    //  Math.max(window.pageYOffset - sidebarwrapper.offset().top, 10)});
-    document.cookie = 'sidebar=expanded';
-  }
-
-  function add_sidebar_button() {
-    sidebarwrapper.css({
-        'float': 'left',
-        'margin-right': '0',
-        'width': ssb_width_expanded - 28
-    });
-    // create the button
-    sidebar.append(
-      '
«
' - ); - var sidebarbutton = $('#sidebarbutton'); - // find the height of the viewport to center the '<<' in the page - var viewport_height = get_viewport_height(); - var sidebar_offset = sidebar.offset().top; - var sidebar_height = Math.max(bodywrapper.height(), sidebar.height()); - sidebarbutton.find('span').css({ - 'display': 'block', - 'position': 'fixed', - 'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10 - }); - - sidebarbutton.click(toggle_sidebar); - sidebarbutton.attr('title', _('Collapse sidebar')); - sidebarbutton.css({ - 'border-radius': '0 5px 5px 0', - 'color': '#444444', - 'background-color': '#CCCCCC', - 'font-size': '1.2em', - 'cursor': 'pointer', - 'height': sidebar_height, - 'padding-top': '1px', - 'padding-left': '1px', - 'margin-left': ssb_width_expanded - 12 - }); - - sidebarbutton.hover( - function () { - $(this).css('background-color', dark_color); - }, - function () { - $(this).css('background-color', light_color); - } - ); - } - - function set_position_from_cookie() { - if (!document.cookie) - return; - var items = document.cookie.split(';'); - for(var k=0; k wintop && curbot > winbot) { - sidebarwrapper.css('top', $u.max([wintop - offset - 10, 0])); - } - else if (curtop < wintop && curbot < winbot) { - sidebarwrapper.css('top', $u.min([winbot - sidebar_height - offset - 20, - jdocument.height() - sidebar_height - 200])); - } - } - } - jwindow.scroll(scroll_sidebar); -}); diff --git a/source/themes/pydoctheme/_templates/page.html b/source/themes/pydoctheme/_templates/page.html deleted file mode 100644 index 3f8a06725..000000000 --- a/source/themes/pydoctheme/_templates/page.html +++ /dev/null @@ -1,12 +0,0 @@ -{% extends "!page.html" %} - -{% block extrahead %} -{# -You can't override layout.html in RTD -https://github.com/rtfd/readthedocs.org/issues/152 -Therefore, placing in page.html -#} - -{{ super() }} -{% endblock %} - diff --git a/source/themes/pydoctheme/_templates/sidebar.html 
b/source/themes/pydoctheme/_templates/sidebar.html deleted file mode 100644 index 09c018192..000000000 --- a/source/themes/pydoctheme/_templates/sidebar.html +++ /dev/null @@ -1,11 +0,0 @@ -
- -

This Page

- - - -
diff --git a/source/themes/pydoctheme/static/pydoctheme.css b/source/themes/pydoctheme/static/pydoctheme.css deleted file mode 100644 index 3d995d81b..000000000 --- a/source/themes/pydoctheme/static/pydoctheme.css +++ /dev/null @@ -1,178 +0,0 @@ -@import url("https://github.com/default.css"); - -body { - background-color: white; - margin-left: 1em; - margin-right: 1em; -} - -div.related { - margin-bottom: 1.2em; - padding: 0.5em 0; - border-top: 1px solid #ccc; - margin-top: 0.5em; -} - -div.related a:hover { - color: #0095C4; -} - -div.related:first-child { - border-top: 0; - border-bottom: 1px solid #ccc; -} - -div.sphinxsidebar { - background-color: #eeeeee; - border-radius: 5px; - line-height: 130%; - font-size: smaller; -} - -div.sphinxsidebar h3, div.sphinxsidebar h4 { - margin-top: 1.5em; -} - -div.sphinxsidebarwrapper > h3:first-child { - margin-top: 0.2em; -} - -div.sphinxsidebarwrapper > ul > li > ul > li { - margin-bottom: 0.4em; -} - -div.sphinxsidebar a:hover { - color: #0095C4; -} - -div.sphinxsidebar input { - font-family: 'Lucida Grande',Arial,sans-serif; - border: 1px solid #999999; - font-size: smaller; - border-radius: 3px; -} - -div.sphinxsidebar input[type=text] { - max-width: 150px; -} - -div.body { - padding: 0 0 0 1.2em; -} - -div.body p { - line-height: 140%; -} - -div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { - margin: 0; - border: 0; - padding: 0.3em 0; -} - -div.body hr { - border: 0; - background-color: #ccc; - height: 1px; -} - -div.body pre { - border-radius: 3px; - border: 1px solid #ac9; -} - -div.body div.admonition, div.body div.impl-detail { - border-radius: 3px; -} - -div.body div.impl-detail > p { - margin: 0; -} - -div.body div.seealso { - border: 1px solid #dddd66; -} - -div.body a { - color: #00608f; -} - -div.body a:visited { - color: #30306f; -} - -div.body a:hover { - color: #00B0E4; -} - -tt, pre { - font-family: monospace, sans-serif; - font-size: 96.5%; -} - -div.body tt { - 
border-radius: 3px; -} - -div.body tt.descname { - font-size: 120%; -} - -div.body tt.xref, div.body a tt { - font-weight: normal; -} - -.deprecated { - border-radius: 3px; -} - -table.docutils { - border: 1px solid #ddd; - min-width: 20%; - border-radius: 3px; - margin-top: 10px; - margin-bottom: 10px; -} - -table.docutils td, table.docutils th { - border: 1px solid #ddd !important; - border-radius: 3px; -} - -table p, table li { - text-align: left !important; -} - -table.docutils th { - background-color: #eee; - padding: 0.3em 0.5em; -} - -table.docutils td { - background-color: white; - padding: 0.3em 0.5em; -} - -table.footnote, table.footnote td { - border: 0 !important; -} - -div.footer { - line-height: 150%; - margin-top: -2em; - text-align: right; - width: auto; - margin-right: 10px; -} - -div.footer a:hover { - color: #0095C4; -} - -.refcount { - color: #060; -} - -.stableabi { - color: #229; -} diff --git a/source/themes/pydoctheme/theme.conf b/source/themes/pydoctheme/theme.conf deleted file mode 100644 index 0c4388167..000000000 --- a/source/themes/pydoctheme/theme.conf +++ /dev/null @@ -1,23 +0,0 @@ -[theme] -inherit = default -stylesheet = pydoctheme.css -pygments_style = sphinx - -[options] -bodyfont = 'Lucida Grande', Arial, sans-serif -headfont = 'Lucida Grande', Arial, sans-serif -footerbgcolor = white -footertextcolor = #555555 -relbarbgcolor = white -relbartextcolor = #666666 -relbarlinkcolor = #444444 -sidebarbgcolor = white -sidebartextcolor = #444444 -sidebarlinkcolor = #444444 -bgcolor = white -textcolor = #222222 -linkcolor = #0090c0 -visitedlinkcolor = #00608f -headtextcolor = #1a1a1a -headbgcolor = white -headlinkcolor = #aaaaaa diff --git a/source/tutorial.rst b/source/tutorial.rst deleted file mode 100644 index a35910797..000000000 --- a/source/tutorial.rst +++ /dev/null @@ -1,6 +0,0 @@ -:orphan: - -This content has moved to the following locations: - -* :doc:`installing` -* :doc:`distributing` diff --git 
a/source/tutorials/creating-documentation.rst b/source/tutorials/creating-documentation.rst new file mode 100644 index 000000000..5d9dae787 --- /dev/null +++ b/source/tutorials/creating-documentation.rst @@ -0,0 +1,7 @@ +:orphan: + +Creating documentation +====================== + +This tutorial has been removed since it is not related to packaging and was unmaintained. +Please see the `Sphinx tutorial <https://www.sphinx-doc.org/en/master/tutorial/>`_ instead. diff --git a/source/tutorials/index.rst b/source/tutorials/index.rst new file mode 100644 index 000000000..33ab4f98a --- /dev/null +++ b/source/tutorials/index.rst @@ -0,0 +1,13 @@ +Tutorials +========= + +**Tutorials** are opinionated step-by-step guides to help you get familiar +with packaging concepts. For more detailed information on specific packaging +topics, see :doc:`/guides/index`. + +.. toctree:: + :maxdepth: 1 + + installing-packages + managing-dependencies + packaging-projects diff --git a/source/tutorials/installing-packages.rst b/source/tutorials/installing-packages.rst new file mode 100644 index 000000000..3a9aa23bb --- /dev/null +++ b/source/tutorials/installing-packages.rst @@ -0,0 +1,682 @@ +.. _installing-packages: + +=================== +Installing Packages +=================== + +This section covers the basics of how to install Python :term:`packages +<Distribution Package>`. + +It's important to note that the term "package" in this context is being used to +describe a bundle of software to be installed (i.e. as a synonym for a +:term:`distribution <Distribution Package>`). It does not refer to the kind +of :term:`package <Import Package>` that you import in your Python source code +(i.e. a container of modules). It is common in the Python community to refer to +a :term:`distribution <Distribution Package>` using the term "package". Using +the term "distribution" is often not preferred, because it can easily be +confused with a Linux distribution, or another larger software distribution +like Python itself. + + +.. 
_installing_requirements: + +Requirements for Installing Packages +==================================== + +This section describes the steps to follow before installing other Python +packages. + + +Ensure you can run Python from the command line +----------------------------------------------- + +Before you go any further, make sure you have Python and that the expected +version is available from your command line. You can check this by running: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 --version + +.. tab:: Windows + + .. code-block:: bat + + py --version + + +You should get some output like ``Python 3.6.3``. If you do not have Python, +please install the latest 3.x version from `python.org`_ or refer to the +:ref:`Installing Python <python-guide:starting/installation>` section of the Hitchhiker's Guide to Python. + +.. Note:: If you're a newcomer and you get an error like this: + + .. code-block:: pycon + + >>> python3 --version + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + NameError: name 'python3' is not defined + + It's because this command and other suggested commands in this tutorial + are intended to be run in a *shell* (also called a *terminal* or + *console*). See the Python for Beginners `getting started tutorial`_ for + an introduction to using your operating system's shell and interacting with + Python. + +.. Note:: If you're using an enhanced shell like IPython or the Jupyter + notebook, you can run system commands like those in this tutorial by + prefacing them with a ``!`` character: + + .. code-block:: text + + In [1]: import sys + !{sys.executable} --version + Python 3.6.3 + + It's recommended to write ``{sys.executable}`` rather than plain ``python`` in + order to ensure that commands are run in the Python installation matching + the currently running notebook (which may not be the same Python + installation that the ``python`` command refers to). + +.. 
Note:: Due to the way most Linux distributions are handling the Python 3 + migration, Linux users using the system Python without creating a virtual + environment first should replace the ``python`` command in this tutorial + with ``python3`` and the ``python -m pip`` command with ``python3 -m pip --user``. Do *not* + run any of the commands in this tutorial with ``sudo``: if you get a + permissions error, come back to the section on creating virtual environments, + set one up, and then continue with the tutorial as written. + +.. _getting started tutorial: https://opentechschool.github.io/python-beginners/en/getting_started.html#what-is-python-exactly +.. _python.org: https://www.python.org + +Ensure you can run pip from the command line +-------------------------------------------- + +Additionally, you'll need to make sure you have :ref:`pip` available. You can +check this by running: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip --version + +.. tab:: Windows + + .. code-block:: bat + + py -m pip --version + +If you installed Python from source, with an installer from `python.org`_, or +via `Homebrew`_ you should already have pip. If you're on Linux and installed +using your OS package manager, you may have to install pip separately, see +:doc:`/guides/installing-using-linux-tools`. + +.. _Homebrew: https://brew.sh + +If ``pip`` isn't already installed, then first try to bootstrap it from the +standard library: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m ensurepip --default-pip + +.. tab:: Windows + + .. code-block:: bat + + py -m ensurepip --default-pip + +If that still doesn't allow you to run ``python -m pip``: + +* Securely Download `get-pip.py + `_ [1]_ + +* Run ``python get-pip.py``. [2]_ This will install or upgrade pip. + Additionally, it will install :ref:`setuptools` and :ref:`wheel` if they're + not installed already. + + .. 
warning:: + + Be cautious if you're using a Python install that's managed by your + operating system or another package manager. get-pip.py does not + coordinate with those tools, and may leave your system in an + inconsistent state. You can use ``python get-pip.py --prefix=/usr/local/`` + to install in ``/usr/local`` which is designed for locally-installed + software. + + +Ensure pip, setuptools, and wheel are up to date +------------------------------------------------ + +While ``pip`` alone is sufficient to install from pre-built binary archives, +up to date copies of the ``setuptools`` and ``wheel`` projects are useful +to ensure you can also install from source archives: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade pip setuptools wheel + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade pip setuptools wheel + +Optionally, create a virtual environment +---------------------------------------- + +See :ref:`section below ` for details, +but here's the basic :doc:`venv ` [3]_ command to use on a typical Linux system: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m venv tutorial_env + source tutorial_env/bin/activate + +.. tab:: Windows + + .. code-block:: bat + + py -m venv tutorial_env + tutorial_env\Scripts\activate + +This will create a new virtual environment in the ``tutorial_env`` subdirectory, +and configure the current shell to use it as the default ``python`` environment. + + +.. _Creating and using Virtual Environments: + +Creating Virtual Environments +============================= + +Python "Virtual Environments" allow Python :term:`packages ` to be installed in an isolated location for a particular application, +rather than being installed globally. If you are looking to safely install +global command line tools, +see :doc:`/guides/installing-stand-alone-command-line-tools`. 
+ +Imagine you have an application that needs version 1 of LibFoo, but another +application requires version 2. How can you use both these applications? If you +install everything into /usr/lib/python3.6/site-packages (or whatever your +platform’s standard location is), it’s easy to end up in a situation where you +unintentionally upgrade an application that shouldn’t be upgraded. + +Or more generally, what if you want to install an application and leave it be? +If an application works, any change in its libraries or the versions of those +libraries can break the application. + +Also, what if you can’t install :term:`packages ` into the +global site-packages directory? For instance, on a shared host. + +In all these cases, virtual environments can help you. They have their own +installation directories and they don’t share libraries with other virtual +environments. + +Currently, there are two common tools for creating Python virtual environments: + +* :doc:`venv ` is available by default in Python 3.3 and later, and installs + :ref:`pip` into created virtual environments in Python 3.4 and later + (Python versions prior to 3.12 also installed :ref:`setuptools`). +* :ref:`virtualenv` needs to be installed separately, but supports Python 2.7+ + and Python 3.3+, and :ref:`pip`, :ref:`setuptools` and :ref:`wheel` are + installed into created virtual environments by default. Note that ``setuptools`` is no longer + included by default starting with Python 3.12 (and ``virtualenv`` follows this behavior). + +The basic usage is like so: + +Using :doc:`venv `: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m venv + source /bin/activate + +.. tab:: Windows + + .. code-block:: bat + + py -m venv + \Scripts\activate + +Using :ref:`virtualenv`: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m virtualenv + source /bin/activate + +.. tab:: Windows + + .. 
code-block:: bat + + virtualenv + \Scripts\activate + +For more information, see the :doc:`venv ` docs or +the :doc:`virtualenv ` docs. + +The use of :command:`source` under Unix shells ensures +that the virtual environment's variables are set within the current +shell, and not in a subprocess (which then disappears, having no +useful effect). + +In both of the above cases, Windows users should *not* use the +:command:`source` command, but should rather run the :command:`activate` +script directly from the command shell like so: + +.. code-block:: bat + + \Scripts\activate + + + +Managing multiple virtual environments directly can become tedious, so the +:ref:`dependency management tutorial ` introduces a +higher level tool, :ref:`Pipenv`, that automatically manages a separate +virtual environment for each project and application that you work on. + + +Use pip for Installing +====================== + +:ref:`pip` is the recommended installer. Below, we'll cover the most common +usage scenarios. For more detail, see the :doc:`pip docs `, +which includes a complete :doc:`Reference Guide `. + + +Installing from PyPI +==================== + +The most common usage of :ref:`pip` is to install from the :term:`Python Package +Index ` using a :term:`requirement specifier +`. Generally speaking, a requirement specifier is +composed of a project name followed by an optional :term:`version specifier +`. A full description of the supported specifiers can be +found in the :ref:`Version specifier specification `. +Below are some examples. + +To install the latest version of "SomeProject": + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install "SomeProject" + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "SomeProject" + +To install a specific version: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install "SomeProject==1.4" + +.. tab:: Windows + + .. 
code-block:: bat + + py -m pip install "SomeProject==1.4" + +To install greater than or equal to one version and less than another: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install "SomeProject>=1,<2" + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "SomeProject>=1,<2" + + +To install a version that's :ref:`compatible ` +with a certain version: [4]_ + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install "SomeProject~=1.4.2" + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "SomeProject~=1.4.2" + +In this case, this means to install any version "==1.4.*" version that's also +">=1.4.2". + + +Source Distributions vs Wheels +============================== + +:ref:`pip` can install from either :term:`Source Distributions (sdist) ` or :term:`Wheels `, but if both are present +on PyPI, pip will prefer a compatible :term:`wheel `. You can override +pip`s default behavior by e.g. using its :ref:`--no-binary +` option. + +:term:`Wheels ` are a pre-built :term:`distribution ` format that provides faster installation compared to :term:`Source +Distributions (sdist) `, especially when a +project contains compiled extensions. + +If :ref:`pip` does not find a wheel to install, it will locally build a wheel +and cache it for future installs, instead of rebuilding the source distribution +in the future. + + +Upgrading packages +================== + +Upgrade an already installed ``SomeProject`` to the latest from PyPI. + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade SomeProject + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade SomeProject + +.. _`Installing to the User Site`: + +Installing to the User Site +=========================== + +To install :term:`packages ` that are isolated to the +current user, use the ``--user`` flag: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --user SomeProject + +.. 
tab:: Windows + + .. code-block:: bat + + py -m pip install --user SomeProject + +For more information see the `User Installs +`_ section +from the pip docs. + +Note that the ``--user`` flag has no effect when inside a virtual environment +- all installation commands will affect the virtual environment. + +If ``SomeProject`` defines any command-line scripts or console entry points, +``--user`` will cause them to be installed inside the `user base`_'s binary +directory, which may or may not already be present in your shell's +:envvar:`PATH`. (Starting in version 10, pip displays a warning when +installing any scripts to a directory outside :envvar:`PATH`.) If the scripts +are not available in your shell after installation, you'll need to add the +directory to your :envvar:`PATH`: + +- On Linux and macOS you can find the user base binary directory by running + ``python -m site --user-base`` and adding ``bin`` to the end. For example, + this will typically print ``~/.local`` (with ``~`` expanded to the absolute + path to your home directory) so you'll need to add ``~/.local/bin`` to your + ``PATH``. You can set your ``PATH`` permanently by `modifying ~/.profile`_. + +- On Windows you can find the user base binary directory by running ``py -m + site --user-site`` and replacing ``site-packages`` with ``Scripts``. For + example, this could return + ``C:\Users\Username\AppData\Roaming\Python36\site-packages`` so you would + need to set your ``PATH`` to include + ``C:\Users\Username\AppData\Roaming\Python36\Scripts``. You can set your user + ``PATH`` permanently in the `Control Panel`_. You may need to log out for the + ``PATH`` changes to take effect. + +.. _user base: https://docs.python.org/3/library/site.html#site.USER_BASE +.. _modifying ~/.profile: https://stackoverflow.com/a/14638025 +.. 
_Control Panel: https://docs.microsoft.com/en-us/windows/win32/shell/user-environment-variables?redirectedfrom=MSDN + +Requirements files +================== + +Install a list of requirements specified in a :ref:`Requirements File +`. + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install -r requirements.txt + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install -r requirements.txt + +Installing from VCS +=================== + +Install a project from VCS in "editable" mode. For a full breakdown of the +syntax, see pip's section on :ref:`VCS Support `. + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install -e SomeProject @ git+https://git.repo/some_pkg.git # from git + python3 -m pip install -e SomeProject @ hg+https://hg.repo/some_pkg # from mercurial + python3 -m pip install -e SomeProject @ svn+svn://svn.repo/some_pkg/trunk/ # from svn + python3 -m pip install -e SomeProject @ git+https://git.repo/some_pkg.git@feature # from a branch + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install -e SomeProject @ git+https://git.repo/some_pkg.git # from git + py -m pip install -e SomeProject @ hg+https://hg.repo/some_pkg # from mercurial + py -m pip install -e SomeProject @ svn+svn://svn.repo/some_pkg/trunk/ # from svn + py -m pip install -e SomeProject @ git+https://git.repo/some_pkg.git@feature # from a branch + +Installing from other Indexes +============================= + +Install from an alternate index + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --index-url http://my.package.repo/simple/ SomeProject + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --index-url http://my.package.repo/simple/ SomeProject + +Search an additional index during install, in addition to :term:`PyPI ` + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --extra-index-url http://my.package.repo/simple SomeProject + +.. tab:: Windows + + .. 
code-block:: bat + + py -m pip install --extra-index-url http://my.package.repo/simple SomeProject + +Installing from a local src tree +================================ + + +Installing from local src in +:doc:`Development Mode `, +i.e. in such a way that the project appears to be installed, but yet is +still editable from the src tree. + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install -e + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install -e + +You can also install normally from src + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install + +Installing from local archives +============================== + +Install a particular source archive file. + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install ./downloads/SomeProject-1.0.4.tar.gz + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install ./downloads/SomeProject-1.0.4.tar.gz + +Install from a local directory containing archives (and don't check :term:`PyPI +`) + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --no-index --find-links=file:///local/dir/ SomeProject + python3 -m pip install --no-index --find-links=/local/dir/ SomeProject + python3 -m pip install --no-index --find-links=relative/dir/ SomeProject + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --no-index --find-links=file:///local/dir/ SomeProject + py -m pip install --no-index --find-links=/local/dir/ SomeProject + py -m pip install --no-index --find-links=relative/dir/ SomeProject + +Installing from other sources +============================= + +To install from other data sources (for example Amazon S3 storage) +you can create a helper application that presents the data +in a format compliant with the :ref:`simple repository API `:, +and use the ``--extra-index-url`` flag to direct pip to use that index. + +.. 
code-block:: bash + + ./s3helper --port=7777 + python -m pip install --extra-index-url http://localhost:7777 SomeProject + + +Installing Prereleases +====================== + +Find pre-release and development versions, in addition to stable versions. By +default, pip only finds stable versions. + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --pre SomeProject + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --pre SomeProject + +Installing "Extras" +=================== + +Extras are optional "variants" of a package, which may include +additional dependencies, and thereby enable additional functionality +from the package. If you wish to install an extra for a package which +you know publishes one, you can include it in the pip installation command: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install 'SomePackage[PDF]' + python3 -m pip install 'SomePackage[PDF]==3.0' + python3 -m pip install -e '.[PDF]' # editable project in current directory + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install "SomePackage[PDF]" + py -m pip install "SomePackage[PDF]==3.0" + py -m pip install -e ".[PDF]" # editable project in current directory + +---- + +.. [1] "Secure" in this context means using a modern browser or a + tool like :command:`curl` that verifies SSL certificates when + downloading from https URLs. + +.. [2] Depending on your platform, this may require root or Administrator + access. :ref:`pip` is currently considering changing this by `making user + installs the default behavior + `_. + +.. [3] Beginning with Python 3.4, ``venv`` (a stdlib alternative to + :ref:`virtualenv`) will create virtualenv environments with ``pip`` + pre-installed, thereby making it an equal alternative to + :ref:`virtualenv`. + +.. 
[4] The compatible release specifier was accepted in :pep:`440` + and support was released in :ref:`setuptools` v8.0 and + :ref:`pip` v6.0 diff --git a/source/tutorials/managing-dependencies.rst b/source/tutorials/managing-dependencies.rst new file mode 100644 index 000000000..db3b82533 --- /dev/null +++ b/source/tutorials/managing-dependencies.rst @@ -0,0 +1,179 @@ +.. _managing-dependencies: + +Managing Application Dependencies +================================= + +The :ref:`package installation tutorial ` +covered the basics of getting set up to install and update Python packages. + +However, running these commands interactively can get tedious even for your +own personal projects, and things get even more difficult when trying to set up +development environments automatically for projects with multiple contributors. + +This tutorial walks you through the use of :ref:`Pipenv` to manage dependencies +for an application. It will show you how to install and use the necessary tools +and make strong recommendations on best practices. + +Keep in mind that Python is used for a great many different purposes, and +precisely how you want to manage your dependencies may change based on how you +decide to publish your software. The guidance presented here is most directly +applicable to the development and deployment of network services (including +web applications), but is also very well suited to managing development and +testing environments for any kind of project. + +For alternatives, see `Other Tools for Application Dependency Management`_. + +Installing Pipenv +----------------- + +:ref:`Pipenv` is a dependency manager for Python projects. If you're familiar +with Node.js' `npm`_ or Ruby's `bundler`_, it is similar in spirit to those +tools. While :ref:`pip` alone is often sufficient for personal use, Pipenv is +recommended for collaborative projects as it's a higher-level tool that +simplifies dependency management for common use cases. 
+ +Use ``pip`` to install Pipenv: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --user pipenv + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --user pipenv + +.. _pipenv-user-base: + +.. Note:: This does a `user installation`_ to prevent breaking any system-wide + packages. If ``pipenv`` isn't available in your shell after installation, + you'll need to add the :py:data:`user base `'s + binary directory to your ``PATH``. + See :ref:`Installing to the User Site` for more information. + +.. _npm: https://www.npmjs.com/ +.. _bundler: https://bundler.io/ +.. _user installation: https://pip.pypa.io/en/stable/user_guide/#user-installs + +Installing packages for your project +------------------------------------ + +Pipenv manages dependencies on a per-project basis. To install packages, +change into your project's directory (or just an empty directory for this +tutorial) and run: + +.. code-block:: bash + + cd myproject + pipenv install requests + +Pipenv will install the `Requests`_ library and create a ``Pipfile`` +for you in your project's directory. The :ref:`Pipfile` is used to track which +dependencies your project needs in case you need to re-install them, such as +when you share your project with others. You should get output similar to this +(although the exact paths shown will vary): + +.. code-block:: text + + Creating a Pipfile for this project... + Creating a virtualenv for this project... + Using base prefix '/usr/local/Cellar/python3/3.6.2/Frameworks/Python.framework/Versions/3.6' + New python executable in ~/.local/share/virtualenvs/tmp-agwWamBd/bin/python3.6 + Also creating executable in ~/.local/share/virtualenvs/tmp-agwWamBd/bin/python + Installing setuptools, pip, wheel...done. + + Virtualenv location: ~/.local/share/virtualenvs/tmp-agwWamBd + Installing requests... 
+ Collecting requests + Using cached requests-2.18.4-py2.py3-none-any.whl + Collecting idna<2.7,>=2.5 (from requests) + Using cached idna-2.6-py2.py3-none-any.whl + Collecting urllib3<1.23,>=1.21.1 (from requests) + Using cached urllib3-1.22-py2.py3-none-any.whl + Collecting chardet<3.1.0,>=3.0.2 (from requests) + Using cached chardet-3.0.4-py2.py3-none-any.whl + Collecting certifi>=2017.4.17 (from requests) + Using cached certifi-2017.7.27.1-py2.py3-none-any.whl + Installing collected packages: idna, urllib3, chardet, certifi, requests + Successfully installed certifi-2017.7.27.1 chardet-3.0.4 idna-2.6 requests-2.18.4 urllib3-1.22 + + Adding requests to Pipfile's [packages]... + +.. _Requests: https://pypi.org/project/requests/ + + +Using installed packages +------------------------ + +Now that Requests is installed you can create a simple :file:`main.py` file +to use it: + +.. code-block:: python + + import requests + + response = requests.get('https://httpbin.org/ip') + + print('Your IP is {0}'.format(response.json()['origin'])) + +Then you can run this script using ``pipenv run``: + +.. code-block:: bash + + pipenv run python main.py + +You should get output similar to this: + +.. code-block:: text + + Your IP is 8.8.8.8 + +Using ``pipenv run`` ensures that your installed packages are available to +your script. It's also possible to spawn a new shell that ensures all commands +have access to your installed packages with ``pipenv shell``. + + +Next steps +---------- + +Congratulations, you now know how to effectively manage dependencies and +development environments on a collaborative Python project! ✨ 🍰 ✨ + +If you're interested in creating and distributing your own Python packages, see +the :ref:`tutorial on packaging and distributing packages `. + +Note that when your application includes definitions of Python source packages, +they (and their dependencies) can be added to your ``pipenv`` environment with +``pipenv install -e `` (e.g. 
+``pipenv install -e .`` or ``pipenv install -e src``). + + +.. _other-dependency-management-tools: + +Other Tools for Application Dependency Management +------------------------------------------------- + +If you find this particular approach to managing application dependencies isn't +working well for you or your use case, you may want to explore these other tools +and techniques, listed in alphabetical order, to see if one of them is a better fit: + +* `hatch `_ for opinionated coverage of even + more steps in the project management workflow, such as incrementing versions + and creating new skeleton projects from project templates. +* `micropipenv `_ for a lightweight + wrapper around pip that supports ``requirements.txt``, Pipenv and Poetry lock files, + or converting them to pip-tools compatible output. Designed for containerized + Python applications, but not limited to them. +* `PDM `_ for a modern Python package + management relying on standards such as :pep:`517` and :pep:`621`. +* `pip-tools `_ for creating a lock file of all + dependencies from a list of packages directly used in a project, and ensuring that + only those dependencies are installed. +* `Poetry `__ for a tool comparable in scope + to Pipenv that focuses more directly on use cases where the project being managed is + structured as a distributable Python package with a valid ``pyproject.toml`` file. + By contrast, Pipenv explicitly avoids making the assumption that the application + being worked on will support distribution as a ``pip``-installable Python package. diff --git a/source/tutorials/packaging-projects.rst b/source/tutorials/packaging-projects.rst new file mode 100644 index 000000000..f2c0851ba --- /dev/null +++ b/source/tutorials/packaging-projects.rst @@ -0,0 +1,537 @@ +Packaging Python Projects +========================= + +This tutorial walks you through how to package a simple Python project. 
It will +show you how to add the necessary files and structure to create the package, how +to build the package, and how to upload it to the Python Package Index (PyPI). + +.. tip:: + + If you have trouble running the commands in this tutorial, please copy the command + and its output, then `open an issue`_ on the `packaging-problems`_ repository on + GitHub. We'll do our best to help you! + +.. _open an issue: https://github.com/pypa/packaging-problems/issues/new?template=packaging_tutorial.yml&title=Trouble+with+the+packaging+tutorial&guide=https://packaging.python.org/tutorials/packaging-projects + +.. _packaging-problems: https://github.com/pypa/packaging-problems + +Some of the commands require a newer version of :ref:`pip`, so start by making +sure you have the latest version installed: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade pip + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade pip + + +A simple project +---------------- + +This tutorial uses a simple project named +``example_package_YOUR_USERNAME_HERE``. If your username is ``me``, then the +package would be ``example_package_me``; this ensures that you have a unique +package name that doesn't conflict with packages uploaded by other people +following this tutorial. We recommend following this tutorial as-is using this +project, before packaging your own project. + +Create the following file structure locally: + +.. code-block:: text + + packaging_tutorial/ + └── src/ + └── example_package_YOUR_USERNAME_HERE/ + ├── __init__.py + └── example.py + +The directory containing the Python files should match the project name. This +simplifies the configuration and is more obvious to users who install the package. 
+ +Creating the file :file:`__init__.py` is recommended because the existence of an +:file:`__init__.py` file allows users to import the directory as a regular package, +even if (as is the case in this tutorial) :file:`__init__.py` is empty. +[#namespace-packages]_ + +:file:`example.py` is an example of a module within the package that could +contain the logic (functions, classes, constants, etc.) of your package. +Open that file and enter the following content: + +.. code-block:: python + + def add_one(number): + return number + 1 + +If you are unfamiliar with Python's :term:`modules ` and +:term:`import packages `, take a few minutes to read over the +`Python documentation for packages and modules`_. + +Once you create this structure, you'll want to run all of the commands in this +tutorial within the ``packaging_tutorial`` directory. + +.. _Python documentation for packages and modules: + https://docs.python.org/3/tutorial/modules.html#packages + + +Creating the package files +-------------------------- + +You will now add files that are used to prepare the project for distribution. +When you're done, the project structure will look like this: + + +.. code-block:: text + + packaging_tutorial/ + ├── LICENSE + ├── pyproject.toml + ├── README.md + ├── src/ + │ └── example_package_YOUR_USERNAME_HERE/ + │ ├── __init__.py + │ └── example.py + └── tests/ + + +Creating a test directory +------------------------- + +:file:`tests/` is a placeholder for test files. Leave it empty for now. + + +.. _choosing-build-backend: + +Choosing a build backend +------------------------ + +Tools like :ref:`pip` and :ref:`build` do not actually convert your sources +into a :term:`distribution package ` (like a wheel); +that job is performed by a :term:`build backend `. The build backend determines how +your project will specify its configuration, including metadata (information +about the project, for example, the name and tags that are displayed on PyPI) +and input files. 
Build backends have different levels of functionality, such as
+whether they support building :term:`extension modules <Extension Module>`, and
+you should choose one that suits your needs and preferences.
+
+You can choose from a number of backends; this tutorial uses :ref:`Hatchling
+<hatch>` by default, but it will work identically with :ref:`setuptools`,
+:ref:`Flit <flit>`, :ref:`PDM <pdm>`, and others that support the ``[project]``
+table for :ref:`metadata <configuring metadata>`.
+
+.. note::
+
+   Some build backends are part of larger tools that provide a command-line
+   interface with additional features like project initialization and version
+   management, as well as building, uploading, and installing packages. This
+   tutorial uses single-purpose tools that work independently.
+
+The :file:`pyproject.toml` tells :term:`build frontend <Build Frontend>` tools like :ref:`pip` and
+:ref:`build` which backend to use for your project. Below are some
+examples for common build backends, but check your backend's own documentation
+for more details.
+
+.. include:: ../shared/build-backend-tabs.rst
+
+The ``requires`` key is a list of packages that are needed to build your package.
+The :term:`frontend <Build Frontend>` should install them automatically when building your package.
+Frontends usually run builds in isolated environments, so omitting dependencies
+here may cause build-time errors.
+This should always include your backend's package, and might have other build-time
+dependencies.
+The minimum version specified in the above code block is the one that introduced support
+for :ref:`the new license metadata <core-metadata-license-expression>`.
+
+The ``build-backend`` key is the name of the Python object that frontends will use
+to perform the build.
+
+Both of these values will be provided by the documentation for your build
+backend, or generated by its command line interface. There should be no need for
+you to customize these settings.
+ +Additional configuration of the build tool will either be in a ``tool`` section +of the ``pyproject.toml``, or in a special file defined by the build tool. For +example, when using ``setuptools`` as your build backend, additional configuration +may be added to a ``setup.py`` or ``setup.cfg`` file, and specifying +``setuptools.build_meta`` in your build allows the tools to locate and use these +automatically. + +.. _configuring metadata: + +Configuring metadata +^^^^^^^^^^^^^^^^^^^^ + +Open :file:`pyproject.toml` and enter the following content. Change the ``name`` +to include your username; this ensures that you have a unique +package name that doesn't conflict with packages uploaded by other people +following this tutorial. + +.. code-block:: toml + + [project] + name = "example_package_YOUR_USERNAME_HERE" + version = "0.0.1" + authors = [ + { name="Example Author", email="author@example.com" }, + ] + description = "A small example package" + readme = "README.md" + requires-python = ">=3.9" + classifiers = [ + "Programming Language :: Python :: 3", + "Operating System :: OS Independent", + ] + license = "MIT" + license-files = ["LICEN[CS]E*"] + + [project.urls] + Homepage = "https://github.com/pypa/sampleproject" + Issues = "https://github.com/pypa/sampleproject/issues" + +- ``name`` is the *distribution name* of your package. This can be any name as + long as it only contains letters, numbers, ``.``, ``_`` , and ``-``. It also + must not already be taken on PyPI. **Be sure to update this with your + username** for this tutorial, as this ensures you won't try to upload a + package with the same name as one which already exists. +- ``version`` is the package version. (Some build backends allow it to be + specified another way, such as from a file or Git tag.) +- ``authors`` is used to identify the author of the package; you specify a name + and an email for each author. You can also list ``maintainers`` in the same + format. 
+
+- ``description`` is a short, one-sentence summary of the package.
+- ``readme`` is a path to a file containing a detailed description of the
+  package. This is shown on the package detail page on PyPI.
+  In this case, the description is loaded from :file:`README.md` (which is a
+  common pattern). There also is a more advanced table form described in the
+  :ref:`pyproject.toml guide <writing-pyproject-toml>`.
+- ``requires-python`` gives the versions of Python supported by your
+  project. An installer like :ref:`pip` will look back through older versions of
+  packages until it finds one that has a matching Python version.
+- ``classifiers`` gives the index and :ref:`pip` some additional metadata
+  about your package. In this case, the package is only compatible with Python
+  3 and is OS-independent. You should
+  always include at least which version(s) of Python your package works on
+  and which operating systems
+  your package will work on. For a complete list of classifiers, see
+  https://pypi.org/classifiers/.
+- ``license`` is the :term:`SPDX license expression <License Expression>` of
+  your package.
+- ``license-files`` is the list of glob paths to the license files,
+  relative to the directory where :file:`pyproject.toml` is located.
+- ``urls`` lets you list any number of extra links to show on PyPI.
+  Generally this could be to the source, documentation, issue trackers, etc.
+
+See the :ref:`pyproject.toml guide <writing-pyproject-toml>` for details
+on these and other fields that can be defined in the ``[project]``
+table. Other common fields are ``keywords`` to improve discoverability
+and the ``dependencies`` that are required to install your package.
+
+
+Creating README.md
+------------------
+
+Open :file:`README.md` and enter the following content. You can customize this
+if you'd like.
+
+.. code-block:: md
+
+    # Example Package
+
+    This is a simple example package. You can use
+    [GitHub-flavored Markdown](https://guides.github.com/features/mastering-markdown/)
+    to write your content.
+ + +Creating a LICENSE +------------------ + +It's important for every package uploaded to the Python Package Index to include +a license. This tells users who install your package the terms under which they +can use your package. For help picking a license, see +https://choosealicense.com/. Once you have chosen a license, open +:file:`LICENSE` and enter the license text. For example, if you had chosen the +MIT license: + +.. code-block:: text + + Copyright (c) 2018 The Python Packaging Authority + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +Most build backends automatically include license files in packages. See your +backend's documentation for more details. +If you include the path to license in the ``license-files`` key of +:file:`pyproject.toml`, and your build backend supports :pep:`639`, +the file will be automatically included in the package. 
+
+
+Including other files
+---------------------
+
+The files listed above will be included automatically in your
+:term:`source distribution <Source Distribution>`. If you want to
+include additional files, see the documentation for your build backend.
+
+.. _generating archives:
+
+Generating distribution archives
+--------------------------------
+
+The next step is to generate :term:`distribution packages <Distribution Package>`
+for the package. These are archives that are uploaded to the Python
+Package Index and can be installed by :ref:`pip`.
+
+Make sure you have the latest version of PyPA's :ref:`build` installed:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: bash
+
+      python3 -m pip install --upgrade build
+
+.. tab:: Windows
+
+   .. code-block:: bat
+
+      py -m pip install --upgrade build
+
+.. tip:: If you have trouble installing these, see the
+   :doc:`installing-packages` tutorial.
+
+Now run this command from the same directory where :file:`pyproject.toml` is located:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: bash
+
+      python3 -m build
+
+.. tab:: Windows
+
+   .. code-block:: bat
+
+      py -m build
+
+This command should output a lot of text and once completed should generate two
+files in the :file:`dist` directory:
+
+.. code-block:: text
+
+    dist/
+    ├── example_package_YOUR_USERNAME_HERE-0.0.1-py3-none-any.whl
+    └── example_package_YOUR_USERNAME_HERE-0.0.1.tar.gz
+
+
+The ``tar.gz`` file is a :term:`source distribution <Source Distribution>`
+whereas the ``.whl`` file is a :term:`built distribution <Built Distribution>`.
+Newer :ref:`pip` versions preferentially install built distributions, but will
+fall back to source distributions if needed. You should always upload a source
+distribution and provide built distributions for the platforms your project is
+compatible with. In this case, our example package is compatible with Python on
+any platform so only one built distribution is needed.
+
+Uploading the distribution archives
+-----------------------------------
+
+Finally, it's time to upload your package to the Python Package Index!
+ +The first thing you'll need to do is register an account on TestPyPI, which +is a separate instance of the package index intended for testing and +experimentation. It's great for things like this tutorial where we don't +necessarily want to upload to the real index. To register an account, go to +https://test.pypi.org/account/register/ and complete the steps on that page. +You will also need to verify your email address before you're able to upload +any packages. For more details, see :doc:`/guides/using-testpypi`. + +To securely upload your project, you'll need a PyPI `API token`_. Create one at +https://test.pypi.org/manage/account/#api-tokens, setting the "Scope" to "Entire +account". **Don't close the page until you have copied and saved the token — you +won't see that token again.** + +.. _API token: https://test.pypi.org/help/#apitoken + +Now that you are registered, you can use :ref:`twine` to upload the +distribution packages. You'll need to install Twine: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m pip install --upgrade twine + +.. tab:: Windows + + .. code-block:: bat + + py -m pip install --upgrade twine + +Once installed, run Twine to upload all of the archives under :file:`dist`: + +.. tab:: Unix/macOS + + .. code-block:: bash + + python3 -m twine upload --repository testpypi dist/* + +.. tab:: Windows + + .. code-block:: bat + + py -m twine upload --repository testpypi dist/* + +You will be prompted for an API token. Use the token value, including the ``pypi-`` +prefix. Note that the input will be hidden, so be sure to paste correctly. + +After the command completes, you should see output similar to this: + +.. code-block:: + + Uploading distributions to https://test.pypi.org/legacy/ + Enter your API token: + Uploading example_package_YOUR_USERNAME_HERE-0.0.1-py3-none-any.whl + 100% ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 8.2/8.2 kB • 00:01 • ? 
+  Uploading example_package_YOUR_USERNAME_HERE-0.0.1.tar.gz
+  100% ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 6.8/6.8 kB • 00:00 • ?
+
+Once uploaded, your package should be viewable on TestPyPI; for example:
+``https://test.pypi.org/project/example_package_YOUR_USERNAME_HERE``.
+
+
+Installing your newly uploaded package
+--------------------------------------
+
+You can use :ref:`pip` to install your package and verify that it works.
+Create a :ref:`virtual environment <Creating and using Virtual Environments>`
+and install your package from TestPyPI:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: bash
+
+      python3 -m pip install --index-url https://test.pypi.org/simple/ --no-deps example-package-YOUR-USERNAME-HERE
+
+.. tab:: Windows
+
+   .. code-block:: bat
+
+      py -m pip install --index-url https://test.pypi.org/simple/ --no-deps example-package-YOUR-USERNAME-HERE
+
+Make sure to specify your username in the package name!
+
+pip should install the package from TestPyPI and the output should look
+something like this:
+
+.. code-block:: text
+
+    Collecting example-package-YOUR-USERNAME-HERE
+      Downloading https://test-files.pythonhosted.org/packages/.../example_package_YOUR_USERNAME_HERE-0.0.1-py3-none-any.whl
+    Installing collected packages: example_package_YOUR_USERNAME_HERE
+    Successfully installed example_package_YOUR_USERNAME_HERE-0.0.1
+
+.. note:: This example uses the ``--index-url`` flag to specify TestPyPI instead of
+   live PyPI. Additionally, it specifies ``--no-deps``. Since TestPyPI doesn't
+   have the same packages as the live PyPI, it's possible that attempting to
+   install dependencies may fail or install something unexpected. While our
+   example package doesn't have any dependencies, it's a good practice to avoid
+   installing dependencies when using TestPyPI.
+
+You can test that it was installed correctly by importing the package.
+Make sure you're still in your virtual environment, then run Python:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: bash
+
+      python3
+
+.. tab:: Windows
+
+   .. 
code-block:: bat
+
+      py
+
+and import the package:
+
+.. code-block:: pycon
+
+    >>> from example_package_YOUR_USERNAME_HERE import example
+    >>> example.add_one(2)
+    3
+
+
+Next steps
+----------
+
+**Congratulations, you've packaged and distributed a Python project!**
+✨ 🍰 ✨
+
+Keep in mind that this tutorial showed you how to upload your package to Test
+PyPI, which isn't a permanent storage. The Test system occasionally deletes
+packages and accounts. It is best to use TestPyPI for testing and experiments
+like this tutorial.
+
+When you are ready to upload a real package to the Python Package Index you can
+do much the same as you did in this tutorial, but with these important
+differences:
+
+* Choose a memorable and unique name for your package. You don't have to append
+  your username as you did in the tutorial, but you can't use an existing name.
+* Register an account on https://pypi.org - note that these are two separate
+  servers and the login details from the test server are not shared with the
+  main server.
+* Use ``twine upload dist/*`` to upload your package and enter your credentials
+  for the account you registered on the real PyPI. Now that you're uploading
+  the package in production, you don't need to specify ``--repository``; the
+  package will upload to https://pypi.org/ by default.
+* Install your package from the real PyPI using ``python3 -m pip install [your-package]``.
+
+At this point if you want to read more on packaging Python libraries here are
+some things you can do:
+
+* Read about advanced configuration for your chosen build backend:
+  `Hatchling <hatchling-config_>`_,
+  :doc:`setuptools <setuptools:userguide/pyproject_config>`,
+  :doc:`Flit <flit:pyproject_toml>`, `PDM <pdm-config_>`_.
+* Look at the :doc:`guides </guides/index>` on this site for more advanced
+  practical information, or the :doc:`discussions </discussions/index>`
+  for explanations and background on specific topics.
+
+* Consider packaging tools that provide a single command-line interface for
+  project management and packaging, such as :ref:`hatch`, :ref:`flit`,
+  :ref:`pdm`, and :ref:`poetry`.
+
+
+----
+
+.. rubric:: Notes
+
+.. [#namespace-packages]
+   Technically, you can also create Python packages without an ``__init__.py`` file,
+   but those are called :doc:`namespace packages </guides/packaging-namespace-packages>`
+   and considered an **advanced topic** (not covered in this tutorial).
+   If you are only getting started with Python packaging, it is recommended to
+   stick with *regular packages* and ``__init__.py`` (even if the file is empty).
+
+
+.. _hatchling-config: https://hatch.pypa.io/latest/config/metadata/
+.. _pdm-config: https://pdm-project.org/latest/reference/pep621/
diff --git a/source/wheel_egg.rst b/source/wheel_egg.rst
deleted file mode 100644
index 723a02cd0..000000000
--- a/source/wheel_egg.rst
+++ /dev/null
@@ -1,52 +0,0 @@
-.. _`Wheel vs Egg`:
-
-============
-Wheel vs Egg
-============
-
-:Page Status: Complete
-:Last Reviewed: 2015-09-10
-
-:term:`Wheel` and :term:`Egg` are both packaging formats that aim to support the
-use case of needing an install artifact that doesn't require building or
-compilation, which can be costly in testing and production workflows.
-
-The :term:`Egg` format was introduced by :ref:`setuptools` in 2004, whereas the
-:term:`Wheel` format was introduced by :pep:`427` in 2012.
-
-:term:`Wheel` is currently considered the standard for :term:`built ` and :term:`binary ` packaging for Python.
-
-Here's a breakdown of the important differences between :term:`Wheel` and :term:`Egg`.
-
-
-* :term:`Wheel` has an :pep:`official PEP <427>`. :term:`Egg` did not.
-
-* :term:`Wheel` is a :term:`distribution ` format, i.e a packaging
-  format. [1]_ :term:`Egg` was both a distribution format and a runtime
-  installation format (if left zipped), and was designed to be importable.
-
-* :term:`Wheel` archives do not include .pyc files. 
Therefore, when the - distribution only contains python files (i.e. no compiled extensions), and is - compatible with Python 2 and 3, it's possible for a wheel to be "universal", - similar to an :term:`sdist `. - -* :term:`Wheel` uses :pep:`PEP376-compliant <376>` ``.dist-info`` - directories. Egg used ``.egg-info``. - -* :term:`Wheel` has a :pep:`richer file naming convention <425>`. A single - wheel archive can indicate its compatibility with a number of Python language - versions and implementations, ABIs, and system architectures. - -* :term:`Wheel` is versioned. Every wheel file contains the version of the wheel - specification and the implementation that packaged it. - -* :term:`Wheel` is internally organized by `sysconfig path type - `_, - therefore making it easier to convert to other formats. - ----- - -.. [1] Circumstantially, in some cases, wheels can be used as an importable - runtime format, although :pep:`this is not officially supported at this time - <427#is-it-possible-to-import-python-code-directly-from-a-wheel-file>`.