diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index a033179..0000000
--- a/.coveragerc
+++ /dev/null
@@ -1,21 +0,0 @@
-[report]
-ignore_errors = True
-fail_under = 100
-exclude_lines =
- pragma: no cover
- def __repr__
- if self.debug:
- if settings.DEBUG
- raise AssertionError
- raise NotImplementedError
- if 0:
- if __name__ == .__main__.:
- typing.Protocol
-
-omit =
- portalocker/redis.py
-
-[run]
-source = src
-branch = True
-
diff --git a/.github/actions/setup-python-uv/action.yml b/.github/actions/setup-python-uv/action.yml
new file mode 100644
index 0000000..fd402e5
--- /dev/null
+++ b/.github/actions/setup-python-uv/action.yml
@@ -0,0 +1,32 @@
+# .github/actions/setup-python-uv/action.yml
+# Composite action to checkout code, setup Python with UV, and install dependencies.
+
+name: 'Setup Python Environment with UV'
+description: 'Checks out code, sets up UV with a specific Python version, installs dependencies, and lists them.'
+
+inputs:
+ python-version:
+ description: 'Python version to set up (e.g., "3.9", "3.11")'
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ # Step 1: Setup uv (Python package installer and resolver)
+ - name: Setup uv for Python ${{ inputs.python-version }}
+ uses: astral-sh/setup-uv@v6
+ with:
+ python-version: ${{ inputs.python-version }}
+ activate-environment: true # Makes uv-managed Python available on PATH for subsequent steps.
+
+ # Step 2: Install dependencies using uv sync
+ # This command assumes your project dependencies are defined in a way uv can understand
+ # (e.g., pyproject.toml, requirements.txt).
+ - name: Install dependencies with uv
+ run: |
+ uv sync -p ${{ inputs.python-version }}
+ echo "----------------------------------------"
+ echo "Installed packages for Python ${{ inputs.python-version }}:"
+ uv pip list
+ echo "----------------------------------------"
+ shell: bash
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..532bf89
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,25 @@
+version: 2
+updates:
+ - package-ecosystem: github-actions
+ directory: /
+ target-branch: master
+ labels:
+ - "meta: CI"
+ schedule:
+ interval: monthly
+ groups:
+ actions:
+ patterns:
+ - "*"
+
+ - package-ecosystem: pip
+ directory: /
+ target-branch: master
+ labels:
+ - "meta: deps"
+ schedule:
+ interval: monthly
+ groups:
+ pip:
+ patterns:
+ - "*"
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 4c92829..e5288c8 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,46 +1,66 @@
-name: lint
+# .github/workflows/lint.yml
+# Callable workflow for linting, documentation building, and repository review.
-on:
- push:
- pull_request:
+name: Reusable Lint and Docs
-env:
- FORCE_COLOR: 1
+on:
+ workflow_call:
+ inputs:
+ python-version:
+ description: 'Python version to use for linting and docs'
+ type: string
+ default: '3.13'
+ os:
+ description: 'Operating system to run on'
+ type: string
+ default: 'ubuntu-latest'
+ # If this workflow needs secrets (e.g., for publishing docs), define them here.
+ # secrets:
+ # SOME_SECRET:
+ # required: true
jobs:
- lint:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- python-version: ['3.8', '3.8', '3.10', '3.11']
+ lint_docs_repo_review:
+ name: Linting, Docs Build, and Repo Standards
+ runs-on: ${{ inputs.os }}
+ timeout-minutes: 10
steps:
- - uses: actions/checkout@v4
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
+ # Step 1: Checkout repository
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ # Step 2: Setup Python environment, checkout code, and install dependencies.
+ # This uses the composite action, ensuring consistency with other Python jobs.
+ - name: Setup Python Environment & Install Dependencies
+ uses: ./.github/actions/setup-python-uv
with:
- python-version: ${{ matrix.python-version }}
- cache: 'pip'
- - name: Python version
- run: python --version
-
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip setuptools wheel mypy
- python -m pip install -e '.[tests]'
-
- - name: Linting with pyright
- uses: jakebailey/pyright-action@v1
+ python-version: ${{ inputs.python-version }}
+
+ # Step 3: Check repository best-practices using Scientific Python's repo-review.
+ - name: Check repository best-practices
+ uses: scientific-python/repo-review@v0.12.2
with:
- path: portalocker portalocker_tests
+ plugins: sp-repo-review # Uses standard Scientific Python plugins.
+
+ # Step 4: Run ruff linter.
+ - name: Run ruff linter
+ uses: astral-sh/ruff-action@v3
+ # Default 'check' command is used.
- - name: Linting with ruff
- uses: jpetrucciani/ruff-check@main
+ # Step 5: Run ruff formatter in check mode.
+ - name: Run ruff formatter (check mode)
+ uses: astral-sh/ruff-action@v3
with:
- extra-args: portalocker portalocker_tests
+ args: 'format --check --diff'
- - name: Linting with mypy
- run: |
- python -m mypy portalocker portalocker_tests
+ # Step 6: Build Sphinx documentation.
+ # Assumes a 'docs' environment is configured in tox.ini (or similar for uvx).
+ - name: Build sphinx docs
+ run: uvx tox -e docs # Ensure your tox.ini or pyproject.toml has a 'docs' env setup for uvx.
+ # Step 7: Typo checking using codespell.
+ - name: Typo checking (codespell)
+ uses: codespell-project/actions-codespell@v2
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..0345fdf
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,115 @@
+# .github/workflows/main.yml
+# Main CI workflow: orchestrates linting, documentation, and tests.
+
+name: CI Checks and Tests
+
+on:
+ push:
+ pull_request:
+ branches:
+ - develop
+
+env:
+ FORCE_COLOR: 1 # Ensures colored output for tools that support it.
+
+permissions:
+ contents: read # Required to checkout the repository.
+ pull-requests: write # For actions that might comment on PRs (e.g., repo-review if configured, or future coverage).
+ # Add other permissions if specific actions require them (e.g., id-token: write for OIDC).
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ # Job: Call the reusable linting and documentation workflow.
+ linting_and_documentation:
+ name: Lint, Docs, & Repo Review
+ uses: ./.github/workflows/lint.yml
+ secrets: inherit # Pass down secrets like GITHUB_TOKEN if needed by the callable workflow.
+
+ # Job: Generate the test matrix for different OS and Python versions.
+ generate_test_matrix:
+ name: Generate Test Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.generate-matrix.outputs.matrix }}
+ steps:
+ - name: Generate Matrix for Tests
+ id: generate-matrix
+ uses: coactions/dynamic-matrix@v3
+ with:
+ platforms: 'linux,macos,windows'
+ min_python: '3.9'
+ linux: full
+ windows: full
+ macos: full
+
+ # Job: Perform static type checking across the generated matrix.
+ type_checking:
+ name: Type (py${{ matrix.python_version }}, ${{ matrix.os }})
+ needs: generate_test_matrix
+ runs-on: ${{ matrix.os || 'ubuntu-latest' }}
+ timeout-minutes: 10
+
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJSON(needs.generate_test_matrix.outputs.matrix) }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Setup Python Environment & Install Dependencies
+ uses: ./.github/actions/setup-python-uv # Reusable composite action
+ with:
+ python-version: ${{ matrix.python_version }}
+
+ - name: Run pyright
+ run: pyright
+
+ - name: Run mypy
+ run: |
+ mypy --cache-dir=/dev/null .
+
+ # Job: Run pytest tests across the generated matrix.
+ tests:
+ name: test (py${{ matrix.python_version }}, ${{ matrix.os }})
+ needs: generate_test_matrix
+ runs-on: ${{ matrix.os || 'ubuntu-latest' }}
+ timeout-minutes: 15
+
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJSON(needs.generate_test_matrix.outputs.matrix) }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Setup Python Environment & Install Dependencies
+ uses: ./.github/actions/setup-python-uv
+ with:
+ python-version: ${{ matrix.python_version }}
+
+ - name: Start Redis (if applicable)
+ uses: supercharge/redis-github-action@1.8.0
+ with:
+ redis-version: 'latest'
+ continue-on-error: true # Allow failure if Redis isn't critical or supported on all OS.
+
+ - name: Run pytest
+ run: |
+ pytest --junitxml=junit/test-results-${{ matrix.os }}-py${{ matrix.python_version }}.xml
+
+ - name: Upload pytest test results
+ if: ${{ always() }} # Ensure results are uploaded even if tests fail.
+ uses: actions/upload-artifact@v4
+ with:
+ name: pytest-results-${{ matrix.os }}-py${{ matrix.python_version }}
+ path: junit/test-results-${{ matrix.os }}-py${{ matrix.python_version }}.xml
+ retention-days: 7
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
deleted file mode 100644
index 71209cc..0000000
--- a/.github/workflows/python-package.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-name: test
-
-on:
- push:
- branches: [ develop, master ]
- pull_request:
- branches: [ develop ]
-
-env:
- FORCE_COLOR: 1
-
-jobs:
- # Run os specific tests on the slower OS X/Windows machines
- windows_osx:
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- python-version: ['3.8', '3.9', '3.10', '3.11']
- os: ['macos-latest', 'windows-latest']
-
- steps:
- - uses: actions/checkout@v4
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- cache: 'pip'
- - name: Python version
- run: python --version
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip setuptools wheel
- python -m pip install -e ".[tests]"
- - name: Test with pytest
- run: python -m pytest
-
- # Run all tests including Redis on Linux
- linux:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- python-version: ['3.8', '3.9', '3.10', '3.11']
-
- steps:
- - uses: actions/checkout@v4
- - name: Start Redis
- uses: supercharge/redis-github-action@1.7.0
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- cache: 'pip'
- - name: Python version
- run: python --version
- - name: Install dependencies
- run: |
- python -m pip install tox
- - name: Test with pytest
- run: tox -p all
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index 7101b3f..c3b6dc2 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -9,9 +9,9 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@v8
+ - uses: actions/stale@v9
with:
days-before-stale: 30
+ days-before-pr-stale: -1
exempt-issue-labels: in-progress,help-wanted,pinned,security,enhancement
- exempt-all-pr-assignees: true
-
+ remove-issue-stale-when-updated: true
diff --git a/.gitignore b/.gitignore
index 6167d32..bd03df6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,9 @@
+# Build artifacts, caches, and local environment files
build/
locked_file
dist
htmlcov
*.egg-info
.cache
+.aider*
+.env
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..27ac480
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,73 @@
+ci:
+ autoupdate_branch: "master"
+ autoupdate_commit_msg: "⬆️ update pre-commit hooks"
+ skip:
+ - basedpyright
+
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v5.0.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-ast
+ - id: check-case-conflict
+ - id: check-docstring-first
+ - id: check-executables-have-shebangs
+ - id: check-illegal-windows-names
+ - id: check-json
+ - id: check-merge-conflict
+ - id: check-shebang-scripts-are-executable
+ - id: check-symlinks
+ - id: check-toml
+ - id: check-vcs-permalinks
+ - id: check-xml
+ - id: check-yaml
+ - id: debug-statements
+ - id: destroyed-symlinks
+ - id: detect-aws-credentials
+ args: [--allow-missing-credentials]
+ - id: detect-private-key
+ - id: fix-byte-order-marker
+ - id: forbid-submodules
+ - id: name-tests-test
+ args: [--pytest-test-first]
+ - id: no-commit-to-branch
+ args: [--branch, master]
+ - id: trailing-whitespace
+ args: [--markdown-linebreak-ext=md]
+
+ - repo: https://github.com/igorshubovych/markdownlint-cli
+ rev: v0.43.0
+ hooks:
+ - id: markdownlint
+
+ - repo: https://github.com/executablebooks/mdformat
+ rev: 0.7.21
+ hooks:
+ - id: mdformat
+ additional_dependencies:
+ - mdformat-gfm
+ - mdformat-gfm-alerts
+
+ - repo: https://github.com/crate-ci/typos
+ rev: v1.28.4
+ hooks:
+ - id: typos
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.8.4
+ hooks:
+ - id: ruff
+ args: [--fix, --show-fixes]
+ types_or: [python, pyi]
+
+ - id: ruff-format
+ types_or: [python, pyi]
+
+ - repo: local
+ hooks:
+ - id: basedpyright
+ name: basedpyright
+ entry: uv run --no-sync --locked basedpyright
+ language: system
+ types_or: [python, pyi]
diff --git a/README.rst b/README.rst
index c5ef42f..1ce17da 100644
--- a/README.rst
+++ b/README.rst
@@ -2,13 +2,9 @@
portalocker - Cross-platform locking library
############################################
-.. image:: https://github.com/WoLpH/portalocker/actions/workflows/python-package.yml/badge.svg?branch=master
+.. image:: https://github.com/wolph/portalocker/actions/workflows/main.yml/badge.svg?branch=master
:alt: Linux Test Status
- :target: https://github.com/WoLpH/portalocker/actions/
-
-.. image:: https://ci.appveyor.com/api/projects/status/mgqry98hgpy4prhh?svg=true
- :alt: Windows Tests Status
- :target: https://ci.appveyor.com/project/WoLpH/portalocker
+ :target: https://github.com/wolph/portalocker/actions/workflows/main.yml
.. image:: https://coveralls.io/repos/WoLpH/portalocker/badge.svg?branch=master
:alt: Coverage Status
diff --git a/appveyor.yml b/appveyor.yml
index 0b57035..9cd6124 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -6,16 +6,17 @@ image:
environment:
matrix:
- - TOXENV: py38
- TOXENV: py39
- TOXENV: py310
- TOXENV: py311
+ - TOXENV: py312
+ - TOXENV: py313
install:
- - py -m pip install -U tox setuptools wheel
- - py -m pip install -Ue ".[tests]"
+ - powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
build: false # Not a C# project, build stuff at the test step instead.
test_script:
- - py -m tox"
+ - dir
+ - C:\Users\appveyor\.local\bin\uvx.exe tox
diff --git a/docs/_theme/flask_theme_support.py b/docs/_theme/flask_theme_support.py
index 555c116..fedc85d 100644
--- a/docs/_theme/flask_theme_support.py
+++ b/docs/_theme/flask_theme_support.py
@@ -1,7 +1,9 @@
# flasky extensions. flasky pygments style based on tango style
from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
+from pygments.token import (
+ Keyword, Name, Comment, String, Error,
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+)
class FlaskyStyle(Style):
diff --git a/docs/conf.py b/docs/conf.py
index 10570a6..0fb6d4a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,19 +1,6 @@
-#
-# Documentation build configuration file, created by
-# sphinx-quickstart on Thu Feb 27 20:00:23 2014.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
+import datetime
import os
import sys
-import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -24,9 +11,6 @@
# -- General configuration ------------------------------------------------
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
@@ -48,18 +32,12 @@
# The suffix of source filenames.
source_suffix = '.rst'
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = metadata.__package_name__.replace('-', ' ').capitalize()
-copyright = '{}, {}'.format(
- datetime.date.today().year,
- metadata.__author__,
-)
+copyright = f'{datetime.date.today().year}, {metadata.__author__}'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -70,16 +48,6 @@
# The full version, including alpha/beta/rc tags.
release = metadata.__version__
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
@@ -88,266 +56,27 @@
# documents.
default_role = 'py:obj'
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
-
-
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'wolph'
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
-# The name for this set of Sphinx documents. If None, it defaults to
-# " v documentation".
-#html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-#html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
# Output file base name for HTML help builder.
htmlhelp_basename = metadata.__package_name__ + '-doc'
-
-# -- Options for LaTeX output ---------------------------------------------
-
-# latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-# }
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [(
- 'index',
- '%s.tex' % metadata.__package_name__,
- '%s Documentation' % metadata.__package_name__.replace('-', ' ').capitalize(),
- metadata.__author__,
- 'manual',
-)]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [(
- 'index',
- metadata.__package_name__,
- '%s Documentation' % metadata.__package_name__.replace('-', ' ').capitalize(),
- [metadata.__author__],
- 1,
-)]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [(
- 'index',
- metadata.__package_name__,
- '%s Documentation' % metadata.__package_name__.replace('-', ' ').capitalize(),
- metadata.__author__,
- metadata.__package_name__,
- metadata.__description__,
- 'Miscellaneous',
-)]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
-
-
-# -- Options for Epub output ----------------------------------------------
-
-# Bibliographic Dublin Core info.
-epub_title = metadata.__package_name__.replace('-', ' ').capitalize()
-epub_author = metadata.__author__
-epub_publisher = metadata.__author__
-epub_copyright = copyright
-
-# The HTML theme for the epub output. Since the default themes are not optimized
-# for small screen space, using the same theme for HTML and epub output is
-# usually not wise. This defaults to 'epub', a theme designed to save visual
-# space.
-#epub_theme = 'epub'
-
-# The language of the text. It defaults to the language option
-# or en if the language is not set.
-#epub_language = ''
-
-# The scheme of the identifier. Typical schemes are ISBN or URL.
-#epub_scheme = ''
-
-# The unique identifier of the text. This can be a ISBN number
-# or the project homepage.
-#epub_identifier = ''
-
-# A unique identification for the text.
-#epub_uid = ''
-
-# A tuple containing the cover image and cover page html template filenames.
-#epub_cover = ()
-
-# A sequence of (type, uri, title) tuples for the guide element of content.opf.
-#epub_guide = ()
-
-# HTML files that should be inserted before the pages created by sphinx.
-# The format is a list of tuples containing the path and title.
-#epub_pre_files = []
-
-# HTML files that should be inserted after the pages created by sphinx.
-# The format is a list of tuples containing the path and title.
-#epub_post_files = []
-
-# A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
-
-# The depth of the table of contents in toc.ncx.
-#epub_tocdepth = 3
-
-# Allow duplicate toc entries.
-#epub_tocdup = True
-
-# Choose between 'default' and 'includehidden'.
-#epub_tocscope = 'default'
-
-# Fix unsupported image types using the PIL.
-#epub_fix_images = False
-
-# Scale large images.
-#epub_max_image_width = 0
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#epub_show_urls = 'inline'
-
-# If false, no index is generated.
-#epub_use_index = True
-
-
# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/3/': None}
+intersphinx_mapping = dict(
+ python=('http://docs.python.org/3/', None),
+)
diff --git a/docs/portalocker.rst b/docs/portalocker.rst
index 9050d7a..7f1603a 100644
--- a/docs/portalocker.rst
+++ b/docs/portalocker.rst
@@ -11,6 +11,7 @@ Submodules
portalocker.exceptions
portalocker.portalocker
portalocker.utils
+ portalocker.types
Module contents
---------------
diff --git a/docs/portalocker.types.rst b/docs/portalocker.types.rst
new file mode 100644
index 0000000..a12cf29
--- /dev/null
+++ b/docs/portalocker.types.rst
@@ -0,0 +1,7 @@
+portalocker.types module
+========================
+
+.. automodule:: portalocker.types
+ :members:
+ :show-inheritance:
+ :undoc-members:
diff --git a/lefthook.yml b/lefthook.yml
new file mode 100644
index 0000000..2dadf6f
--- /dev/null
+++ b/lefthook.yml
@@ -0,0 +1,30 @@
+#pre-push:
+# parallel: true
+
+templates:
+ ruff: 'uvx ruff'
+
+pre-commit:
+ parallel: true
+ commands:
+ uv-lock-sort:
+ glob: 'pyproject.toml'
+ run: uv lock
+ stage_fixed: true
+ uv-sort:
+ glob: 'pyproject.toml'
+ run: uvx uv-sort pyproject.toml
+ stage_fixed: true
+ validate-pyproject:
+ glob: 'pyproject.toml'
+ run: uvx --with=tomli --with=packaging validate-pyproject pyproject.toml
+ ruff:
+ glob: '*.py'
+ run: |
+ {ruff} check --exit-zero --fix {staged_files}
+ {ruff} format {staged_files}
+ stage_fixed: true
+ nbstripout:
+ glob: '*.ipynb'
+ run: uv run nbstripout {staged_files}
+ stage_fixed: true
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index 5c5b7e3..0000000
--- a/mypy.ini
+++ /dev/null
@@ -1,8 +0,0 @@
-[mypy]
-warn_return_any = True
-warn_unused_configs = True
-files = portalocker
-
-ignore_missing_imports = True
-
-check_untyped_defs = True
\ No newline at end of file
diff --git a/portalocker/__about__.py b/portalocker/__about__.py
index a0b817a..59b031d 100644
--- a/portalocker/__about__.py
+++ b/portalocker/__about__.py
@@ -1,6 +1,65 @@
+"""Version and package metadata helpers for portalocker.
+
+This module resolves the runtime version by preferring installed package
+metadata and falling back to parsing the local ``pyproject.toml`` when
+needed.
+"""
+
+import re
+from importlib import metadata as importlib_metadata
+from pathlib import Path
+from typing import Optional
+
__package_name__ = 'portalocker'
__author__ = 'Rick van Hattem'
__email__ = 'wolph@wol.ph'
-__version__ = '2.10.1'
-__description__ = '''Wraps the portalocker recipe for easy usage'''
+__description__ = 'Wraps the portalocker recipe for easy usage'
__url__ = 'https://github.com/WoLpH/portalocker'
+
+
+def _read_pyproject_version(path: Path) -> Optional[str]: # pragma: no cover
+ """Read the version from a ``pyproject.toml`` file if available.
+
+ This uses a small regex parser that looks for the ``[project]`` table
+ and extracts the ``version`` value. It's intentionally minimal to avoid
+ runtime dependencies while keeping types precise.
+
+ Args:
+ path: Path to the ``pyproject.toml`` file.
+
+ Returns:
+ The version string if it could be determined, otherwise ``None``.
+ """
+ try:
+ content = path.read_text(encoding='utf-8')
+ except Exception:
+ return None
+
+ match = re.search(
+ r"(?ms)^\[project\].*?^version\s*=\s*['\"]([^'\"]+)['\"]",
+ content,
+ )
+ return match.group(1) if match else None
+
+
+def get_version() -> str: # pragma: no cover
+ """Return the package version at runtime.
+
+ Prefers installed package metadata. When running from a source tree it
+ falls back to parsing the ``pyproject.toml`` ``[project].version``
+ field.
+
+ Returns:
+ The resolved version string. Returns ``'0.0.0'`` as a last resort.
+ """
+ try:
+ return importlib_metadata.version(__package_name__)
+ except Exception:
+ pass
+
+ root = Path(__file__).resolve().parent.parent
+ version = _read_pyproject_version(root / 'pyproject.toml')
+ return version or '0.0.0'
+
+
+__version__ = get_version()
diff --git a/portalocker/__init__.py b/portalocker/__init__.py
index 7e757ef..1aa48ea 100644
--- a/portalocker/__init__.py
+++ b/portalocker/__init__.py
@@ -2,6 +2,8 @@
from .utils import (
BoundedSemaphore,
Lock,
+ NamedBoundedSemaphore,
+ PidFileLock,
RLock,
TemporaryFileLock,
open_atomic,
@@ -10,7 +12,7 @@
try: # pragma: no cover
from .redis import RedisLock
except ImportError: # pragma: no cover
- RedisLock = None # type: ignore
+ RedisLock = None # type: ignore[assignment,misc]
#: The package name on Pypi
@@ -20,7 +22,7 @@
#: Current author's email address
__email__ = __about__.__email__
#: Version number
-__version__ = '2.10.1'
+__version__ = __about__.__version__
#: Package description for Pypi
__description__ = __about__.__description__
#: Package homepage
@@ -61,19 +63,21 @@
#: context wrappers
__all__ = [
- 'lock',
- 'unlock',
'LOCK_EX',
- 'LOCK_SH',
'LOCK_NB',
+ 'LOCK_SH',
'LOCK_UN',
- 'LockFlags',
- 'LockException',
- 'Lock',
- 'RLock',
'AlreadyLocked',
'BoundedSemaphore',
+ 'Lock',
+ 'LockException',
+ 'LockFlags',
+ 'NamedBoundedSemaphore',
+ 'PidFileLock',
+ 'RLock',
+ 'RedisLock',
'TemporaryFileLock',
+ 'lock',
'open_atomic',
- 'RedisLock',
+ 'unlock',
]
diff --git a/portalocker/__main__.py b/portalocker/__main__.py
index ecac207..a573ad7 100644
--- a/portalocker/__main__.py
+++ b/portalocker/__main__.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
import argparse
import logging
-import os
import pathlib
import re
+import subprocess
import typing
base_path = pathlib.Path(__file__).parent.parent
@@ -25,7 +27,7 @@
logger = logging.getLogger(__name__)
-def main(argv=None):
+def main(argv: typing.Sequence[str] | None = None) -> None:
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(required=True)
@@ -46,64 +48,91 @@ def main(argv=None):
args.func(args)
-def _read_file(path: pathlib.Path, seen_files: typing.Set[pathlib.Path]):
+def _read_file( # noqa: C901
+ path: pathlib.Path,
+ seen_files: set[pathlib.Path],
+) -> typing.Iterator[str]:
if path in seen_files:
return
- names = set()
+ names: set[str] = set()
seen_files.add(path)
paren = False
from_ = None
- for line in path.open():
- if paren:
- if ')' in line:
- line = line.split(')', 1)[1]
- paren = False
+ try:
+ for line in path.open(encoding='ascii'):
+ if '__future__' in line:
continue
- match = _NAMES_RE.match(line)
- else:
- match = _RELATIVE_IMPORT_RE.match(line)
-
- if match:
- if not paren:
- paren = bool(match.group('paren'))
- from_ = match.group('from')
+ if paren:
+ if ')' in line:
+ line = line.split(')', 1)[1]
+ paren = False
+ continue
- if from_:
- names.add(from_)
- yield from _read_file(src_path / f'{from_}.py', seen_files)
+ match = _NAMES_RE.match(line)
+ else:
+ match = _RELATIVE_IMPORT_RE.match(line)
+
+ if match:
+ if not paren:
+ paren = bool(match.group('paren'))
+ from_ = match.group('from')
+
+ if from_:
+ names.add(from_)
+ yield from _read_file(src_path / f'{from_}.py', seen_files)
+ else:
+ for name in match.group('names').split(','):
+ name = name.strip()
+ names.add(name)
+ yield from _read_file(
+ src_path / f'{name}.py', seen_files
+ )
else:
- for name in match.group('names').split(','):
- name = name.strip()
- names.add(name)
- yield from _read_file(src_path / f'{name}.py', seen_files)
- else:
- yield _clean_line(line, names)
+ yield _clean_line(line, names)
+ except UnicodeDecodeError as exception: # pragma: no cover
+ _, text, start_byte, end_byte, error = exception.args
+ offset = 100
+ snippet = text[max(start_byte - offset, 0) : end_byte + offset]
+ logger.error( # noqa: TRY400
+ f'Invalid encoding for {path}: {error} at byte '
+ f'({start_byte}:{end_byte})\n'
+ f'Snippet: {snippet!r}'
+ )
+ raise
-def _clean_line(line, names):
+
+def _clean_line(line: str, names: set[str]) -> str:
# Replace `some_import.spam` with `spam`
if names:
joined_names = '|'.join(names)
- line = re.sub(fr'\b({joined_names})\.', '', line)
+ line = re.sub(rf'\b({joined_names})\.', '', line)
# Replace useless assignments (e.g. `spam = spam`)
return _USELESS_ASSIGNMENT_RE.sub('', line)
-def combine(args):
+def combine(args: argparse.Namespace) -> None:
output_file = args.output_file
pathlib.Path(output_file.name).parent.mkdir(parents=True, exist_ok=True)
+ # We're handling this separately because it has to be the first import.
+ output_file.write('from __future__ import annotations\n')
+
output_file.write(
- _TEXT_TEMPLATE.format((base_path / 'README.rst').read_text()),
+ _TEXT_TEMPLATE.format(
+ (base_path / 'README.rst').read_text(encoding='ascii')
+ ),
)
output_file.write(
- _TEXT_TEMPLATE.format((base_path / 'LICENSE').read_text()),
+ _TEXT_TEMPLATE.format(
+ (base_path / 'LICENSE').read_text(encoding='ascii')
+ ),
)
- seen_files: typing.Set[pathlib.Path] = set()
+ seen_files: set[pathlib.Path] = set()
for line in _read_file(src_path / '__init__.py', seen_files):
output_file.write(line)
@@ -111,10 +140,18 @@ def combine(args):
output_file.close()
logger.info(f'Wrote combined file to {output_file.name}')
- # Run black and ruff if available. If not then just run the file.
- os.system(f'black {output_file.name}')
- os.system(f'ruff --fix {output_file.name}')
- os.system(f'python3 {output_file.name}')
+ # Run ruff if available. If not then just run the file.
+ try: # pragma: no cover
+ subprocess.run(['ruff', 'format', output_file.name], timeout=3)
+ subprocess.run(
+ ['ruff', 'check', '--fix', '--fix-only', output_file.name],
+ timeout=3,
+ )
+ except FileNotFoundError: # pragma: no cover
+ logger.warning(
+ 'Ruff is not installed. Skipping linting and formatting step.'
+ )
+ subprocess.run(['python3', output_file.name])
if __name__ == '__main__':
diff --git a/portalocker/constants.py b/portalocker/constants.py
index 2099f1f..5787725 100644
--- a/portalocker/constants.py
+++ b/portalocker/constants.py
@@ -1,4 +1,4 @@
-'''
+"""
Locking constants
Lock types:
@@ -13,7 +13,7 @@
Manually unlock, only needed internally
- `UNBLOCK` unlock
-'''
+"""
import enum
import os
@@ -30,19 +30,19 @@
#: non-blocking
LOCK_NB = 0x4
#: unlock
- LOCK_UN = msvcrt.LK_UNLCK # type: ignore
+ LOCK_UN = msvcrt.LK_UNLCK # type: ignore[attr-defined]
elif os.name == 'posix': # pragma: no cover
import fcntl
#: exclusive lock
- LOCK_EX = fcntl.LOCK_EX
+ LOCK_EX = fcntl.LOCK_EX # type: ignore[attr-defined]
#: shared lock
- LOCK_SH = fcntl.LOCK_SH
+ LOCK_SH = fcntl.LOCK_SH # type: ignore[attr-defined]
#: non-blocking
- LOCK_NB = fcntl.LOCK_NB
+ LOCK_NB = fcntl.LOCK_NB # type: ignore[attr-defined]
#: unlock
- LOCK_UN = fcntl.LOCK_UN
+ LOCK_UN = fcntl.LOCK_UN # type: ignore[attr-defined]
else: # pragma: no cover
raise RuntimeError('PortaLocker only defined for nt and posix platforms')
diff --git a/portalocker/exceptions.py b/portalocker/exceptions.py
index e871d13..00142b0 100644
--- a/portalocker/exceptions.py
+++ b/portalocker/exceptions.py
@@ -1,17 +1,26 @@
import typing
+from . import types
+
class BaseLockException(Exception): # noqa: N818
# Error codes:
- LOCK_FAILED = 1
+ LOCK_FAILED: typing.Final = 1
+
+ strerror: typing.Optional[str] = None # ensure attribute always exists
def __init__(
self,
*args: typing.Any,
- fh: typing.Union[typing.IO, None, int] = None,
+ fh: typing.Union[types.IO, None, int, types.HasFileno] = None,
**kwargs: typing.Any,
) -> None:
self.fh = fh
+ self.strerror = kwargs.get('strerror') or (
+ str(args[1])
+ if len(args) > 1 and isinstance(args[1], str)
+ else None
+ )
Exception.__init__(self, *args)
diff --git a/portalocker/portalocker.py b/portalocker/portalocker.py
index ceceeaa..006cb57 100644
--- a/portalocker/portalocker.py
+++ b/portalocker/portalocker.py
@@ -1,154 +1,444 @@
+# pyright: reportUnknownMemberType=false, reportAttributeAccessIssue=false
+"""Module portalocker.
+
+This module provides cross-platform file locking functionality.
+The Windows implementation now supports two variants:
+
+ 1. A default method using the Win32 API (win32file.LockFileEx/UnlockFileEx).
+ 2. An alternative that uses msvcrt.locking for exclusive locks (shared
+ locks still use the Win32 API).
+
+This version uses classes to encapsulate locking logic, while maintaining
+the original external API, including the LOCKER constant for specific
+backwards compatibility (POSIX) and Windows behavior.
+"""
+
+import io
import os
import typing
+from typing import (
+ Any,
+ Callable,
+ Optional,
+ Union,
+ cast,
+)
-from . import constants, exceptions
+from . import constants, exceptions, types
-# Alias for readability. Due to import recursion issues we cannot do:
-# from .constants import LockFlags
+# Alias for readability
LockFlags = constants.LockFlags
-class HasFileno(typing.Protocol):
- def fileno(self) -> int: ...
+# Define a protocol for callable lockers
+class LockCallable(typing.Protocol):
+ def __call__(
+ self, file_obj: types.FileArgument, flags: LockFlags
+ ) -> None: ...
-LOCKER: typing.Optional[typing.Callable[
- [typing.Union[int, HasFileno], int], typing.Any]] = None
+class UnlockCallable(typing.Protocol):
+ def __call__(self, file_obj: types.FileArgument) -> None: ...
-if os.name == 'nt': # pragma: no cover
- import msvcrt
+class BaseLocker:
+ """Base class for locker implementations."""
- import pywintypes
- import win32con
- import win32file
- import winerror
+ def lock(self, file_obj: types.FileArgument, flags: LockFlags) -> None:
+ """Lock the file."""
+ raise NotImplementedError
- __overlapped = pywintypes.OVERLAPPED()
+ def unlock(self, file_obj: types.FileArgument) -> None:
+ """Unlock the file."""
+ raise NotImplementedError
- def lock(file_: typing.Union[typing.IO, int], flags: LockFlags):
- # Windows locking does not support locking through `fh.fileno()` so
- # we cast it to make mypy and pyright happy
- file_ = typing.cast(typing.IO, file_)
- mode = 0
- if flags & LockFlags.NON_BLOCKING:
- mode |= win32con.LOCKFILE_FAIL_IMMEDIATELY
+# Define refined LockerType with more specific types
+LockerType = Union[
+ # POSIX-style fcntl.flock callable
+ Callable[[Union[int, types.HasFileno], int], Any],
+ # Tuple of lock and unlock functions
+ tuple[LockCallable, UnlockCallable],
+ # BaseLocker instance
+ BaseLocker,
+ # BaseLocker class
+ type[BaseLocker],
+]
- if flags & LockFlags.EXCLUSIVE:
- mode |= win32con.LOCKFILE_EXCLUSIVE_LOCK
+LOCKER: LockerType
- # Save the old position so we can go back to that position but
- # still lock from the beginning of the file
- savepos = file_.tell()
- if savepos:
- file_.seek(0)
+if os.name == 'nt': # pragma: not-posix
+ # Windows-specific helper functions
+ def _prepare_windows_file(
+ file_obj: types.FileArgument,
+ ) -> tuple[int, Optional[typing.IO[Any]], Optional[int]]:
+ """Prepare file for Windows: get fd, optionally seek and save pos."""
+ if isinstance(file_obj, int):
+ # Plain file descriptor
+ return file_obj, None, None
+
+ # Full IO objects (have tell/seek) -> preserve and restore position
+ if isinstance(file_obj, io.IOBase):
+ fd: int = file_obj.fileno()
+ original_pos = file_obj.tell()
+ if original_pos != 0:
+ file_obj.seek(0)
+ return fd, typing.cast(typing.IO[Any], file_obj), original_pos
+ # cast satisfies mypy: IOBase -> IO[Any]
+
+ # Fallback: an object that only implements fileno() (HasFileno)
+ fd = typing.cast(types.HasFileno, file_obj).fileno() # type: ignore[redundant-cast]
+ return fd, None, None
+
+ def _restore_windows_file_pos(
+ file_io_obj: Optional[typing.IO[Any]],
+ original_pos: Optional[int],
+ ) -> None:
+ """Restore file position if it was an IO object and pos was saved."""
+ if file_io_obj and original_pos is not None and original_pos != 0:
+ file_io_obj.seek(original_pos)
+
+ class Win32Locker(BaseLocker):
+ """Locker using Win32 API (LockFileEx/UnlockFileEx)."""
+
+ _overlapped: Any # pywintypes.OVERLAPPED
+ _lock_bytes_low: int = -0x10000
+
+ def __init__(self) -> None:
+ try:
+ import pywintypes
+ except ImportError as e:
+ raise ImportError(
+ 'pywintypes is required for Win32Locker but not '
+ 'found. Please install pywin32.'
+ ) from e
+ self._overlapped = pywintypes.OVERLAPPED()
+
+ def _get_os_handle(self, fd: int) -> int:
+ try:
+ import msvcrt
+ except ImportError as e:
+ raise ImportError(
+ 'msvcrt is required for _get_os_handle on Windows '
+ 'but not found.'
+ ) from e
+ return cast(int, msvcrt.get_osfhandle(fd)) # type: ignore[attr-defined,redundant-cast]
+
+ def lock(self, file_obj: types.FileArgument, flags: LockFlags) -> None:
+ import pywintypes
+ import win32con
+ import win32file
+ import winerror
+
+ fd, io_obj_ctx, pos_ctx = _prepare_windows_file(file_obj)
+ os_fh = self._get_os_handle(fd)
+
+ mode = 0
+ if flags & LockFlags.NON_BLOCKING:
+ mode |= win32con.LOCKFILE_FAIL_IMMEDIATELY
+ if flags & LockFlags.EXCLUSIVE:
+ mode |= win32con.LOCKFILE_EXCLUSIVE_LOCK
+
+ try:
+ win32file.LockFileEx(
+ os_fh, mode, 0, self._lock_bytes_low, self._overlapped
+ )
+ except pywintypes.error as exc_value: # type: ignore[misc]
+ if exc_value.winerror == winerror.ERROR_LOCK_VIOLATION:
+ raise exceptions.AlreadyLocked(
+ exceptions.LockException.LOCK_FAILED,
+ exc_value.strerror,
+ fh=file_obj, # Pass original file_obj
+ ) from exc_value
+ else:
+ raise
+ finally:
+ _restore_windows_file_pos(io_obj_ctx, pos_ctx)
+
+ def unlock(self, file_obj: types.FileArgument) -> None:
+ import pywintypes
+ import win32file
+ import winerror
+
+ fd, io_obj_ctx, pos_ctx = _prepare_windows_file(file_obj)
+ os_fh = self._get_os_handle(fd)
- os_fh = msvcrt.get_osfhandle(file_.fileno()) # type: ignore
- try:
- win32file.LockFileEx(os_fh, mode, 0, -0x10000, __overlapped)
- except pywintypes.error as exc_value:
- # error: (33, 'LockFileEx', 'The process cannot access the file
- # because another process has locked a portion of the file.')
- if exc_value.winerror == winerror.ERROR_LOCK_VIOLATION:
- raise exceptions.AlreadyLocked(
- exceptions.LockException.LOCK_FAILED,
- exc_value.strerror,
- fh=file_,
- ) from exc_value
- else:
- # Q: Are there exceptions/codes we should be dealing with
- # here?
- raise
- finally:
- if savepos:
- file_.seek(savepos)
-
- def unlock(file_: typing.IO):
- try:
- savepos = file_.tell()
- if savepos:
- file_.seek(0)
-
- os_fh = msvcrt.get_osfhandle(file_.fileno()) # type: ignore
try:
win32file.UnlockFileEx(
- os_fh,
- 0,
- -0x10000,
- __overlapped,
+ os_fh, 0, self._lock_bytes_low, self._overlapped
)
- except pywintypes.error as exc:
+ except pywintypes.error as exc: # type: ignore[misc]
if exc.winerror != winerror.ERROR_NOT_LOCKED:
- # Q: Are there exceptions/codes we should be
- # dealing with here?
- raise
+ raise exceptions.LockException(
+ exceptions.LockException.LOCK_FAILED,
+ exc.strerror,
+ fh=file_obj, # Pass original file_obj
+ ) from exc
+ except OSError as exc:
+ raise exceptions.LockException(
+ exceptions.LockException.LOCK_FAILED,
+ exc.strerror,
+ fh=file_obj, # Pass original file_obj
+ ) from exc
finally:
- if savepos:
- file_.seek(savepos)
- except OSError as exc:
- raise exceptions.LockException(
- exceptions.LockException.LOCK_FAILED,
- exc.strerror,
- fh=file_,
- ) from exc
-
-elif os.name == 'posix': # pragma: no cover
- import errno
- import fcntl
+ _restore_windows_file_pos(io_obj_ctx, pos_ctx)
+
+ class MsvcrtLocker(BaseLocker):
+ _win32_locker: Win32Locker
+ _msvcrt_lock_length: int = 0x10000
- # The locking implementation.
- # Expected values are either fcntl.flock() or fcntl.lockf(),
- # but any callable that matches the syntax will be accepted.
- LOCKER = fcntl.flock
-
- def lock(file_: typing.Union[typing.IO, int], flags: LockFlags):
- assert LOCKER is not None, 'We need a locking function in `LOCKER` '
- # Locking with NON_BLOCKING without EXCLUSIVE or SHARED enabled
- # results in an error
- if (flags & LockFlags.NON_BLOCKING) and not flags & (
- LockFlags.SHARED | LockFlags.EXCLUSIVE
- ):
- raise RuntimeError(
- 'When locking in non-blocking mode the SHARED '
- 'or EXCLUSIVE flag must be specified as well',
+ def __init__(self) -> None:
+ self._win32_locker = Win32Locker()
+ try:
+ import msvcrt
+ except ImportError as e:
+ raise ImportError(
+ 'msvcrt is required for MsvcrtLocker but not found.'
+ ) from e
+
+ attrs = ['LK_LOCK', 'LK_RLCK', 'LK_NBLCK', 'LK_UNLCK', 'LK_NBRLCK']
+ defaults = [1, 3, 2, 0, 4]  # CRT locking.h: LK_UNLCK=0, LK_LOCK=1, LK_NBLCK=2, LK_RLCK=3, LK_NBRLCK=4
+ for attr, default_val in zip(attrs, defaults):
+ if not hasattr(msvcrt, attr):
+ setattr(msvcrt, attr, default_val)
+
+ def lock(self, file_obj: types.FileArgument, flags: LockFlags) -> None:
+ import msvcrt
+
+ if flags & LockFlags.SHARED:
+ win32_api_flags = LockFlags(0)
+ if flags & LockFlags.NON_BLOCKING:
+ win32_api_flags |= LockFlags.NON_BLOCKING
+ self._win32_locker.lock(file_obj, win32_api_flags)
+ return
+
+ fd, io_obj_ctx, pos_ctx = _prepare_windows_file(file_obj)
+ mode = (
+ msvcrt.LK_NBLCK # type: ignore[attr-defined]
+ if flags & LockFlags.NON_BLOCKING
+ else msvcrt.LK_LOCK # type: ignore[attr-defined]
)
- try:
- LOCKER(file_, flags)
- except OSError as exc_value:
- # Python can use one of several different exception classes to
- # represent timeout (most likely is BlockingIOError and IOError),
- # but these errors may also represent other failures. On some
- # systems, `IOError is OSError` which means checking for either
- # IOError or OSError can mask other errors.
- # The safest check is to catch OSError (from which the others
- # inherit) and check the errno (which should be EACCESS or EAGAIN
- # according to the spec).
- if exc_value.errno in (errno.EACCES, errno.EAGAIN):
- # A timeout exception, wrap this so the outer code knows to try
- # again (if it wants to).
- raise exceptions.AlreadyLocked(
- exc_value,
- fh=file_,
+ try:
+ msvcrt.locking( # type: ignore[attr-defined]
+ fd,
+ mode,
+ self._msvcrt_lock_length,
+ )
+ except OSError as exc_value:
+ if exc_value.errno in (13, 16, 33, 36):
+ raise exceptions.AlreadyLocked(
+ exceptions.LockException.LOCK_FAILED,
+ str(exc_value),
+ fh=file_obj, # Pass original file_obj
+ ) from exc_value
+ raise exceptions.LockException(
+ exceptions.LockException.LOCK_FAILED,
+ str(exc_value),
+ fh=file_obj, # Pass original file_obj
) from exc_value
+ finally:
+ _restore_windows_file_pos(io_obj_ctx, pos_ctx)
+
+ def unlock(self, file_obj: types.FileArgument) -> None:
+ import msvcrt
+
+ fd, io_obj_ctx, pos_ctx = _prepare_windows_file(file_obj)
+ took_fallback_path = False
+
+ try:
+ msvcrt.locking( # type: ignore[attr-defined]
+ fd,
+ msvcrt.LK_UNLCK, # type: ignore[attr-defined]
+ self._msvcrt_lock_length,
+ )
+ except OSError as exc:
+ if exc.errno == 13: # EACCES (Permission denied)
+ took_fallback_path = True
+ # Restore position before calling win32_locker,
+ # as it will re-prepare.
+ _restore_windows_file_pos(io_obj_ctx, pos_ctx)
+ try:
+ self._win32_locker.unlock(
+ file_obj
+ ) # win32_locker handles its own seeking
+ except exceptions.LockException as win32_exc:
+ raise exceptions.LockException(
+ exceptions.LockException.LOCK_FAILED,
+ f'msvcrt unlock failed ({exc.strerror}), and '
+ f'win32 fallback failed ({win32_exc.strerror})',
+ fh=file_obj,
+ ) from win32_exc
+ except Exception as final_exc:
+ raise exceptions.LockException(
+ exceptions.LockException.LOCK_FAILED,
+ f'msvcrt unlock failed ({exc.strerror}), and '
+ f'win32 fallback failed with unexpected error: '
+ f'{final_exc!s}',
+ fh=file_obj,
+ ) from final_exc
+ else:
+ raise exceptions.LockException(
+ exceptions.LockException.LOCK_FAILED,
+ exc.strerror,
+ fh=file_obj,
+ ) from exc
+ finally:
+ if not took_fallback_path:
+ _restore_windows_file_pos(io_obj_ctx, pos_ctx)
+
+ _locker_instances: dict[type[BaseLocker], BaseLocker] = dict()
+
+ LOCKER = MsvcrtLocker # type: ignore[reportConstantRedefinition]
+
+ def lock(file: types.FileArgument, flags: LockFlags) -> None:
+ if isinstance(LOCKER, BaseLocker):
+ # If LOCKER is a BaseLocker instance, use its lock method
+ locker: Callable[[types.FileArgument, LockFlags], None] = (
+ LOCKER.lock
+ )
+ elif isinstance(LOCKER, tuple):
+ locker = LOCKER[0] # type: ignore[reportUnknownVariableType]
+ elif issubclass(LOCKER, BaseLocker): # type: ignore[unreachable,arg-type] # pyright: ignore [reportUnnecessaryIsInstance]
+ locker_instance = _locker_instances.get(LOCKER) # type: ignore[arg-type]
+ if locker_instance is None:
+ # Create an instance of the locker class if not already done
+ _locker_instances[LOCKER] = locker_instance = LOCKER() # type: ignore[ignore,index,call-arg]
+
+ locker = locker_instance.lock
+ else:
+ raise TypeError(
+ f'LOCKER must be a BaseLocker instance, a tuple of lock and '
+ f'unlock functions, or a subclass of BaseLocker, '
+ f'got {type(LOCKER)}.'
+ )
+
+ locker(file, flags)
+
+ def unlock(file: types.FileArgument) -> None:
+ if isinstance(LOCKER, BaseLocker):
+ # If LOCKER is a BaseLocker instance, use its lock method
+ unlocker: Callable[[types.FileArgument], None] = LOCKER.unlock
+ elif isinstance(LOCKER, tuple):
+ unlocker = LOCKER[1] # type: ignore[reportUnknownVariableType]
+
+ elif issubclass(LOCKER, BaseLocker): # type: ignore[unreachable,arg-type] # pyright: ignore [reportUnnecessaryIsInstance]
+ locker_instance = _locker_instances.get(LOCKER) # type: ignore[arg-type]
+ if locker_instance is None:
+ # Create an instance of the locker class if not already done
+ _locker_instances[LOCKER] = locker_instance = LOCKER() # type: ignore[ignore,index,call-arg]
+
+ unlocker = locker_instance.unlock
+ else:
+ raise TypeError(
+ f'LOCKER must be a BaseLocker instance, a tuple of lock and '
+ f'unlock functions, or a subclass of BaseLocker, '
+ f'got {type(LOCKER)}.'
+ )
+
+ unlocker(file)
+
+else: # pragma: not-nt
+ import errno
+ import fcntl
+
+ # PosixLocker methods accept FileArgument | HasFileno
+ PosixFileArgument = Union[types.FileArgument, types.HasFileno]
+
+ class PosixLocker(BaseLocker):
+ """Locker implementation using the `LOCKER` constant"""
+
+ _locker: Optional[
+ Callable[[Union[int, types.HasFileno], int], Any]
+ ] = None
+
+ @property
+ def locker(self) -> Callable[[Union[int, types.HasFileno], int], Any]:
+ if self._locker is None:
+ # On POSIX systems ``LOCKER`` is a callable (fcntl.flock) but
+ # mypy also sees the Windows-only tuple assignment. Explicitly
+ # cast so mypy knows we are returning the callable variant
+ # here.
+ return cast(
+ Callable[[Union[int, types.HasFileno], int], Any], LOCKER
+ ) # pyright: ignore[reportUnnecessaryCast]
+
+ # mypy does not realise ``self._locker`` is non-None after the
+ # check
+ assert self._locker is not None
+ return self._locker
+
+ def _get_fd(self, file_obj: PosixFileArgument) -> int:
+ if isinstance(file_obj, int):
+ return file_obj
+ # Check for fileno() method; covers typing.IO and HasFileno
+ elif hasattr(file_obj, 'fileno') and callable(file_obj.fileno):
+ return file_obj.fileno()
else:
- # Something else went wrong; don't wrap this so we stop
- # immediately.
+ # Should not be reached if PosixFileArgument is correct.
+ # isinstance(file_obj, io.IOBase) could be an
+ # alternative check
+ # but hasattr is more general for HasFileno.
+ raise TypeError(
+ "Argument 'file_obj' must be an int, an IO object "
+ 'with fileno(), or implement HasFileno.'
+ )
+
+ def lock(self, file_obj: PosixFileArgument, flags: LockFlags) -> None:
+ if (flags & LockFlags.NON_BLOCKING) and not flags & (
+ LockFlags.SHARED | LockFlags.EXCLUSIVE
+ ):
+ raise RuntimeError(
+ 'When locking in non-blocking mode on POSIX, '
+ 'the SHARED or EXCLUSIVE flag must be specified as well.'
+ )
+
+ fd = self._get_fd(file_obj)
+ try:
+ self.locker(fd, flags)
+ except OSError as exc_value:
+ if exc_value.errno in (errno.EACCES, errno.EAGAIN):
+ raise exceptions.AlreadyLocked(
+ exc_value,
+ strerror=str(exc_value),
+ fh=file_obj, # Pass original file_obj
+ ) from exc_value
+ else:
+ raise exceptions.LockException(
+ exc_value,
+ strerror=str(exc_value),
+ fh=file_obj, # Pass original file_obj
+ ) from exc_value
+ except EOFError as exc_value: # NFS specific
raise exceptions.LockException(
exc_value,
- fh=file_,
+ strerror=str(exc_value),
+ fh=file_obj, # Pass original file_obj
) from exc_value
- except EOFError as exc_value:
- # On NFS filesystems, flock can raise an EOFError
- raise exceptions.LockException(
- exc_value,
- fh=file_,
- ) from exc_value
-
- def unlock(file_: typing.IO):
- assert LOCKER is not None, 'We need a locking function in `LOCKER` '
- LOCKER(file_.fileno(), LockFlags.UNBLOCK)
-
-else: # pragma: no cover
- raise RuntimeError('PortaLocker only defined for nt and posix platforms')
+
+ def unlock(self, file_obj: PosixFileArgument) -> None:
+ fd = self._get_fd(file_obj)
+ self.locker(fd, LockFlags.UNBLOCK)
+
+ class FlockLocker(PosixLocker):
+ """FlockLocker is a PosixLocker implementation using fcntl.flock."""
+
+ LOCKER = fcntl.flock # type: ignore[attr-defined]
+
+ class LockfLocker(PosixLocker):
+ """LockfLocker is a PosixLocker implementation using fcntl.lockf."""
+
+ LOCKER = fcntl.lockf # type: ignore[attr-defined]
+
+ # LOCKER constant for POSIX is fcntl.flock for backward compatibility.
+ # Type matches: Callable[[Union[int, HasFileno], int], Any]
+ LOCKER = fcntl.flock # type: ignore[attr-defined,reportConstantRedefinition]
+
+ _posix_locker_instance = PosixLocker()
+
+ # Public API for POSIX uses the PosixLocker instance
+ def lock(file: types.FileArgument, flags: LockFlags) -> None:
+ _posix_locker_instance.lock(file, flags)
+
+ def unlock(file: types.FileArgument) -> None:
+ _posix_locker_instance.unlock(file)
diff --git a/portalocker/redis.py b/portalocker/redis.py
index 11ee876..f2e11e6 100644
--- a/portalocker/redis.py
+++ b/portalocker/redis.py
@@ -1,3 +1,6 @@
+# pyright: reportUnknownMemberType=false
+from __future__ import annotations
+
import _thread
import json
import logging
@@ -5,7 +8,7 @@
import time
import typing
-from redis import client
+import redis
from . import exceptions, utils
@@ -15,8 +18,8 @@
DEFAULT_THREAD_SLEEP_TIME = 0.1
-class PubSubWorkerThread(client.PubSubWorkerThread): # type: ignore
- def run(self):
+class PubSubWorkerThread(redis.client.PubSubWorkerThread):
+ def run(self) -> None:
try:
super().run()
except Exception: # pragma: no cover
@@ -25,7 +28,7 @@ def run(self):
class RedisLock(utils.LockBase):
- '''
+ """
An extremely reliable Redis lock based on pubsub with a keep-alive thread
As opposed to most Redis locking systems based on key/value pairs,
@@ -59,31 +62,32 @@ class RedisLock(utils.LockBase):
to override these you need to explicitly specify a value (e.g.
`health_check_interval=0`)
- '''
+ """
- redis_kwargs: typing.Dict[str, typing.Any]
- thread: typing.Optional[PubSubWorkerThread]
+ redis_kwargs: dict[str, typing.Any]
+ thread: PubSubWorkerThread | None
channel: str
timeout: float
- connection: typing.Optional[client.Redis]
- pubsub: typing.Optional[client.PubSub] = None
+ connection: redis.client.Redis[str] | None
+ pubsub: redis.client.PubSub | None = None
close_connection: bool
- DEFAULT_REDIS_KWARGS: typing.ClassVar[typing.Dict[str, typing.Any]] = dict(
+ DEFAULT_REDIS_KWARGS: typing.ClassVar[dict[str, typing.Any]] = dict(
health_check_interval=10,
+ decode_responses=True,
)
def __init__(
self,
channel: str,
- connection: typing.Optional[client.Redis] = None,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = False,
+ connection: redis.client.Redis[str] | None = None,
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = False,
thread_sleep_time: float = DEFAULT_THREAD_SLEEP_TIME,
unavailable_timeout: float = DEFAULT_UNAVAILABLE_TIMEOUT,
- redis_kwargs: typing.Optional[typing.Dict] = None,
- ):
+ redis_kwargs: dict[str, typing.Any] | None = None,
+ ) -> None:
# We don't want to close connections given as an argument
self.close_connection = not connection
@@ -103,18 +107,22 @@ def __init__(
fail_when_locked=fail_when_locked,
)
- def get_connection(self) -> client.Redis:
+ def get_connection(self) -> redis.client.Redis[str]:
if not self.connection:
- self.connection = client.Redis(**self.redis_kwargs)
+ self.connection = redis.client.Redis(**self.redis_kwargs)
return self.connection
- def channel_handler(self, message):
+ def channel_handler(self, message: dict[str, str]) -> None:
if message.get('type') != 'message': # pragma: no cover
return
+ raw_data = message.get('data')
+ if not raw_data:
+ return
+
try:
- data = json.loads(message.get('data'))
+ data = json.loads(raw_data)
except TypeError: # pragma: no cover
logger.debug('TypeError while parsing: %r', message)
return
@@ -123,15 +131,35 @@ def channel_handler(self, message):
self.connection.publish(data['response_channel'], str(time.time()))
@property
- def client_name(self):
+ def client_name(self) -> str:
return f'{self.channel}-lock'
+ def _timeout_generator(
+ self, timeout: float | None, check_interval: float | None
+ ) -> typing.Iterator[int]:
+ if timeout is None:
+ timeout = 0.0
+ if check_interval is None:
+ check_interval = self.thread_sleep_time
+ deadline = time.monotonic() + timeout
+ first = True
+ while first or time.monotonic() < deadline:
+ first = False
+ effective_interval = (
+ check_interval
+ if check_interval > 0
+ else self.thread_sleep_time
+ )
+ sleep_time = effective_interval * (0.5 + random.random())
+ time.sleep(sleep_time)
+ yield 0
+
def acquire( # type: ignore[override]
self,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = None,
- ) -> 'RedisLock':
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
+ ) -> RedisLock:
timeout = utils.coalesce(timeout, self.timeout, 0.0)
check_interval = utils.coalesce(
check_interval,
@@ -176,7 +204,7 @@ def acquire( # type: ignore[override]
sleep_time=self.thread_sleep_time,
)
self.thread.start()
-
+ time.sleep(0.01)
subscribers = connection.pubsub_numsub(self.channel)[0][1]
if subscribers == 1: # pragma: no branch
return self
@@ -185,11 +213,15 @@ def acquire( # type: ignore[override]
self.release()
if fail_when_locked: # pragma: no cover
- raise exceptions.AlreadyLocked(exceptions)
+ raise exceptions.AlreadyLocked()
- raise exceptions.AlreadyLocked(exceptions)
+ raise exceptions.AlreadyLocked()
- def check_or_kill_lock(self, connection, timeout):
+ def check_or_kill_lock(
+ self,
+ connection: redis.client.Redis[str],
+ timeout: float,
+ ) -> bool | None:
# Random channel name to get messages back from the lock
response_channel = f'{self.channel}-{random.random()}'
@@ -217,10 +249,12 @@ def check_or_kill_lock(self, connection, timeout):
for client_ in connection.client_list('pubsub'): # pragma: no cover
if client_.get('name') == self.client_name:
logger.warning('Killing unavailable redis client: %r', client_)
- connection.client_kill_filter(client_.get('id'))
+ connection.client_kill_filter( # pyright: ignore
+ client_.get('id'),
+ )
return None
- def release(self):
+ def release(self) -> None:
if self.thread: # pragma: no branch
self.thread.stop()
self.thread.join()
@@ -232,5 +266,5 @@ def release(self):
self.pubsub.close()
self.pubsub = None
- def __del__(self):
+ def __del__(self) -> None:
self.release()
diff --git a/portalocker/types.py b/portalocker/types.py
new file mode 100644
index 0000000..c08d426
--- /dev/null
+++ b/portalocker/types.py
@@ -0,0 +1,75 @@
+# noqa: A005
+from __future__ import annotations
+
+import io
+import pathlib
+import typing
+from typing import Union
+
+# spellchecker: off
+# fmt: off
+Mode = typing.Literal[
+ # Text modes
+ # Read text
+ 'r', 'rt', 'tr',
+ # Write text
+ 'w', 'wt', 'tw',
+ # Append text
+ 'a', 'at', 'ta',
+ # Exclusive creation text
+ 'x', 'xt', 'tx',
+ # Read and write text
+ 'r+', '+r', 'rt+', 'r+t', '+rt', 'tr+', 't+r', '+tr',
+ # Write and read text
+ 'w+', '+w', 'wt+', 'w+t', '+wt', 'tw+', 't+w', '+tw',
+ # Append and read text
+ 'a+', '+a', 'at+', 'a+t', '+at', 'ta+', 't+a', '+ta',
+ # Exclusive creation and read text
+ 'x+', '+x', 'xt+', 'x+t', '+xt', 'tx+', 't+x', '+tx',
+ # Universal newline support
+ 'U', 'rU', 'Ur', 'rtU', 'rUt', 'Urt', 'trU', 'tUr', 'Utr',
+
+ # Binary modes
+ # Read binary
+ 'rb', 'br',
+ # Write binary
+ 'wb', 'bw',
+ # Append binary
+ 'ab', 'ba',
+ # Exclusive creation binary
+ 'xb', 'bx',
+ # Read and write binary
+ 'rb+', 'r+b', '+rb', 'br+', 'b+r', '+br',
+ # Write and read binary
+ 'wb+', 'w+b', '+wb', 'bw+', 'b+w', '+bw',
+ # Append and read binary
+ 'ab+', 'a+b', '+ab', 'ba+', 'b+a', '+ba',
+ # Exclusive creation and read binary
+ 'xb+', 'x+b', '+xb', 'bx+', 'b+x', '+bx',
+ # Universal newline support in binary mode
+ 'rbU', 'rUb', 'Urb', 'brU', 'bUr', 'Ubr',
+]
+# spellchecker: on
+Filename = Union[str, pathlib.Path]
+IO = Union[ # type: ignore[name-defined]
+ typing.IO[str],
+ typing.IO[bytes],
+]
+
+
+class FileOpenKwargs(typing.TypedDict):
+ buffering: int | None
+ encoding: str | None
+ errors: str | None
+ newline: str | None
+ closefd: bool | None
+ opener: typing.Callable[[str, int], int] | None
+
+
+# Protocol for objects with a fileno() method.
+# Used for type-hinting fcntl.flock.
+class HasFileno(typing.Protocol):
+ def fileno(self) -> int: ...
+
+# Type alias for file arguments used in lock/unlock functions
+FileArgument = Union[typing.IO[typing.Any], io.TextIOWrapper, int, HasFileno]
diff --git a/portalocker/utils.py b/portalocker/utils.py
index 5115b0e..1287df3 100644
--- a/portalocker/utils.py
+++ b/portalocker/utils.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import abc
import atexit
import contextlib
@@ -9,8 +11,10 @@
import time
import typing
import warnings
+import weakref
-from . import constants, exceptions, portalocker
+from . import constants, exceptions, portalocker, types
+from .types import Filename, Mode
logger = logging.getLogger(__name__)
@@ -24,11 +28,9 @@
'open_atomic',
]
-Filename = typing.Union[str, pathlib.Path]
-
def coalesce(*args: typing.Any, test_value: typing.Any = None) -> typing.Any:
- '''Simple coalescing function that returns the first value that is not
+ """Simple coalescing function that returns the first value that is not
equal to the `test_value`. Or `None` if no value is valid. Usually this
means that the last given value is the default value.
@@ -48,7 +50,7 @@ def coalesce(*args: typing.Any, test_value: typing.Any = None) -> typing.Any:
# This won't work because of the `is not test_value` type testing:
>>> coalesce([], dict(spam='eggs'), test_value=[])
[]
- '''
+ """
return next((arg for arg in args if arg is not test_value), None)
@@ -56,8 +58,8 @@ def coalesce(*args: typing.Any, test_value: typing.Any = None) -> typing.Any:
def open_atomic(
filename: Filename,
binary: bool = True,
-) -> typing.Iterator[typing.IO]:
- '''Open a file for atomic writing. Instead of locking this method allows
+) -> typing.Iterator[types.IO]:
+ """Open a file for atomic writing. Instead of locking this method allows
you to write the entire file and move it to the actual location. Note that
this makes the assumption that a rename is atomic on your platform which
is generally the case but not a guarantee.
@@ -80,24 +82,28 @@ def open_atomic(
... written = fh.write(b'test')
>>> assert path_filename.exists()
>>> path_filename.unlink()
- '''
+ """
# `pathlib.Path` cast in case `path` is a `str`
- path: pathlib.Path = pathlib.Path(filename)
+ path: pathlib.Path
+ if isinstance(filename, pathlib.Path):
+ path = filename
+ else:
+ path = pathlib.Path(filename)
assert not path.exists(), f'{path!r} exists'
# Create the parent directory if it doesn't exist
path.parent.mkdir(parents=True, exist_ok=True)
- temp_fh = tempfile.NamedTemporaryFile(
- mode=binary and 'wb' or 'w',
+ with tempfile.NamedTemporaryFile(
+ mode=(binary and 'wb') or 'w',
dir=str(path.parent),
delete=False,
- )
- yield temp_fh
- temp_fh.flush()
- os.fsync(temp_fh.fileno())
- temp_fh.close()
+ ) as temp_fh:
+ yield temp_fh
+ temp_fh.flush()
+ os.fsync(temp_fh.fileno())
+
try:
os.rename(temp_fh.name, path)
finally:
@@ -115,10 +121,10 @@ class LockBase(abc.ABC): # pragma: no cover
def __init__(
self,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = None,
- ):
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
+ ) -> None:
self.timeout = coalesce(timeout, DEFAULT_TIMEOUT)
self.check_interval = coalesce(check_interval, DEFAULT_CHECK_INTERVAL)
self.fail_when_locked = coalesce(
@@ -129,15 +135,15 @@ def __init__(
@abc.abstractmethod
def acquire(
self,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = None,
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
) -> typing.IO[typing.AnyStr]: ...
def _timeout_generator(
self,
- timeout: typing.Optional[float],
- check_interval: typing.Optional[float],
+ timeout: float | None,
+ check_interval: float | None,
) -> typing.Iterator[int]:
f_timeout = coalesce(timeout, self.timeout, 0.0)
f_check_interval = coalesce(check_interval, self.check_interval, 0.0)
@@ -155,26 +161,31 @@ def _timeout_generator(
time.sleep(max(0.001, (i * f_check_interval) - since_start_time))
@abc.abstractmethod
- def release(self): ...
+ def release(self) -> None: ...
def __enter__(self) -> typing.IO[typing.AnyStr]:
return self.acquire()
def __exit__(
self,
- exc_type: typing.Optional[typing.Type[BaseException]],
- exc_value: typing.Optional[BaseException],
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
traceback: typing.Any, # Should be typing.TracebackType
- ) -> typing.Optional[bool]:
+ ) -> bool | None:
self.release()
return None
- def __delete__(self, instance):
+ def __delete__(self, instance: LockBase) -> None:
instance.release()
+ # Ensure cleanup on garbage collection as tests rely on this behaviour
+ def __del__(self) -> None: # pragma: no cover - best effort cleanup
+ with contextlib.suppress(Exception):
+ self.release()
+
class Lock(LockBase):
- '''Lock manager with built-in timeout
+ """Lock manager with built-in timeout
Args:
filename: filename
@@ -192,21 +203,31 @@ class Lock(LockBase):
Note that the file is opened first and locked later. So using 'w' as
mode will result in truncate _BEFORE_ the lock is checked.
- '''
+ """
+
+ fh: types.IO | None
+ filename: str
+ mode: str
+ truncate: bool
+ timeout: float
+ check_interval: float
+ fail_when_locked: bool
+ flags: constants.LockFlags
+ file_open_kwargs: dict[str, typing.Any]
def __init__(
self,
filename: Filename,
- mode: str = 'a',
- timeout: typing.Optional[float] = None,
+ mode: Mode = 'a',
+ timeout: float | None = None,
check_interval: float = DEFAULT_CHECK_INTERVAL,
fail_when_locked: bool = DEFAULT_FAIL_WHEN_LOCKED,
flags: constants.LockFlags = LOCK_METHOD,
- **file_open_kwargs,
- ):
+ **file_open_kwargs: typing.Any,
+ ) -> None:
if 'w' in mode:
truncate = True
- mode = mode.replace('w', 'a')
+ mode = typing.cast(Mode, mode.replace('w', 'a'))
else:
truncate = False
@@ -218,23 +239,21 @@ def __init__(
stacklevel=1,
)
- self.fh: typing.Optional[typing.IO] = None
- self.filename: str = str(filename)
- self.mode: str = mode
- self.truncate: bool = truncate
- self.timeout: float = timeout
- self.check_interval: float = check_interval
- self.fail_when_locked: bool = fail_when_locked
- self.flags: constants.LockFlags = flags
+ self.fh = None
+ self.filename = str(filename)
+ self.mode = mode
+ self.truncate = truncate
+ self.flags = flags
self.file_open_kwargs = file_open_kwargs
+ super().__init__(timeout, check_interval, fail_when_locked)
def acquire(
self,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = None,
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
) -> typing.IO[typing.AnyStr]:
- '''Acquire the locked filehandle'''
+ """Acquire the locked filehandle"""
fail_when_locked = coalesce(fail_when_locked, self.fail_when_locked)
@@ -248,14 +267,15 @@ def acquire(
)
# If we already have a filehandle, return it
- fh: typing.Optional[typing.IO] = self.fh
+ fh = self.fh
if fh:
- return fh
+ # Due to type invariance we need to cast the type
+ return typing.cast(typing.IO[typing.AnyStr], fh)
# Get a new filehandler
fh = self._get_fh()
- def try_close(): # pragma: no cover
+ def try_close() -> None: # pragma: no cover
# Silently try to close the handle if possible, ignore all issues
if fh is not None:
with contextlib.suppress(Exception):
@@ -296,41 +316,51 @@ def try_close(): # pragma: no cover
fh = self._prepare_fh(fh)
self.fh = fh
- return fh
+ return typing.cast(typing.IO[typing.AnyStr], fh)
def __enter__(self) -> typing.IO[typing.AnyStr]:
return self.acquire()
- def release(self):
- '''Releases the currently locked file handle'''
+ def release(self) -> None:
+ """Releases the currently locked file handle"""
if self.fh:
- portalocker.unlock(self.fh)
- self.fh.close()
- self.fh = None
-
- def _get_fh(self) -> typing.IO:
- '''Get a new filehandle'''
- return open( # noqa: SIM115
- self.filename,
- self.mode,
- **self.file_open_kwargs,
+ # On Windows, closing the handle also releases the lock. Ensure we
+ # always close, even if unlock raises due to edge cases when
+ # preparing/restoring file position.
+ try:
+ with contextlib.suppress(Exception):
+ portalocker.unlock(self.fh)
+ finally:
+ with contextlib.suppress(Exception):
+ self.fh.close()
+ self.fh = None
+
+ def _get_fh(self) -> types.IO:
+ """Get a new filehandle"""
+ return typing.cast(
+ types.IO,
+ open( # noqa: SIM115
+ self.filename,
+ self.mode,
+ **self.file_open_kwargs,
+ ),
)
- def _get_lock(self, fh: typing.IO) -> typing.IO:
- '''
+ def _get_lock(self, fh: types.IO) -> types.IO:
+ """
Try to lock the given filehandle
- returns LockException if it fails'''
+ returns LockException if it fails"""
portalocker.lock(fh, self.flags)
return fh
- def _prepare_fh(self, fh: typing.IO) -> typing.IO:
- '''
+ def _prepare_fh(self, fh: types.IO) -> types.IO:
+ """
Prepare the filehandle for usage
If truncate is a number, the file will be truncated to that amount of
bytes
- '''
+ """
if self.truncate:
fh.seek(0)
fh.truncate(0)
@@ -339,21 +369,21 @@ def _prepare_fh(self, fh: typing.IO) -> typing.IO:
class RLock(Lock):
- '''
+ """
A reentrant lock, functions in a similar way to threading.RLock in that it
can be acquired multiple times. When the corresponding number of release()
calls are made the lock will finally release the underlying file lock.
- '''
+ """
def __init__(
self,
- filename,
- mode='a',
- timeout=DEFAULT_TIMEOUT,
- check_interval=DEFAULT_CHECK_INTERVAL,
- fail_when_locked=False,
- flags=LOCK_METHOD,
- ):
+ filename: Filename,
+ mode: Mode = 'a',
+ timeout: float = DEFAULT_TIMEOUT,
+ check_interval: float = DEFAULT_CHECK_INTERVAL,
+ fail_when_locked: bool = False,
+ flags: constants.LockFlags = LOCK_METHOD,
+ ) -> None:
super().__init__(
filename,
mode,
@@ -366,25 +396,26 @@ def __init__(
def acquire(
self,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = None,
- ) -> typing.IO:
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
+ ) -> typing.IO[typing.AnyStr]:
+ fh: typing.IO[typing.AnyStr]
if self._acquire_count >= 1:
- fh = self.fh
+ fh = typing.cast(typing.IO[typing.AnyStr], self.fh)
else:
fh = super().acquire(timeout, check_interval, fail_when_locked)
self._acquire_count += 1
- assert fh
+ assert fh is not None
return fh
- def release(self):
- if self._acquire_count == 0:
+ def release(self) -> None:
+ if self._acquire_count == 0: # pragma: no branch - covered by tests
raise exceptions.LockException(
'Cannot release more times than acquired',
)
- if self._acquire_count == 1:
+ if self._acquire_count == 1: # pragma: no branch - trivial guard
super().release()
self._acquire_count -= 1
@@ -392,14 +423,13 @@ def release(self):
class TemporaryFileLock(Lock):
def __init__(
self,
- filename='.lock',
- timeout=DEFAULT_TIMEOUT,
- check_interval=DEFAULT_CHECK_INTERVAL,
- fail_when_locked=True,
- flags=LOCK_METHOD,
- ):
- Lock.__init__(
- self,
+ filename: str = '.lock',
+ timeout: float = DEFAULT_TIMEOUT,
+ check_interval: float = DEFAULT_CHECK_INTERVAL,
+ fail_when_locked: bool = True,
+ flags: constants.LockFlags = LOCK_METHOD,
+ ) -> None:
+ super().__init__(
filename=filename,
mode='w',
timeout=timeout,
@@ -407,16 +437,184 @@ def __init__(
fail_when_locked=fail_when_locked,
flags=flags,
)
- atexit.register(self.release)
+ # Avoid keeping a strong reference to self, otherwise GC can't
+ # collect and tests expecting deletion won't pass.
+ wr = weakref.ref(self)
+
+ def _finalize_release(
+ ref: weakref.ReferenceType[TemporaryFileLock] = wr, # type: ignore[arg-type]
+ ) -> None: # pragma: no cover - best effort
+ obj = ref()
+ if obj is not None:
+ with contextlib.suppress(Exception):
+ obj.release()
- def release(self):
+ atexit.register(_finalize_release)
+
+ def release(self) -> None: # pragma: no cover - platform-specific cleanup
+ """Release the file lock and remove the temporary file."""
Lock.release(self)
+ # Try to remove file with a short retry loop to avoid transient
+ # Windows share violations from background scanners.
if os.path.isfile(self.filename): # pragma: no branch
- os.unlink(self.filename)
+ for _ in range(5):
+ try:
+ os.unlink(self.filename)
+ break
+ except PermissionError: # pragma: no cover - rare on CI
+ time.sleep(0.05) # pragma: no cover - timing dependent
+ except FileNotFoundError: # pragma: no cover - race
+ break
+
+
+class PidFileLock(TemporaryFileLock):
+ """
+ A lock that writes the current process PID to the file and can read
+ the PID of the process that currently holds the lock.
+
+ When used as a context manager:
+ - Returns None if we successfully acquired the lock
+ - Returns the PID (int) if another process holds the lock
+ """
+
+ def __init__(
+ self,
+ filename: str = '.pid',
+ timeout: float = DEFAULT_TIMEOUT,
+ check_interval: float = DEFAULT_CHECK_INTERVAL,
+ fail_when_locked: bool = True,
+ flags: constants.LockFlags = LOCK_METHOD,
+ ) -> None:
+ super().__init__(
+ filename=filename,
+ timeout=timeout,
+ check_interval=check_interval,
+ fail_when_locked=fail_when_locked,
+ flags=flags,
+ )
+ self._acquired_lock = False
+ # Use a sidecar file for the actual OS-level lock so the PID file
+ # remains readable on platforms (notably Windows) with mandatory
+ # byte-range locking. This preserves existing public API/behavior.
+ self._lockfile = f'{self.filename}.lock'
+ self._inner_lock: Lock | None = None
+
+ def acquire(
+ self,
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
+ ) -> typing.IO[typing.AnyStr]:
+ """Acquire the lock and write the current PID to the file"""
+ fail_when_locked = coalesce(fail_when_locked, self.fail_when_locked)
+
+ # Acquire the sidecar lock file using a normal Lock instance.
+ self._inner_lock = Lock(
+ self._lockfile,
+ mode='a',
+ timeout=timeout if fail_when_locked is False else 0,
+ check_interval=coalesce(
+ check_interval if fail_when_locked is False else 0.0,
+ DEFAULT_CHECK_INTERVAL,
+ ),
+ fail_when_locked=True,
+ flags=LOCK_METHOD,
+ )
+ try:
+ self._inner_lock.acquire(
+ timeout=timeout,
+ check_interval=check_interval,
+ fail_when_locked=True,
+ )
+ except Exception:
+ # Propagate so __enter__ can return PID of holder
+ self._inner_lock = None
+ raise
+
+ # Write the current process PID to the public PID file
+ # Use unbuffered OS ops where possible
+ with open(self.filename, 'a+') as f:
+ try:
+ fd2 = f.fileno() # type: ignore[no-untyped-call]
+ os.lseek(fd2, 0, os.SEEK_SET)
+ try:
+ os.ftruncate(fd2, 0)
+ except Exception: # pragma: no cover - rare
+                    # Defensive fallback if os.ftruncate fails (note:
+                    # os.ftruncate is available on Windows since Python 3.3)
+ f.seek(0)
+ f.truncate()
+ os.write(fd2, str(os.getpid()).encode('ascii'))
+ with contextlib.suppress(Exception):
+ os.fsync(fd2)
+ except Exception: # pragma: no cover - rare
+                # Defensive fallback if the low-level os.write/os.lseek
+                # path fails (these exist on all supported platforms)
+ f.seek(0)
+ f.truncate()
+ f.write(str(os.getpid())) # type: ignore[arg-type,call-overload]
+ with contextlib.suppress(Exception):
+ f.flush()
+
+ self._acquired_lock = True
+ # No need to keep a direct fh on the PID file; return the lock's fh
+ # to satisfy the context manager typing contract.
+ return typing.cast(typing.IO[typing.AnyStr], self._inner_lock.fh)
+
+ def read_pid(self) -> int | None:
+ """Read the PID from the lock file if it exists and is readable"""
+ try:
+ if os.path.exists(self.filename):
+ with open(self.filename) as f:
+ content = f.read().strip()
+ if content:
+ return int(content)
+ except (ValueError, OSError):
+ pass
+ return None
+
+ def __enter__(self) -> int | None: # type: ignore[override]
+ """
+ Context manager entry that returns:
+ - None if we successfully acquired the lock
+ - PID (int) if another process holds the lock
+ """
+ try:
+ self.acquire()
+ except exceptions.AlreadyLocked:
+ # Another process holds the lock, try to read its PID
+ return self.read_pid()
+
+ return None # We successfully acquired the lock
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: typing.Any,
+ ) -> bool | None:
+ if self._acquired_lock: # pragma: no branch - trivial guard
+ self.release()
+ self._acquired_lock = False
+ return None
+
+ def release(self) -> None:
+ """Release the sidecar lock and remove the PID file."""
+ # Release sidecar first
+ if self._inner_lock is not None:
+ with contextlib.suppress(Exception):
+ self._inner_lock.release()
+ self._inner_lock = None
+ # Then use default behavior to close/unlock any fh and unlink PID file
+ super().release()
+ # Try to remove sidecar file as well
+ with contextlib.suppress(Exception):
+ if os.path.isfile(self._lockfile):
+ os.unlink(self._lockfile)
class BoundedSemaphore(LockBase):
- '''
+ """
Bounded semaphore to prevent too many parallel processes from running
This method is deprecated because multiple processes that are completely
@@ -429,9 +627,9 @@ class BoundedSemaphore(LockBase):
'bounded_semaphore.00.lock'
>>> str(sorted(semaphore.get_random_filenames())[1])
'bounded_semaphore.01.lock'
- '''
+ """
- lock: typing.Optional[Lock]
+ lock: Lock | None
def __init__(
self,
@@ -439,15 +637,15 @@ def __init__(
name: str = 'bounded_semaphore',
filename_pattern: str = '{name}.{number:02d}.lock',
directory: str = tempfile.gettempdir(),
- timeout: typing.Optional[float] = DEFAULT_TIMEOUT,
- check_interval: typing.Optional[float] = DEFAULT_CHECK_INTERVAL,
- fail_when_locked: typing.Optional[bool] = True,
- ):
+ timeout: float | None = DEFAULT_TIMEOUT,
+ check_interval: float | None = DEFAULT_CHECK_INTERVAL,
+ fail_when_locked: bool | None = True,
+ ) -> None:
self.maximum = maximum
self.name = name
self.filename_pattern = filename_pattern
self.directory = directory
- self.lock: typing.Optional[Lock] = None
+ self.lock: Lock | None = None
super().__init__(
timeout=timeout,
check_interval=check_interval,
@@ -470,7 +668,7 @@ def get_random_filenames(self) -> typing.Sequence[pathlib.Path]:
random.shuffle(filenames)
return filenames
- def get_filename(self, number) -> pathlib.Path:
+ def get_filename(self, number: int) -> pathlib.Path:
return pathlib.Path(self.directory) / self.filename_pattern.format(
name=self.name,
number=number,
@@ -478,15 +676,15 @@ def get_filename(self, number) -> pathlib.Path:
def acquire( # type: ignore[override]
self,
- timeout: typing.Optional[float] = None,
- check_interval: typing.Optional[float] = None,
- fail_when_locked: typing.Optional[bool] = None,
- ) -> typing.Optional[Lock]:
+ timeout: float | None = None,
+ check_interval: float | None = None,
+ fail_when_locked: bool | None = None,
+ ) -> Lock | None:
assert not self.lock, 'Already locked'
filenames = self.get_filenames()
- for n in self._timeout_generator(timeout, check_interval): # pragma:
+ for n in self._timeout_generator(timeout, check_interval):
logger.debug('trying lock (attempt %d) %r', n, filenames)
# no branch
if self.try_lock(filenames): # pragma: no branch
@@ -515,14 +713,14 @@ def try_lock(self, filenames: typing.Sequence[Filename]) -> bool:
return False
- def release(self): # pragma: no cover
+ def release(self) -> None: # pragma: no cover
if self.lock is not None:
self.lock.release()
self.lock = None
class NamedBoundedSemaphore(BoundedSemaphore):
- '''
+ """
Bounded semaphore to prevent too many parallel processes from running
It's also possible to specify a timeout when acquiring the lock to wait
@@ -544,20 +742,20 @@ class NamedBoundedSemaphore(BoundedSemaphore):
>>> 'bounded_semaphore' in str(semaphore.get_filenames()[0])
True
- '''
+ """
def __init__(
self,
maximum: int,
- name: typing.Optional[str] = None,
+ name: str | None = None,
filename_pattern: str = '{name}.{number:02d}.lock',
directory: str = tempfile.gettempdir(),
- timeout: typing.Optional[float] = DEFAULT_TIMEOUT,
- check_interval: typing.Optional[float] = DEFAULT_CHECK_INTERVAL,
- fail_when_locked: typing.Optional[bool] = True,
- ):
+ timeout: float | None = DEFAULT_TIMEOUT,
+ check_interval: float | None = DEFAULT_CHECK_INTERVAL,
+ fail_when_locked: bool | None = True,
+ ) -> None:
if name is None:
- name = 'bounded_semaphore.%d' % random.randint(0, 1000000)
+ name = f'bounded_semaphore.{random.randint(0, 1000000):d}'
super().__init__(
maximum,
name,
diff --git a/portalocker_tests/conftest.py b/portalocker_tests/conftest.py
index 5650288..f8aba2e 100644
--- a/portalocker_tests/conftest.py
+++ b/portalocker_tests/conftest.py
@@ -1,10 +1,12 @@
import contextlib
import logging
import multiprocessing
+import os
import random
import pytest
+import portalocker
from portalocker import utils
logger = logging.getLogger(__name__)
@@ -27,6 +29,44 @@ def pytest_sessionstart(session):
@pytest.fixture(autouse=True)
def reduce_timeouts(monkeypatch):
- 'For faster testing we reduce the timeouts.'
+ "For faster testing we reduce the timeouts."
monkeypatch.setattr(utils, 'DEFAULT_TIMEOUT', 0.1)
monkeypatch.setattr(utils, 'DEFAULT_CHECK_INTERVAL', 0.05)
+
+
+LOCKERS: list[portalocker.portalocker.LockerType] = []
+# ------------------------------------------------------------------ #
+# Locker switching helpers (used by many parametrised tests)
+# ------------------------------------------------------------------ #
+if os.name == 'posix':
+ from fcntl import flock, lockf # type: ignore[attr-defined]
+
+ LOCKERS += [flock, lockf] # type: ignore[list-item]
+else:
+ win_locker = portalocker.portalocker.Win32Locker()
+ msvcrt_locker = portalocker.portalocker.MsvcrtLocker()
+
+ LOCKERS += [
+ (
+ win_locker.lock,
+ win_locker.unlock,
+ ),
+ (
+ msvcrt_locker.lock,
+ msvcrt_locker.unlock,
+ ),
+ portalocker.portalocker.Win32Locker,
+ portalocker.portalocker.MsvcrtLocker,
+ win_locker,
+ msvcrt_locker,
+ ]
+
+
+@pytest.fixture
+def locker(request, monkeypatch):
+ """Patch the low-level locker that portalocker uses for this test run."""
+ monkeypatch.setattr(portalocker.portalocker, 'LOCKER', request.param)
+ return request.param
+
+
+__all__ = ['LOCKERS']
diff --git a/portalocker_tests/mypy.ini b/portalocker_tests/mypy.ini
index 2f91b47..ca83dcf 100644
--- a/portalocker_tests/mypy.ini
+++ b/portalocker_tests/mypy.ini
@@ -3,3 +3,4 @@ warn_return_any = True
warn_unused_configs = True
ignore_missing_imports = True
+exclude = portalocker_tests/requirements\.txt$
diff --git a/portalocker_tests/requirements.txt b/portalocker_tests/requirements.txt
index 9628e3f..b73cc36 100644
--- a/portalocker_tests/requirements.txt
+++ b/portalocker_tests/requirements.txt
@@ -1 +1,2 @@
+# mypy: skip-file
-e.[tests]
diff --git a/portalocker_tests/test_combined.py b/portalocker_tests/test_combined.py
index dacda89..700611a 100644
--- a/portalocker_tests/test_combined.py
+++ b/portalocker_tests/test_combined.py
@@ -14,6 +14,6 @@ def test_combined(tmpdir):
sys.path.append(output_file.dirname)
# Combined is being generated above but linters won't understand that
- import combined # type: ignore
+ import combined # pyright: ignore[reportMissingImports]
assert combined
diff --git a/portalocker_tests/test_core_locking.py b/portalocker_tests/test_core_locking.py
new file mode 100644
index 0000000..fbf3011
--- /dev/null
+++ b/portalocker_tests/test_core_locking.py
@@ -0,0 +1,116 @@
+import pytest
+
+import portalocker
+from portalocker import exceptions, utils
+
+
+def test_utils_base():
+ """Test that LockBase can be subclassed."""
+
+ class Test(utils.LockBase):
+ pass
+
+
+def test_exceptions(tmpdir):
+ """Test that locking a file twice raises LockException."""
+ tmpfile = tmpdir.join('test_exceptions.lock')
+ with open(tmpfile, 'a') as a, open(tmpfile, 'a') as b:
+ # Lock exclusive non-blocking
+ lock_flags = portalocker.LOCK_EX | portalocker.LOCK_NB
+
+ # First lock file a
+ portalocker.lock(a, lock_flags)
+
+ # Now see if we can lock file b
+ with pytest.raises(portalocker.LockException):
+ portalocker.lock(b, lock_flags)
+
+
+def test_simple(tmpdir):
+ """Test that locking and writing to a file works as expected."""
+ tmpfile = tmpdir.join('test_simple.lock')
+ with open(tmpfile, 'w') as fh:
+ fh.write('spam and eggs')
+
+ with open(tmpfile, 'r+') as fh:
+ portalocker.lock(fh, portalocker.LOCK_EX)
+
+ fh.seek(13)
+ fh.write('foo')
+
+ # Make sure we didn't overwrite the original text
+ fh.seek(0)
+ assert fh.read(13) == 'spam and eggs'
+
+ portalocker.unlock(fh)
+
+
+def test_truncate(tmpdir):
+ """Test that truncating a file works as expected."""
+ tmpfile = tmpdir.join('test_truncate.lock')
+ with open(tmpfile, 'w') as fh:
+ fh.write('spam and eggs')
+
+ with portalocker.Lock(tmpfile, mode='a+') as fh:
+ # Make sure we didn't overwrite the original text
+ fh.seek(0)
+ assert fh.read(13) == 'spam and eggs'
+
+ with portalocker.Lock(tmpfile, mode='w+') as fh:
+ # Make sure we truncated the file
+ assert fh.read() == ''
+
+
+def test_class(tmpdir):
+ """Test that Lock context manager works as expected."""
+ tmpfile = tmpdir.join('test_class.lock')
+ lock = portalocker.Lock(tmpfile)
+ lock2 = portalocker.Lock(tmpfile, fail_when_locked=False, timeout=0.01)
+
+ with lock:
+ lock.acquire()
+
+ with pytest.raises(portalocker.LockException), lock2:
+ pass
+
+ with lock2:
+ pass
+
+
+def test_acquire_release(tmpdir):
+ """Test that acquire and release work as expected."""
+ tmpfile = tmpdir.join('test_acquire_release.lock')
+ lock = portalocker.Lock(tmpfile)
+ lock2 = portalocker.Lock(tmpfile, fail_when_locked=False)
+
+ lock.acquire() # acquire lock when nobody is using it
+ with pytest.raises(portalocker.LockException):
+ # another party should not be able to acquire the lock
+ lock2.acquire(timeout=0.01)
+
+ # re-acquire a held lock is a no-op
+ lock.acquire()
+
+ lock.release() # release the lock
+ lock.release() # second release does nothing
+
+
+def test_release_unacquired(tmpdir):
+ """Test that releasing an unacquired RLock raises LockException."""
+ tmpfile = tmpdir.join('test_release_unacquired.lock')
+ with pytest.raises(portalocker.LockException):
+ portalocker.RLock(tmpfile).release()
+
+
+def test_exception(monkeypatch, tmpdir):
+ """Do we stop immediately if the locking fails, even with a timeout?"""
+ tmpfile = tmpdir.join('test_exception.lock')
+
+ def patched_lock(*args, **kwargs):
+ raise ValueError('Test exception')
+
+ monkeypatch.setattr('portalocker.utils.portalocker.lock', patched_lock)
+ lock = portalocker.Lock(tmpfile, 'w', timeout=float('inf'))
+
+ with pytest.raises(exceptions.LockException):
+ lock.acquire()
diff --git a/portalocker_tests/test_lock_flags.py b/portalocker_tests/test_lock_flags.py
new file mode 100644
index 0000000..7dcde3d
--- /dev/null
+++ b/portalocker_tests/test_lock_flags.py
@@ -0,0 +1,100 @@
+import os
+
+import pytest
+
+import portalocker
+from portalocker import LockFlags
+from portalocker_tests.conftest import LOCKERS
+
+
+def test_exclusive(tmpdir):
+ """Test that exclusive lock prevents reading and writing by others."""
+ tmpfile = tmpdir.join('test_exclusive.lock')
+ text_0 = 'spam and eggs'
+ with open(tmpfile, 'w') as fh:
+ fh.write(text_0)
+
+ with open(tmpfile) as fh:
+ portalocker.lock(fh, portalocker.LOCK_EX | portalocker.LOCK_NB)
+
+ # Make sure we can't read the locked file
+ with (
+ pytest.raises(portalocker.LockException),
+ open(
+ tmpfile,
+ 'r+',
+ ) as fh2,
+ ):
+ portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB)
+ assert fh2.read() == text_0
+
+ # Make sure we can't write the locked file
+ with (
+ pytest.raises(portalocker.LockException),
+ open(
+ tmpfile,
+ 'w+',
+ ) as fh2,
+ ):
+ portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB)
+ fh2.write('surprise and fear')
+
+ # Make sure we can explicitly unlock the file
+ portalocker.unlock(fh)
+
+
+def test_shared(tmpdir):
+ """Test that shared lock allows reading but not writing by others."""
+ tmpfile = tmpdir.join('test_shared.lock')
+ with open(tmpfile, 'w') as fh:
+ fh.write('spam and eggs')
+
+ with open(tmpfile) as f:
+ portalocker.lock(f, portalocker.LOCK_SH | portalocker.LOCK_NB)
+
+ # Make sure we can read the locked file
+ with open(tmpfile) as fh2:
+ portalocker.lock(fh2, portalocker.LOCK_SH | portalocker.LOCK_NB)
+ assert fh2.read() == 'spam and eggs'
+
+ # Make sure we can't write the locked file
+ with (
+ pytest.raises(portalocker.LockException),
+ open(
+ tmpfile,
+ 'w+',
+ ) as fh2,
+ ):
+ portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB)
+ fh2.write('surprise and fear')
+
+ # Make sure we can explicitly unlock the file
+ portalocker.unlock(f)
+
+
+@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
+def test_blocking_timeout(tmpdir, locker):
+ """Test that a warning is raised when using a blocking timeout."""
+ tmpfile = tmpdir.join('test_blocking_timeout.lock')
+ flags = LockFlags.SHARED
+
+ with pytest.warns(UserWarning): # noqa: SIM117
+ with portalocker.Lock(tmpfile, 'a+', timeout=5, flags=flags):
+ pass
+
+ lock = portalocker.Lock(tmpfile, 'a+', flags=flags)
+ with pytest.warns(UserWarning):
+ lock.acquire(timeout=5)
+
+
+@pytest.mark.skipif(
+ os.name == 'nt',
+    reason='Windows uses an entirely different lock mechanism, which does not '
+ 'support NON_BLOCKING flag within a single process.',
+)
+@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
+def test_nonblocking(tmpdir, locker):
+ """Test that using NON_BLOCKING flag raises RuntimeError."""
+ tmpfile = tmpdir.join('test_nonblocking.lock')
+ with open(tmpfile, 'w') as fh, pytest.raises(RuntimeError):
+ portalocker.lock(fh, LockFlags.NON_BLOCKING)
diff --git a/portalocker_tests/test_mechanisms.py b/portalocker_tests/test_mechanisms.py
new file mode 100644
index 0000000..7bf4ed1
--- /dev/null
+++ b/portalocker_tests/test_mechanisms.py
@@ -0,0 +1,57 @@
+import types
+import typing
+
+import pytest
+
+import portalocker
+from portalocker import LockFlags
+from portalocker_tests.conftest import LOCKERS
+
+
+# @pytest.mark.skipif(
+# os.name == 'nt',
+# reason='Locking on Windows requires a file object',
+# )
+@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
+def test_lock_fileno(tmpdir, locker):
+ """Test that locking using fileno() works as expected."""
+ tmpfile = tmpdir.join('test_lock_fileno.lock')
+ with open(tmpfile, 'a+') as a, open(tmpfile, 'a+') as b:
+ # Lock shared non-blocking
+ flags = LockFlags.SHARED | LockFlags.NON_BLOCKING
+
+ # First lock file a
+ portalocker.lock(a, flags)
+
+ # Now see if we can lock using fileno()
+ portalocker.lock(b.fileno(), flags)
+
+
+@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
+def test_locker_mechanism(tmpdir, locker):
+ """Can we switch the locking mechanism?"""
+ tmpfile = tmpdir.join('test_locker_mechanism.lock')
+ # We can test for flock vs lockf based on their different behaviour re.
+ # locking the same file.
+ with portalocker.Lock(tmpfile, 'a+', flags=LockFlags.EXCLUSIVE):
+ # If we have lockf(), we cannot get another lock on the same file.
+ fcntl: typing.Optional[types.ModuleType]
+ try:
+ import fcntl
+ except ImportError:
+ fcntl = None
+
+ if fcntl is not None and locker is fcntl.lockf: # type: ignore[attr-defined]
+ portalocker.Lock(
+ tmpfile,
+ 'r+',
+ flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING,
+ ).acquire(timeout=0.1)
+ # But with other lock methods we can't
+ else:
+ with pytest.raises(portalocker.LockException):
+ portalocker.Lock(
+ tmpfile,
+ 'r+',
+ flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING,
+ ).acquire(timeout=0.1)
diff --git a/portalocker_tests/test_multiprocess.py b/portalocker_tests/test_multiprocess.py
new file mode 100644
index 0000000..0b3394d
--- /dev/null
+++ b/portalocker_tests/test_multiprocess.py
@@ -0,0 +1,158 @@
+import dataclasses
+import multiprocessing
+import platform
+import time
+import typing
+
+import pytest
+
+import portalocker
+from portalocker import LockFlags
+
+
+@dataclasses.dataclass(order=True)
+class LockResult:
+ """Helper dataclass for multiprocessing lock results."""
+
+ exception_class: typing.Union[type, None] = None
+ exception_message: typing.Union[str, None] = None
+ exception_repr: typing.Union[str, None] = None
+
+
+def lock(
+ filename: str,
+ fail_when_locked: bool,
+ flags: LockFlags,
+ timeout: float = 0.1,
+ keep_locked: float = 0.05,
+) -> LockResult:
+ """Helper function for multiprocessing lock tests."""
+ try:
+ with portalocker.Lock(
+ filename,
+ timeout=timeout,
+ fail_when_locked=fail_when_locked,
+ flags=flags,
+ ):
+ time.sleep(keep_locked)
+ return LockResult()
+
+ except Exception as exception:
+ return LockResult(
+ type(exception),
+ str(exception),
+ repr(exception),
+ )
+
+
+def shared_lock(filename, **kwargs):
+ """Helper for shared lock in multiprocessing tests."""
+ with portalocker.Lock(
+ filename,
+ timeout=0.1,
+ fail_when_locked=False,
+ flags=LockFlags.SHARED | LockFlags.NON_BLOCKING,
+ ):
+ time.sleep(0.2)
+ return True
+
+
+def shared_lock_fail(filename, **kwargs):
+ """Helper for shared lock fail in multiprocessing tests."""
+ with portalocker.Lock(
+ filename,
+ timeout=0.1,
+ fail_when_locked=True,
+ flags=LockFlags.SHARED | LockFlags.NON_BLOCKING,
+ ):
+ time.sleep(0.2)
+ return True
+
+
+def exclusive_lock(filename, **kwargs):
+ """Helper for exclusive lock in multiprocessing tests."""
+ with portalocker.Lock(
+ filename,
+ timeout=0.1,
+ fail_when_locked=False,
+ flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING,
+ ):
+ time.sleep(0.2)
+ return True
+
+
+@pytest.mark.parametrize('fail_when_locked', [True, False])
+@pytest.mark.skipif(
+ 'pypy' in platform.python_implementation().lower(),
+ reason='pypy3 does not support the multiprocessing test',
+)
+@pytest.mark.flaky(reruns=5, reruns_delay=1)
+def test_shared_processes(tmpdir, fail_when_locked):
+ """Test that shared locks work correctly across processes."""
+ tmpfile = tmpdir.join('test_shared_processes.lock')
+ flags = LockFlags.SHARED | LockFlags.NON_BLOCKING
+ with multiprocessing.Pool(processes=2) as pool:
+ args = tmpfile, fail_when_locked, flags
+ results = pool.starmap_async(lock, 2 * [args])
+
+ for result in results.get(timeout=2.0):
+ if result.exception_class is not None:
+            raise result.exception_class # type: ignore[reportGeneralTypeIssues]
+ assert result == LockResult()
+
+
+@pytest.mark.parametrize('fail_when_locked', [True, False])
+@pytest.mark.parametrize(
+ 'locker',
+ [
+        # NOTE(review): empty parametrize list — pytest collects zero instances of this test; confirm LOCKERS was intended here
+ ],
+ indirect=True,
+)
+@pytest.mark.skipif(
+ 'pypy' in platform.python_implementation().lower(),
+ reason='pypy3 does not support the multiprocessing test',
+)
+@pytest.mark.flaky(reruns=5, reruns_delay=1) # type: ignore[misc]
+def test_exclusive_processes(
+ tmpdir: str,
+ fail_when_locked: bool,
+ locker: typing.Callable[..., typing.Any],
+) -> None:
+ """Test that exclusive locks work correctly across processes."""
+ tmpfile = tmpdir.join('test_exclusive_processes.lock')
+ flags = LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING
+
+ with multiprocessing.Pool(processes=2) as pool:
+ # Submit tasks individually
+ result_a = pool.apply_async(lock, [tmpfile, fail_when_locked, flags])
+ result_b = pool.apply_async(lock, [tmpfile, fail_when_locked, flags])
+
+ try:
+ a = result_a.get(timeout=1.2) # Wait for 'a' with timeout
+ except multiprocessing.TimeoutError:
+ a = None
+
+ try:
+ # Lower timeout since we already waited with `a`
+ b = result_b.get(timeout=0.6) # Wait for 'b' with timeout
+ except multiprocessing.TimeoutError:
+ b = None
+
+ assert a or b
+ # Make sure a is always filled
+ if a is None:
+ b, a = a, b
+
+ assert a is not None
+
+ if b:
+ assert b is not None
+
+ assert not a.exception_class or not b.exception_class
+ assert issubclass(
+ a.exception_class or b.exception_class, # type: ignore[arg-type]
+ portalocker.LockException,
+ )
+ else:
+ assert not a.exception_class
diff --git a/portalocker_tests/test_pidfilelock.py b/portalocker_tests/test_pidfilelock.py
new file mode 100644
index 0000000..a6d7a1d
--- /dev/null
+++ b/portalocker_tests/test_pidfilelock.py
@@ -0,0 +1,253 @@
+"""Tests for PidFileLock class."""
+
+import multiprocessing
+import os
+import tempfile
+import time
+from pathlib import Path
+from typing import Optional
+from unittest import mock
+
+from portalocker import utils
+
+
+def test_pidfilelock_creation():
+ """Test basic PidFileLock creation."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_pidfilelock_creation.lock'
+ lock = utils.PidFileLock(str(lock_file))
+ assert lock.filename == str(lock_file)
+ assert not lock._acquired_lock
+
+
+def test_pidfilelock_acquire_writes_pid():
+ """Test that acquiring the lock writes the current PID."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_pidfilelock_acquire_writes_pid.lock'
+ lock = utils.PidFileLock(str(lock_file))
+
+ try:
+ lock.acquire()
+ assert lock._acquired_lock
+
+ # Check that PID was written to file
+ with open(lock_file) as f:
+ written_pid = int(f.read().strip())
+ assert written_pid == os.getpid()
+ finally:
+ lock.release()
+
+
+def test_pidfilelock_context_manager_success():
+ """Test context manager when we successfully acquire the lock."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = (
+ Path(tmpdir) / 'test_pidfilelock_context_manager_success.lock'
+ )
+ lock = utils.PidFileLock(str(lock_file))
+
+ with lock as result:
+ assert result is None # We acquired the lock
+ assert lock._acquired_lock
+
+ # Verify PID was written
+ with open(lock_file) as f:
+ written_pid = int(f.read().strip())
+ assert written_pid == os.getpid()
+
+ # Lock should be released and file cleaned up
+ # Check both conditions after context manager exit
+ lock_released: bool = not lock._acquired_lock
+ file_cleaned: bool = not os.path.exists(lock_file)
+
+ assert lock_released
+ assert file_cleaned
+
+
+def test_pidfilelock_context_manager_already_locked():
+ """Test context manager when another process holds the lock."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = (
+ Path(tmpdir)
+ / 'test_pidfilelock_context_manager_already_locked.lock'
+ )
+
+ # Create a lock file with a fake PID
+ fake_pid = 99999
+ with open(lock_file, 'w') as f:
+ f.write(str(fake_pid))
+
+ # Create another lock that tries to acquire the same file
+ lock1 = utils.PidFileLock(str(lock_file))
+ lock1.acquire() # This should succeed and write our PID
+
+ try:
+ lock2 = utils.PidFileLock(str(lock_file))
+ with lock2 as result:
+ assert result == os.getpid() # Should return the PID of lock1
+ assert not lock2._acquired_lock
+ finally:
+ lock1.release()
+
+
+def test_read_pid_nonexistent_file():
+ """Test reading PID from non-existent file."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_read_pid_nonexistent_file.lock'
+ lock = utils.PidFileLock(str(lock_file))
+ assert lock.read_pid() is None
+
+
+def test_read_pid_empty_file():
+ """Test reading PID from empty file."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_read_pid_empty_file.lock'
+ lock_file.touch() # Create empty file
+
+ lock = utils.PidFileLock(str(lock_file))
+ assert lock.read_pid() is None
+
+
+def test_read_pid_invalid_content():
+ """Test reading PID from file with invalid content."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_read_pid_invalid_content.lock'
+ with open(lock_file, 'w') as f:
+ f.write('not_a_number')
+
+ lock = utils.PidFileLock(str(lock_file))
+ assert lock.read_pid() is None
+
+
+def test_read_pid_valid_content():
+ """Test reading PID from file with valid content."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_read_pid_valid_content.lock'
+ test_pid = 12345
+ with open(lock_file, 'w') as f:
+ f.write(str(test_pid))
+
+ lock = utils.PidFileLock(str(lock_file))
+ assert lock.read_pid() == test_pid
+
+
+@mock.patch('builtins.open', side_effect=OSError('Permission denied'))
+def test_read_pid_permission_error(mock_open):
+ """Test reading PID when file cannot be opened."""
+ lock = utils.PidFileLock('test_read_pid_permission_error.lock')
+ assert lock.read_pid() is None
+
+
+def test_release_without_acquire():
+ """Test releasing without acquiring first."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_release_without_acquire.lock'
+ lock = utils.PidFileLock(str(lock_file))
+
+ # Should not raise an error
+ lock.release()
+ assert not lock._acquired_lock
+
+
+def test_multiple_context_manager_entries():
+ """Test multiple context manager entries."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_multiple_context_manager_entries.lock'
+ lock = utils.PidFileLock(str(lock_file))
+
+ with lock as result1:
+ assert result1 is None
+
+ # Try to enter context again while already locked
+ lock2 = utils.PidFileLock(str(lock_file))
+ with lock2 as result2:
+ assert result2 == os.getpid()
+
+
+def test_inheritance_from_temporaryfilelock():
+ """Test that PidFileLock properly inherits from TemporaryFileLock."""
+ lock = utils.PidFileLock()
+ assert isinstance(lock, utils.TemporaryFileLock)
+ assert isinstance(lock, utils.Lock)
+ assert isinstance(lock, utils.LockBase)
+
+
+def test_custom_parameters():
+ """Test PidFileLock with custom parameters."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_custom_parameters.lock'
+ lock = utils.PidFileLock(
+ filename=str(lock_file),
+ timeout=10.0,
+ check_interval=0.1,
+ fail_when_locked=False,
+ )
+
+ assert lock.filename == str(lock_file)
+ assert lock.timeout == 10.0
+ assert lock.check_interval == 0.1
+ assert lock.fail_when_locked is False
+
+
+def _worker_function(lock_file_path, result_queue, should_succeed):
+ """Worker function for multiprocessing tests."""
+ try:
+ lock = utils.PidFileLock(lock_file_path)
+ with lock as result:
+ if should_succeed:
+ # We expect to acquire the lock
+ result_queue.put(('success', result, os.getpid()))
+ time.sleep(0.5) # Hold the lock briefly
+ else:
+ # We expect to get the PID of another process
+ result_queue.put(('blocked', result, os.getpid()))
+ except Exception as e:
+ result_queue.put(('error', str(e), os.getpid()))
+
+
+def test_multiprocess_locking():
+ """Test that PidFileLock works correctly across processes."""
+ with tempfile.TemporaryDirectory() as tmpdir:
+ lock_file = Path(tmpdir) / 'test_multiprocess_locking.lock'
+ result_queue: multiprocessing.Queue[tuple[str, Optional[int], int]] = (
+ multiprocessing.Queue()
+ )
+
+ # Start first process that should acquire the lock
+ p1 = multiprocessing.Process(
+ target=_worker_function, args=(str(lock_file), result_queue, True)
+ )
+ p1.start()
+
+ # Give first process time to acquire lock
+ time.sleep(0.1)
+
+ # Start second process that should be blocked
+ p2 = multiprocessing.Process(
+ target=_worker_function, args=(str(lock_file), result_queue, False)
+ )
+ p2.start()
+
+ try:
+ # Get results from both processes
+ result1 = result_queue.get(timeout=2)
+ result2 = result_queue.get(timeout=2)
+
+ # First process should succeed
+ assert result1[0] == 'success'
+ assert result1[1] is None # Acquired lock successfully
+ p1_pid = result1[2]
+
+ # Second process should be blocked and get first process PID
+ assert result2[0] == 'blocked'
+ assert result2[1] == p1_pid # Should get PID of first process
+
+ finally:
+ p1.join(timeout=2)
+ p2.join(timeout=2)
+
+ # Clean up any remaining processes
+ if p1.is_alive():
+ p1.terminate()
+ if p2.is_alive():
+ p2.terminate()
diff --git a/portalocker_tests/test_redis.py b/portalocker_tests/test_redis.py
index e9bec02..a5c77a5 100644
--- a/portalocker_tests/test_redis.py
+++ b/portalocker_tests/test_redis.py
@@ -4,10 +4,10 @@
import time
import pytest
+from redis import client, exceptions
import portalocker
from portalocker import redis, utils
-from redis import client, exceptions
logger = logging.getLogger(__name__)
@@ -26,7 +26,7 @@ def set_redis_timeouts(monkeypatch):
monkeypatch.setattr(_thread, 'interrupt_main', lambda: None)
-def test_redis_lock():
+def test_redis_lock() -> None:
channel = str(random.random())
lock_a: redis.RedisLock = redis.RedisLock(channel)
@@ -46,7 +46,7 @@ def test_redis_lock():
@pytest.mark.parametrize('timeout', [None, 0, 0.001])
@pytest.mark.parametrize('check_interval', [None, 0, 0.0005])
def test_redis_lock_timeout(timeout, check_interval):
- connection = client.Redis()
+ connection: client.Redis[str] = client.Redis(decode_responses=True)
channel = str(random.random())
lock_a = redis.RedisLock(channel)
lock_a.acquire(timeout=timeout, check_interval=check_interval)
@@ -61,7 +61,7 @@ def test_redis_lock_timeout(timeout, check_interval):
lock_a.connection.close()
-def test_redis_lock_context():
+def test_redis_lock_context() -> None:
channel = str(random.random())
lock_a = redis.RedisLock(channel, fail_when_locked=True)
@@ -72,7 +72,7 @@ def test_redis_lock_context():
pass
-def test_redis_relock():
+def test_redis_relock() -> None:
channel = str(random.random())
lock_a = redis.RedisLock(channel, fail_when_locked=True)
diff --git a/portalocker_tests/test_redis_random_sleep.py b/portalocker_tests/test_redis_random_sleep.py
new file mode 100644
index 0000000..532e06f
--- /dev/null
+++ b/portalocker_tests/test_redis_random_sleep.py
@@ -0,0 +1,75 @@
+"""Validate the jitter that RedisLock adds to its sleep intervals."""
+
+import time
+from typing import Any
+
+from portalocker import redis
+
+
+class FakeLock(redis.RedisLock):
+ def __init__(
+ self, thread_sleep_time: float, *args: Any, **kwargs: Any
+ ) -> None:
+ # Channel doesn't affect sleep behavior.
+ super().__init__('test_channel', *args, **kwargs)
+ self.thread_sleep_time = thread_sleep_time
+
+
+def test_timeout_generator_with_positive_check_interval(monkeypatch):
+ """When check_interval > 0 the generator must sleep for a fraction
+ of that value (0.5 ≤ factor < 1.5)."""
+ sleep_times = []
+
+ def fake_sleep(t):
+ sleep_times.append(t)
+
+ monkeypatch.setattr(time, 'sleep', fake_sleep)
+
+ # For positive check_interval, effective_interval equals check_interval.
+ lock = FakeLock(thread_sleep_time=0.05)
+ gen = lock._timeout_generator(timeout=0.1, check_interval=0.02)
+ next(gen)
+ # Expected sleep time is 0.02 * (0.5 + random_value) in [0.01, 0.03].
+ assert len(sleep_times) == 1
+ sleep_time = sleep_times[0]
+ assert 0.01 <= sleep_time <= 0.03
+
+
+def test_timeout_generator_with_zero_check_interval(monkeypatch):
+ """When check_interval == 0 the generator must sleep for a fraction
+ of thread_sleep_time (0.5 ≤ factor < 1.5)."""
+ sleep_times = []
+
+ def fake_sleep(t):
+ sleep_times.append(t)
+
+ monkeypatch.setattr(time, 'sleep', fake_sleep)
+
+ # For zero check_interval, effective_interval is thread_sleep_time.
+ lock = FakeLock(thread_sleep_time=0.05)
+ gen = lock._timeout_generator(timeout=0.1, check_interval=0)
+ next(gen)
+ # Expected sleep time is 0.05 * (0.5 + random_value) in [0.025, 0.075].
+ assert len(sleep_times) == 1
+ sleep_time = sleep_times[0]
+ assert 0.025 <= sleep_time <= 0.075
+
+
+def test_timeout_generator_with_negative_check_interval(monkeypatch):
+ """When check_interval < 0 the generator must sleep for a fraction
+ of thread_sleep_time (0.5 ≤ factor < 1.5)."""
+ sleep_times = []
+
+ def fake_sleep(t):
+ sleep_times.append(t)
+
+ monkeypatch.setattr(time, 'sleep', fake_sleep)
+
+ # For negative check_interval, effective_interval is thread_sleep_time.
+ lock = FakeLock(thread_sleep_time=0.05)
+ gen = lock._timeout_generator(timeout=0.1, check_interval=-0.01)
+ next(gen)
+ # Expected sleep time is 0.05 * (0.5 + random_value) in [0.025, 0.075].
+ assert len(sleep_times) == 1
+ sleep_time = sleep_times[0]
+ assert 0.025 <= sleep_time <= 0.075
diff --git a/portalocker_tests/test_rlock_behaviour.py b/portalocker_tests/test_rlock_behaviour.py
new file mode 100644
index 0000000..bf57c9e
--- /dev/null
+++ b/portalocker_tests/test_rlock_behaviour.py
@@ -0,0 +1,38 @@
+import pytest
+
+import portalocker
+
+
+def test_rlock_acquire_release_count(tmpdir):
+ """Test that RLock acquire/release count works as expected."""
+ tmpfile = tmpdir.join('test_rlock_acquire_release_count.lock')
+ lock = portalocker.RLock(tmpfile)
+ # Twice acquire
+ h = lock.acquire()
+ assert not h.closed
+ lock.acquire()
+ assert not h.closed
+
+ # Two release
+ lock.release()
+ assert not h.closed
+ lock.release()
+ assert h.closed
+
+
+def test_rlock_acquire_release(tmpdir):
+ """Test that RLock acquire/release works as expected."""
+ tmpfile = tmpdir.join('test_rlock_acquire_release.lock')
+ lock = portalocker.RLock(tmpfile)
+ lock2 = portalocker.RLock(tmpfile, fail_when_locked=False)
+
+ lock.acquire() # acquire lock when nobody is using it
+ with pytest.raises(portalocker.LockException):
+ # another party should not be able to acquire the lock
+ lock2.acquire(timeout=0.01)
+
+ # Now acquire again
+ lock.acquire()
+
+ lock.release() # release the lock
+ lock.release() # second release does nothing
diff --git a/portalocker_tests/test_semaphore.py b/portalocker_tests/test_semaphore.py
index b6d4594..88880ce 100644
--- a/portalocker_tests/test_semaphore.py
+++ b/portalocker_tests/test_semaphore.py
@@ -1,3 +1,5 @@
+"""Tests for the BoundedSemaphore helper."""
+
import random
import pytest
@@ -9,6 +11,8 @@
@pytest.mark.parametrize('timeout', [None, 0, 0.001])
@pytest.mark.parametrize('check_interval', [None, 0, 0.0005])
def test_bounded_semaphore(timeout, check_interval, monkeypatch):
+ """Ensure that the semaphore honours *maximum*, *timeout* and
+ *check_interval* and raises AlreadyLocked when exhausted."""
n = 2
name: str = str(random.random())
monkeypatch.setattr(utils, 'DEFAULT_TIMEOUT', 0.0001)
diff --git a/portalocker_tests/temporary_file_lock.py b/portalocker_tests/test_temporary_file_lock.py
similarity index 52%
rename from portalocker_tests/temporary_file_lock.py
rename to portalocker_tests/test_temporary_file_lock.py
index ad35373..46327c8 100644
--- a/portalocker_tests/temporary_file_lock.py
+++ b/portalocker_tests/test_temporary_file_lock.py
@@ -1,9 +1,12 @@
import os
+import pathlib
import portalocker
def test_temporary_file_lock(tmpfile):
+ """The lock file must be deleted on context exit and GC must close
+ the lock gracefully."""
with portalocker.TemporaryFileLock(tmpfile):
pass
@@ -12,3 +15,6 @@ def test_temporary_file_lock(tmpfile):
lock = portalocker.TemporaryFileLock(tmpfile)
lock.acquire()
del lock
+ assert not pathlib.Path(tmpfile).exists(), (
+ 'Lock file should be removed on lock object deletion'
+ )
diff --git a/portalocker_tests/test_timeout_behaviour.py b/portalocker_tests/test_timeout_behaviour.py
new file mode 100644
index 0000000..5be6924
--- /dev/null
+++ b/portalocker_tests/test_timeout_behaviour.py
@@ -0,0 +1,47 @@
+import pytest
+
+import portalocker
+
+
+def test_with_timeout(tmpdir):
+ """
+ Test that AlreadyLocked is raised when a file is locked with a timeout.
+ """
+ tmpfile = tmpdir.join('test_with_timeout.lock')
+ # Open the file 2 times
+ with pytest.raises(portalocker.AlreadyLocked): # noqa: SIM117
+ with portalocker.Lock(tmpfile, timeout=0.1) as fh:
+ print('writing some stuff to my cache...', file=fh)
+ with portalocker.Lock(
+ tmpfile,
+ timeout=0.1,
+ mode='wb',
+ fail_when_locked=True,
+ ):
+ pass
+ print('writing more stuff to my cache...', file=fh)
+
+
+def test_without_timeout(tmpdir):
+ """
+ Test that LockException is raised when a file is locked without a
+ timeout."""
+ tmpfile = tmpdir.join('test_without_timeout.lock')
+ # Open the file 2 times
+ with pytest.raises(portalocker.LockException): # noqa: SIM117
+ with portalocker.Lock(tmpfile, timeout=None) as fh:
+ print('writing some stuff to my cache...', file=fh)
+ with portalocker.Lock(tmpfile, timeout=None, mode='w'):
+ pass
+ print('writing more stuff to my cache...', file=fh)
+
+
+def test_without_fail(tmpdir):
+ """Test that LockException is raised when fail_when_locked is False."""
+ tmpfile = tmpdir.join('test_without_fail.lock')
+ # Open the file 2 times
+ with pytest.raises(portalocker.LockException): # noqa: SIM117
+ with portalocker.Lock(tmpfile, timeout=0.1) as fh:
+ print('writing some stuff to my cache...', file=fh)
+ lock = portalocker.Lock(tmpfile, timeout=0.1)
+ lock.acquire(check_interval=0.05, fail_when_locked=False)
diff --git a/portalocker_tests/test_version.py b/portalocker_tests/test_version.py
new file mode 100644
index 0000000..bd2d6cd
--- /dev/null
+++ b/portalocker_tests/test_version.py
@@ -0,0 +1,99 @@
+"""Tests for version discovery logic in portalocker.__about__.
+
+These tests verify that the runtime version is resolved from
+importlib.metadata when available, and that it falls back to parsing the
+pyproject.toml when metadata is unavailable. They also validate the
+fallback version parser for pyproject.toml.
+"""
+
+import importlib
+from pathlib import Path
+from typing import Callable, Optional
+
+import pytest
+
+import portalocker
+
+
+@pytest.fixture()
+def reload_about() -> Callable[[], None]:
+ """Return a function to reload portalocker.__about__ cleanly.
+
+ Returns:
+ A function to call which reloads portalocker.__about__.
+ """
+
+ def _reload() -> None:
+ import portalocker.__about__ as about
+
+ importlib.reload(about)
+
+ return _reload
+
+
+def test_get_version_prefers_importlib_metadata(
+ monkeypatch: pytest.MonkeyPatch,
+) -> None:
+ """get_version should prefer importlib metadata, when available."""
+ import portalocker.__about__ as about
+
+ # Patch the version function via string path to avoid mypy export checks
+ monkeypatch.setattr(
+ 'portalocker.__about__.importlib_metadata.version',
+ lambda _name: '9.9.9',
+ raising=True,
+ )
+
+ assert about.get_version() == '9.9.9'
+
+
+def test_get_version_fallback_pyproject(
+ monkeypatch: pytest.MonkeyPatch, reload_about: Callable[[], None]
+) -> None:
+ """get_version should fall back to reading pyproject.toml.
+
+ This test patches importlib's metadata.version to raise and patches
+ pathlib.Path.read_text to return a minimal pyproject.toml with a
+ specific version. It then verifies both get_version() and module
+ import-time __version__ resolve to the expected fallback version.
+ """
+ import portalocker.__about__ as about
+
+ def _raise(_: str) -> str:
+ raise RuntimeError('not installed')
+
+ # Patch upstream importlib.metadata so the module alias picks it up
+ monkeypatch.setattr('importlib.metadata.version', _raise, raising=True)
+
+ def fake_read_text(
+ _self: Path, encoding: str = 'utf-8', errors: Optional[str] = None
+ ) -> str:
+ return "[project]\nname = 'portalocker'\nversion = '1.2.3'\n"
+
+ monkeypatch.setattr('pathlib.Path.read_text', fake_read_text, raising=True)
+
+ # get_version should now read the fallback version
+ assert about.get_version() == '1.2.3'
+
+ # Reload the module so __version__ is recomputed at import time
+ reload_about()
+ import portalocker.__about__ as about2
+
+ assert about2.__version__ == '1.2.3'
+
+
+def test_read_pyproject_version_parses_value(tmp_path: Path) -> None:
+ """_read_pyproject_version must parse [project].version value."""
+ toml = "[project]\nname = 'portalocker'\nversion = '4.5.6'\n"
+ path = tmp_path / 'pyproject.toml'
+ path.write_text(toml, encoding='utf-8')
+
+ import portalocker.__about__ as about
+
+ assert about._read_pyproject_version(path) == '4.5.6'
+
+
+def test_dunder_version_is_string() -> None:
+ """portalocker.__version__ should be a non-empty string."""
+ assert isinstance(portalocker.__version__, str)
+ assert len(portalocker.__version__) > 0
diff --git a/portalocker_tests/tests.py b/portalocker_tests/tests.py
deleted file mode 100644
index ee0d91b..0000000
--- a/portalocker_tests/tests.py
+++ /dev/null
@@ -1,446 +0,0 @@
-import dataclasses
-import math
-import multiprocessing
-import os
-import time
-import typing
-
-import pytest
-
-import portalocker
-import portalocker.portalocker
-from portalocker import LockFlags, exceptions, utils
-
-if os.name == 'posix':
- import fcntl
-
- LOCKERS = [
- fcntl.flock,
- fcntl.lockf,
- ]
-else:
- LOCKERS = [None] # type: ignore
-
-
-@pytest.fixture
-def locker(request, monkeypatch):
- monkeypatch.setattr(portalocker.portalocker, 'LOCKER', request.param)
- return request.param
-
-
-def test_exceptions(tmpfile):
- with open(tmpfile, 'a') as a, open(tmpfile, 'a') as b:
- # Lock exclusive non-blocking
- lock_flags = portalocker.LOCK_EX | portalocker.LOCK_NB
-
- # First lock file a
- portalocker.lock(a, lock_flags)
-
- # Now see if we can lock file b
- with pytest.raises(portalocker.LockException):
- portalocker.lock(b, lock_flags)
-
-
-def test_utils_base():
- class Test(utils.LockBase):
- pass
-
-
-def test_with_timeout(tmpfile):
- # Open the file 2 times
- with pytest.raises(portalocker.AlreadyLocked):
- with portalocker.Lock(tmpfile, timeout=0.1) as fh:
- print('writing some stuff to my cache...', file=fh)
- with portalocker.Lock(
- tmpfile,
- timeout=0.1,
- mode='wb',
- fail_when_locked=True,
- ):
- pass
- print('writing more stuff to my cache...', file=fh)
-
-
-def test_without_timeout(tmpfile, monkeypatch):
- # Open the file 2 times
- with pytest.raises(portalocker.LockException):
- with portalocker.Lock(tmpfile, timeout=None) as fh:
- print('writing some stuff to my cache...', file=fh)
- with portalocker.Lock(tmpfile, timeout=None, mode='w'):
- pass
- print('writing more stuff to my cache...', file=fh)
-
-
-def test_without_fail(tmpfile):
- # Open the file 2 times
- with pytest.raises(portalocker.LockException):
- with portalocker.Lock(tmpfile, timeout=0.1) as fh:
- print('writing some stuff to my cache...', file=fh)
- lock = portalocker.Lock(tmpfile, timeout=0.1)
- lock.acquire(check_interval=0.05, fail_when_locked=False)
-
-
-def test_simple(tmpfile):
- with open(tmpfile, 'w') as fh:
- fh.write('spam and eggs')
-
- with open(tmpfile, 'r+') as fh:
- portalocker.lock(fh, portalocker.LOCK_EX)
-
- fh.seek(13)
- fh.write('foo')
-
- # Make sure we didn't overwrite the original text
- fh.seek(0)
- assert fh.read(13) == 'spam and eggs'
-
- portalocker.unlock(fh)
-
-
-def test_truncate(tmpfile):
- with open(tmpfile, 'w') as fh:
- fh.write('spam and eggs')
-
- with portalocker.Lock(tmpfile, mode='a+') as fh:
- # Make sure we didn't overwrite the original text
- fh.seek(0)
- assert fh.read(13) == 'spam and eggs'
-
- with portalocker.Lock(tmpfile, mode='w+') as fh:
- # Make sure we truncated the file
- assert fh.read() == ''
-
-
-def test_class(tmpfile):
- lock = portalocker.Lock(tmpfile)
- lock2 = portalocker.Lock(tmpfile, fail_when_locked=False, timeout=0.01)
-
- with lock:
- lock.acquire()
-
- with pytest.raises(portalocker.LockException), lock2:
- pass
-
- with lock2:
- pass
-
-
-def test_acquire_release(tmpfile):
- lock = portalocker.Lock(tmpfile)
- lock2 = portalocker.Lock(tmpfile, fail_when_locked=False)
-
- lock.acquire() # acquire lock when nobody is using it
- with pytest.raises(portalocker.LockException):
- # another party should not be able to acquire the lock
- lock2.acquire(timeout=0.01)
-
- # re-acquire a held lock is a no-op
- lock.acquire()
-
- lock.release() # release the lock
- lock.release() # second release does nothing
-
-
-def test_rlock_acquire_release_count(tmpfile):
- lock = portalocker.RLock(tmpfile)
- # Twice acquire
- h = lock.acquire()
- assert not h.closed
- lock.acquire()
- assert not h.closed
-
- # Two release
- lock.release()
- assert not h.closed
- lock.release()
- assert h.closed
-
-
-def test_rlock_acquire_release(tmpfile):
- lock = portalocker.RLock(tmpfile)
- lock2 = portalocker.RLock(tmpfile, fail_when_locked=False)
-
- lock.acquire() # acquire lock when nobody is using it
- with pytest.raises(portalocker.LockException):
- # another party should not be able to acquire the lock
- lock2.acquire(timeout=0.01)
-
- # Now acquire again
- lock.acquire()
-
- lock.release() # release the lock
- lock.release() # second release does nothing
-
-
-def test_release_unacquired(tmpfile):
- with pytest.raises(portalocker.LockException):
- portalocker.RLock(tmpfile).release()
-
-
-def test_exlusive(tmpfile):
- text_0 = 'spam and eggs'
- with open(tmpfile, 'w') as fh:
- fh.write(text_0)
-
- with open(tmpfile) as fh:
- portalocker.lock(fh, portalocker.LOCK_EX | portalocker.LOCK_NB)
-
- # Make sure we can't read the locked file
- with pytest.raises(portalocker.LockException), open(
- tmpfile,
- 'r+',
- ) as fh2:
- portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB)
- assert fh2.read() == text_0
-
- # Make sure we can't write the locked file
- with pytest.raises(portalocker.LockException), open(
- tmpfile,
- 'w+',
- ) as fh2:
- portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB)
- fh2.write('surprise and fear')
-
- # Make sure we can explicitly unlock the file
- portalocker.unlock(fh)
-
-
-def test_shared(tmpfile):
- with open(tmpfile, 'w') as fh:
- fh.write('spam and eggs')
-
- with open(tmpfile) as f:
- portalocker.lock(f, portalocker.LOCK_SH | portalocker.LOCK_NB)
-
- # Make sure we can read the locked file
- with open(tmpfile) as fh2:
- portalocker.lock(fh2, portalocker.LOCK_SH | portalocker.LOCK_NB)
- assert fh2.read() == 'spam and eggs'
-
- # Make sure we can't write the locked file
- with pytest.raises(portalocker.LockException), open(
- tmpfile,
- 'w+',
- ) as fh2:
- portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB)
- fh2.write('surprise and fear')
-
- # Make sure we can explicitly unlock the file
- portalocker.unlock(f)
-
-
-@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
-def test_blocking_timeout(tmpfile, locker):
- flags = LockFlags.SHARED
-
- with pytest.warns(UserWarning):
- with portalocker.Lock(tmpfile, 'a+', timeout=5, flags=flags):
- pass
-
- lock = portalocker.Lock(tmpfile, 'a+', flags=flags)
- with pytest.warns(UserWarning):
- lock.acquire(timeout=5)
-
-
-@pytest.mark.skipif(
- os.name == 'nt',
- reason='Windows uses an entirely different lockmechanism',
-)
-@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
-def test_nonblocking(tmpfile, locker):
- with open(tmpfile, 'w') as fh, pytest.raises(RuntimeError):
- portalocker.lock(fh, LockFlags.NON_BLOCKING)
-
-
-def shared_lock(filename, **kwargs):
- with portalocker.Lock(
- filename,
- timeout=0.1,
- fail_when_locked=False,
- flags=LockFlags.SHARED | LockFlags.NON_BLOCKING,
- ):
- time.sleep(0.2)
- return True
-
-
-def shared_lock_fail(filename, **kwargs):
- with portalocker.Lock(
- filename,
- timeout=0.1,
- fail_when_locked=True,
- flags=LockFlags.SHARED | LockFlags.NON_BLOCKING,
- ):
- time.sleep(0.2)
- return True
-
-
-def exclusive_lock(filename, **kwargs):
- with portalocker.Lock(
- filename,
- timeout=0.1,
- fail_when_locked=False,
- flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING,
- ):
- time.sleep(0.2)
- return True
-
-
-@dataclasses.dataclass(order=True)
-class LockResult:
- exception_class: typing.Union[typing.Type[Exception], None] = None
- exception_message: typing.Union[str, None] = None
- exception_repr: typing.Union[str, None] = None
-
-
-def lock(
- filename: str,
- fail_when_locked: bool,
- flags: LockFlags,
- timeout=0.1,
- keep_locked=0.05,
-) -> LockResult:
- # Returns a case of True, False or FileNotFound
- # https://thedailywtf.com/articles/what_is_truth_0x3f_
- # But seriously, the exception properties cannot be safely pickled so we
- # only return string representations of the exception properties
- try:
- with portalocker.Lock(
- filename,
- timeout=timeout,
- fail_when_locked=fail_when_locked,
- flags=flags,
- ):
- time.sleep(keep_locked)
- return LockResult()
-
- except Exception as exception:
- # The exceptions cannot be pickled so we cannot return them through
- # multiprocessing
- return LockResult(
- type(exception),
- str(exception),
- repr(exception),
- )
-
-
-@pytest.mark.parametrize('fail_when_locked', [True, False])
-def test_shared_processes(tmpfile, fail_when_locked):
- flags = LockFlags.SHARED | LockFlags.NON_BLOCKING
- print()
- print(f'{fail_when_locked=}, {flags=}, {os.name=}, {LOCKERS=}')
-
- with multiprocessing.Pool(processes=2) as pool:
- args = tmpfile, fail_when_locked, flags
- results = pool.starmap_async(lock, 2 * [args])
-
- # sourcery skip: no-loop-in-tests
- for result in results.get(timeout=1.0):
- print(f'{result=}')
- # sourcery skip: no-conditionals-in-tests
- if result.exception_class is not None:
- raise result.exception_class
- assert result == LockResult()
-
-
-@pytest.mark.parametrize('fail_when_locked', [True, False])
-@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
-def test_exclusive_processes(tmpfile: str, fail_when_locked: bool, locker):
- flags = LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING
-
- print('Locking', tmpfile, fail_when_locked, locker)
- with multiprocessing.Pool(processes=2) as pool:
- # Submit tasks individually
- result_a = pool.apply_async(lock, [tmpfile, fail_when_locked, flags])
- result_b = pool.apply_async(lock, [tmpfile, fail_when_locked, flags])
-
- try:
- a = result_a.get(timeout=1.1) # Wait for 'a' with timeout
- except multiprocessing.TimeoutError:
- a = None
-
- try:
- # Lower timeout since we already waited with `a`
- b = result_b.get(timeout=0.2) # Wait for 'b' with timeout
- except multiprocessing.TimeoutError:
- b = None
-
- assert a or b
- # Make sure a is always filled
- if b:
- b, a = b, a
-
- print(f'{a=}')
- print(f'{b=}')
-
- # make pyright happy
- assert a is not None
-
- if b:
- # make pyright happy
- assert b is not None
-
- assert not a.exception_class or not b.exception_class
- assert issubclass(
- a.exception_class or b.exception_class, # type: ignore
- portalocker.LockException,
- )
- else:
- assert not a.exception_class
-
-
-@pytest.mark.skipif(
- os.name == 'nt',
- reason='Locking on Windows requires a file object',
-)
-@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
-def test_lock_fileno(tmpfile, locker):
- with open(tmpfile, 'a+') as a:
- with open(tmpfile, 'a+') as b:
- # Lock shared non-blocking
- flags = LockFlags.SHARED | LockFlags.NON_BLOCKING
-
- # First lock file a
- portalocker.lock(a, flags)
-
- # Now see if we can lock using fileno()
- portalocker.lock(b.fileno(), flags)
-
-
-@pytest.mark.skipif(
- os.name != 'posix',
- reason='Only posix systems have different lockf behaviour',
-)
-@pytest.mark.parametrize('locker', LOCKERS, indirect=True)
-def test_locker_mechanism(tmpfile, locker):
- '''Can we switch the locking mechanism?'''
- # We can test for flock vs lockf based on their different behaviour re.
- # locking the same file.
- with portalocker.Lock(tmpfile, 'a+', flags=LockFlags.EXCLUSIVE):
- # If we have lockf(), we cannot get another lock on the same file.
- if locker is fcntl.lockf:
- portalocker.Lock(
- tmpfile,
- 'r+',
- flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING,
- ).acquire(timeout=0.1)
- # But with other lock methods we can't
- else:
- with pytest.raises(portalocker.LockException):
- portalocker.Lock(
- tmpfile,
- 'r+',
- flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING,
- ).acquire(timeout=0.1)
-
-
-def test_exception(monkeypatch, tmpfile):
- '''Do we stop immediately if the locking fails, even with a timeout?'''
-
- def patched_lock(*args, **kwargs):
- raise ValueError('Test exception')
-
- monkeypatch.setattr('portalocker.utils.portalocker.lock', patched_lock)
- lock = portalocker.Lock(tmpfile, 'w', timeout=math.inf)
-
- with pytest.raises(exceptions.LockException):
- lock.acquire()
diff --git a/pyproject.toml b/pyproject.toml
index 0c61e2b..2f937ec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,25 +4,19 @@ requires = ['setuptools', 'setuptools-scm']
[project]
name = 'portalocker'
-dynamic = ['version']
-authors = [{name = 'Rick van Hattem', email = 'wolph@wol.ph'}]
-license = {text = 'BSD-3-Clause'}
+version = "3.3.0"
+dynamic = []
+authors = [{ name = 'Rick van Hattem', email = 'wolph@wol.ph' }]
+license = 'BSD-3-Clause'
+license-files = ['LICENSE']
description = 'Wraps the portalocker recipe for easy usage'
-keywords = [
- 'locking',
- 'locks',
- 'with',
- 'statement',
- 'windows',
- 'linux',
- 'unix',
-]
+keywords = ['locking', 'locks', 'with', 'statement', 'windows', 'linux', 'unix']
readme = 'README.rst'
+
classifiers = [
'Development Status :: 5 - Production/Stable',
'Development Status :: 6 - Mature',
'Intended Audience :: Developers',
- 'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: MacOS',
@@ -37,11 +31,11 @@ classifiers = [
'Operating System :: Unix',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
- 'Programming Language :: Python :: 3.8',
- 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.13',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: IronPython',
'Programming Language :: Python :: Implementation :: PyPy',
@@ -53,9 +47,12 @@ classifiers = [
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Monitoring',
+ 'Typing :: Typed',
+]
+requires-python = '>=3.9'
+dependencies = [
+ 'pywin32>=226; platform_system == "Windows"',
]
-requires-python = '>=3.8'
-dependencies = ['pywin32>=226; platform_system == "Windows"']
[project.urls]
bugs = 'https://github.com/wolph/portalocker/issues'
@@ -63,38 +60,147 @@ documentation = 'https://portalocker.readthedocs.io/en/latest/'
repository = 'https://github.com/wolph/portalocker/'
[project.optional-dependencies]
-docs = ['sphinx>=1.7.1']
+docs = ['portalocker[tests]']
tests = [
- 'pytest>=5.4.1',
+ 'coverage-conditional-plugin>=0.9.0',
+ 'portalocker[redis]',
'pytest-cov>=2.8.1',
+ 'pytest-mypy>=0.8.0',
+ 'pytest-rerunfailures>=15.0',
'pytest-timeout>=2.1.0',
+ 'pytest>=5.4.1',
'sphinx>=6.0.0',
- 'pytest-mypy>=0.8.0',
+ 'types-pywin32>=310.0.0.20250429',
'types-redis',
- 'redis',
]
redis = ['redis']
+win32 = ['pywin32>=226']
[tool.setuptools]
platforms = ['any']
include-package-data = false
-[tool.setuptools.dynamic]
-version = { attr = 'portalocker.__about__.__version__' }
-
[tool.setuptools.packages.find]
include = ['portalocker']
[tool.setuptools.package-data]
portalocker = ['py.typed', 'msvcrt.pyi']
-[tool.black]
-line-length = 79
-skip-string-normalization = true
-
[tool.codespell]
skip = '*/htmlcov,./docs/_build,*.asc'
[tool.pyright]
+pythonVersion = '3.9'
include = ['portalocker', 'portalocker_tests']
exclude = ['dist/*']
+strict = ['portalocker']
+
+[tool.mypy]
+python_version = '3.9'
+strict = true
+warn_return_any = true
+warn_unused_configs = true
+warn_unused_ignores = false
+packages = ['portalocker', 'portalocker_tests']
+ignore_missing_imports = true
+check_untyped_defs = true
+exclude = [
+ 'dist',
+ 'docs',
+ '.venv',
+ 'venv',
+ 'build',
+]
+enable_error_code = ['ignore-without-code', 'truthy-bool', 'redundant-expr']
+warn_unreachable = true
+
+[[tool.mypy.overrides]]
+module = ['portalocker_tests.*']
+disallow_untyped_defs = false
+
+[dependency-groups]
+dev = [
+ 'lefthook>=1.11.13',
+ 'mypy>=1.15.0',
+ 'portalocker[tests]',
+ 'pyright>=1.1.401',
+ 'ruff>=0.11.11',
+ 'tox>=4.26.0',
+]
+
+[tool.ruff]
+src = ['portalocker', 'portalocker_tests']
+include = ['portalocker/**/*.py', 'portalocker_tests/**/*.py']
+
+[tool.repo-review]
+ignore = [
+ 'PC111', # no blacken-docs because markdown has no code
+ 'PC140', # manual typecheck pre-commit hooks
+ 'PC170', # no pygrep-hooks because no rST
+ 'PY005', # Tests folder is not named tests
+ 'PY006', # pre-commit should not be used
+ 'PY007', # tox configured in tox.toml
+ 'RTD', # no RTD
+]
+
+[tool.coverage.run]
+plugins = ['coverage_conditional_plugin']
+branch = true
+source = ['portalocker', 'portalocker_tests']
+
+[tool.coverage.report]
+ignore_errors = true
+fail_under = 100
+exclude_also = [
+ 'def __repr__',
+ 'if self.debug:',
+ 'if settings.DEBUG',
+ 'if typing.TYPE_CHECKING',
+ 'raise AssertionError',
+ 'raise NotImplementedError',
+ 'if 0:',
+ 'if __name__ == .__main__.:',
+ 'typing.Protocol',
+]
+omit = [
+ 'portalocker/redis.py',
+]
+
+[tool.coverage.coverage_conditional_plugin.rules]
+nt = 'os_name == "nt"'
+not-nt = 'os_name != "nt"'
+posix = 'os_name == "posix"'
+not-posix = 'os_name != "posix"'
+
+[tool.pytest.ini_options]
+minversion = '8.3'
+log_cli_level = 'INFO'
+xfail_strict = true
+filterwarnings = [
+ 'error',
+ 'ignore::DeprecationWarning',
+ 'ignore::PendingDeprecationWarning',
+ 'ignore::ImportWarning',
+ 'ignore::ResourceWarning',
+]
+testpaths = [
+ 'portalocker',
+ 'portalocker_tests',
+]
+
+python_files = [
+ 'portalocker_tests/*.py',
+]
+
+addopts = '''
+ -ra
+ --strict-markers
+ --strict-config
+ --ignore setup.py
+ --ignore portalocker/_*.py
+ --doctest-modules
+ --cov portalocker
+ --cov-report term-missing
+'''
+
+timeout = 20
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index 64f7177..0000000
--- a/pytest.ini
+++ /dev/null
@@ -1,13 +0,0 @@
-[pytest]
-python_files =
- portalocker_tests/*.py
-
-addopts =
- --ignore setup.py
- --ignore portalocker/_*.py
- --doctest-modules
- --cov portalocker
- --cov-report term-missing
- --cov-report html
-
-timeout = 20
diff --git a/ruff.toml b/ruff.toml
index e4180ff..8b56bee 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,12 +1,16 @@
# We keep the ruff configuration separate so it can easily be shared across
# all projects
+target-version = 'py39'
-target-version = 'py38'
-
-src = ['portalocker']
-exclude = ['docs']
+exclude = [
+ 'docs',
+ '.tox',
+ # Ignore local test files/directories/old-stuff
+ 'test.py',
+ '*_old.py',
+]
-line-length = 80
+line-length = 79
[lint]
ignore = [
@@ -25,7 +29,12 @@ ignore = [
'C408', # Unnecessary {obj_type} call (rewrite as a literal)
'SIM114', # Combine `if` branches using logical `or` operator
'RET506', # Unnecessary `else` after `raise` statement
+    'Q001', # Bad quotes in multiline string; project standardizes on single quotes
+    'Q002', # Bad quotes in docstring; project standardizes on single quotes
'FA100', # Missing `from __future__ import annotations`, but uses `typing.Optional`
+ 'COM812', # Missing trailing comma in a list
+ 'SIM108', # Ternary operators are not always more readable
+ 'RUF100', # Unused noqa directives. Due to multiple Python versions, we need to keep them
]
select = [
@@ -45,7 +54,6 @@ select = [
'I', # isort
'ICN', # flake8-import-conventions
'INP', # flake8-no-pep420
- 'ISC', # flake8-implicit-str-concat
'N', # pep8-naming
'NPY', # NumPy-specific rules
'PERF', # perflint,
@@ -66,7 +74,10 @@ select = [
[lint.pydocstyle]
convention = 'google'
-ignore-decorators = ['typing.overload']
+ignore-decorators = [
+ 'typing.overload',
+ 'typing.override',
+]
[lint.isort]
case-sensitive = true
@@ -77,3 +88,20 @@ force-wrap-aliases = true
docstring-quotes = 'single'
inline-quotes = 'single'
multiline-quotes = 'single'
+
+[format]
+line-ending = 'lf'
+indent-style = 'space'
+quote-style = 'single'
+docstring-code-format = true
+skip-magic-trailing-comma = false
+exclude = [
+ '__init__.py',
+]
+
+[lint.pycodestyle]
+max-line-length = 79
+
+[lint.flake8-pytest-style]
+mark-parentheses = true
+
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 29bf765..0000000
--- a/tox.ini
+++ /dev/null
@@ -1,84 +0,0 @@
-[tox]
-envlist =
- py38
- py39
- py310
- py311
- py312
- pypy3
- flake8
- docs
- mypy
- pyright
- ruff
- codespell
- black
-
-skip_missing_interpreters = True
-
-[testenv]
-pass_env =
- FORCE_COLOR
-basepython =
- py38: python3.8
- py39: python3.9
- py310: python3.10
- py311: python3.11
- py312: python3.12
- pypy3: pypy3
-
-deps = -e{toxinidir}[tests,redis]
-commands = python -m pytest {posargs}
-
-[testenv:mypy]
-basepython = python3
-deps = mypy
-commands =
- mypy --install-types --non-interactive
- mypy
-
-[testenv:pyright]
-changedir =
-basepython = python3
-deps =
- pyright
- -e{toxinidir}[tests,redis]
-commands = pyright {toxinidir}/portalocker {toxinidir}/portalocker_tests
-
-[testenv:flake8]
-basepython = python3
-deps = flake8>=6.0.0
-commands = flake8 {toxinidir}/portalocker {toxinidir}/portalocker_tests
-
-[testenv:black]
-basepython = python3
-deps = black
-commands = black {toxinidir}/portalocker {toxinidir}/portalocker_tests
-
-[testenv:docs]
-basepython = python3
-deps = -r{toxinidir}/docs/requirements.txt
-allowlist_externals =
- rm
- mkdir
-whitelist_externals =
- rm
- cd
- mkdir
-commands =
- rm -f docs/modules.rst
- mkdir -p docs/_static
- sphinx-apidoc -e -o docs/ portalocker
- rm -f docs/modules.rst
- sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html {posargs}
-
-[testenv:ruff]
-commands = ruff check {toxinidir}/portalocker {toxinidir}/portalocker_tests
-deps = ruff
-skip_install = true
-
-[testenv:codespell]
-commands = codespell .
-deps = codespell
-skip_install = true
-command = codespell
diff --git a/tox.toml b/tox.toml
new file mode 100644
index 0000000..5463610
--- /dev/null
+++ b/tox.toml
@@ -0,0 +1,98 @@
+min_version = '4'
+requires = ['tox-uv>=1']
+
+env_list = [
+ 'py39',
+ 'py310',
+ 'py311',
+ 'py312',
+ 'pypy3',
+ 'docs',
+ 'mypy',
+ 'pyright',
+ 'ruff',
+ 'repo-review',
+ 'codespell',
+]
+skip_missing_interpreters = true
+
+[env_run_base]
+labels = ['python']
+pass_env = ['FORCE_COLOR']
+commands = [
+ [
+ 'mypy',
+ '--cache-dir=/dev/null',
+ '--soft-error-limit=-1',
+ '.',
+ ],
+ ['pyright'],
+ ['pytest', '{posargs}'],
+]
+allowlist_externals = ['pytest', 'mypy', 'pyright']
+extras = ['tests', 'redis']
+
+[env.mypy]
+labels = ['lint', 'nontest']
+commands = [['mypy']]
+
+[env.pyright]
+labels = ['lint', 'nontest']
+deps = ['pyright']
+commands = [['pyright']]
+
+[env.ruff]
+labels = ['lint', 'nontest']
+deps = ['ruff']
+commands = [['ruff', 'check'], ['ruff', 'format', '--check']]
+
+[env.docs]
+labels = ['docs', 'nontest']
+extras = ['docs']
+allowlist_externals = ['rm', 'cd', 'mkdir']
+commands = [
+ [
+ 'rm',
+ '-f',
+ 'docs/modules.rst',
+ ],
+ [
+ 'mkdir',
+ '-p',
+ 'docs/_static',
+ ],
+ [
+ 'sphinx-apidoc',
+ '-e',
+ '-o',
+ 'docs/',
+ 'portalocker',
+ ],
+ [
+ 'rm',
+ '-f',
+ 'docs/modules.rst',
+ ],
+ [
+ 'sphinx-build',
+ '-b',
+ 'html',
+ '-d',
+ 'docs/_build/doctrees',
+ 'docs',
+ 'docs/_build/html',
+ ],
+]
+
+[env.repo-review]
+labels = ['lint', 'nontest']
+basepython = ['py313']
+deps = ['sp-repo-review[cli]', 'validate-pyproject', 'tomli', 'packaging']
+commands = [['repo-review']]
+
+[env.codespell]
+labels = ['lint', 'nontest']
+commands = [['codespell']]
+deps = ['codespell', 'tomli']
+skip_install = true
+# Note: the stray `command = codespell` line from the old tox.ini was dropped;