diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..eb144a1
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,133 @@
+name: CI
+
+on:
+  push:
+    branches: [master]
+    tags:
+      - 'v*'
+  pull_request:
+
+jobs:
+
+  pre-commit:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v5
+    - name: Set up Python
+      uses: actions/setup-python@v6
+      with:
+        python-version: "3.14"
+    - name: Installation (deps and package)
+      run: pip install .
+    - uses: pre-commit/action@v3.0.1
+
+  tests:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        python-version: ['3.10', 3.11, 3.12, 3.13, 3.14]
+        os: [ubuntu-latest, windows-latest]
+
+    steps:
+    - uses: actions/checkout@v5
+
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v6
+      with:
+        python-version: ${{ matrix.python-version }}
+
+    - name: Installation (deps and package)
+      # we install with flit --pth-file,
+      # so that coverage will be recorded for the module
+      run: |
+        pip install flit
+        flit install --deps=production --extras=test --pth-file
+
+    - name: Run pytest
+      run: |
+        pytest --cov=mdformat_pyproject --cov-report=xml --cov-report=term-missing
+
+    - name: Store PR number and commit SHA
+      if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.14
+      run: |
+        echo "Storing PR number ${{ github.event.number }}"
+        echo "${{ github.event.number }}" > pr_number.txt
+
+        echo "Storing commit SHA ${{ github.event.pull_request.head.sha }}"
+        echo "${{ github.event.pull_request.head.sha }}" > commit_sha.txt
+
+    # Workaround for Codecov tokenless upload errors on external PRs.
+    # Copied and adjusted from the workarounds suggested in the link below:
+    # https://github.com/codecov/feedback/issues/301#issuecomment-2009355183
+    # The triggered sub-workflow is not able to detect the original commit/PR,
+    # which is available in this workflow.
+    - name: Store PR number
+      if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.14
+      uses: actions/upload-artifact@v4
+      with:
+        name: pr_number
+        path: pr_number.txt
+
+    - name: Store commit SHA
+      if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.14
+      uses: actions/upload-artifact@v4
+      with:
+        name: commit_sha
+        path: commit_sha.txt
+
+    # This stores the coverage report in artifacts. The actual upload to Codecov
+    # is executed by the separate `codecov.yml` workflow. The reason for the
+    # split is that `on.pull_request` workflows don't have access to secrets.
+    - name: Store coverage report in artifacts
+      if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.14
+      uses: actions/upload-artifact@v4
+      with:
+        name: codecov_report
+        path: ./coverage.xml
+
+    - run: |
+        echo "The coverage report was stored in GitHub artifacts."
+        echo "It will be uploaded to Codecov by the [codecov.yml] workflow shortly."
+      if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.14
+
+  pre-commit-hook:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v5
+    - name: Set up Python
+      uses: actions/setup-python@v6
+      with:
+        python-version: 3.14
+
+    - name: Installation (deps and package)
+      run: |
+        pip install pre-commit
+        pip install .
+
+    - name: run pre-commit with plugin
+      run: |
+        pre-commit run --config .pre-commit-test.yaml --all-files --verbose --show-diff-on-failure
+
+  publish:
+    name: Publish to PyPI
+    needs: [pre-commit, tests, pre-commit-hook]
+    if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
+    runs-on: ubuntu-latest
+    steps:
+    - name: Checkout source
+      uses: actions/checkout@v5
+    - name: Set up Python 3.14
+      uses: actions/setup-python@v6
+      with:
+        python-version: 3.14
+    - name: install flit
+      run: |
+        pip install flit~=3.0
+    - name: Build and publish
+      run: |
+        flit publish
+      env:
+        FLIT_USERNAME: __token__
+        FLIT_PASSWORD: ${{ secrets.PYPI_KEY }}
diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml
new file mode 100644
index 0000000..942149b
--- /dev/null
+++ b/.github/workflows/codecov.yml
@@ -0,0 +1,112 @@
+name: CodeCov Report Upload
+
+on:
+  # This workflow is triggered after every successful execution
+  # of the `CI` workflow.
+  workflow_run:
+    workflows: ["CI"]
+    types:
+      - completed
+
+jobs:
+  coverage:
+    name: CodeCov Report Upload
+    runs-on: ubuntu-latest
+    steps:
+
+    - name: 'Download existing coverage report'
+      id: prepare_report
+      uses: actions/github-script@v8
+      with:
+        script: |
+          var fs = require('fs');
+
+          // List artifacts of the workflow run that triggered this workflow
+          var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            run_id: context.payload.workflow_run.id,
+          });
+
+          let codecovReport = artifacts.data.artifacts.filter((artifact) => {
+            return artifact.name == "codecov_report";
+          });
+
+          if (codecovReport.length != 1) {
+            throw new Error("Unexpected number of {codecov_report} artifacts: " + codecovReport.length);
+          }
+
+          var download = await github.rest.actions.downloadArtifact({
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            artifact_id: codecovReport[0].id,
+            archive_format: 'zip',
+          });
+          fs.writeFileSync('codecov_report.zip', Buffer.from(download.data));
+
+          let prNumber = artifacts.data.artifacts.filter((artifact) => {
+            return artifact.name == "pr_number";
+          });
+
+          if (prNumber.length != 1) {
+            throw new Error("Unexpected number of {pr_number} artifacts: " + prNumber.length);
+          }
+
+          var download = await github.rest.actions.downloadArtifact({
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            artifact_id: prNumber[0].id,
+            archive_format: 'zip',
+          });
+          fs.writeFileSync('pr_number.zip', Buffer.from(download.data));
+
+          let commitSha = artifacts.data.artifacts.filter((artifact) => {
+            return artifact.name == "commit_sha";
+          });
+
+          if (commitSha.length != 1) {
+            throw new Error("Unexpected number of {commit_sha} artifacts: " + commitSha.length);
+          }
+
+          var download = await github.rest.actions.downloadArtifact({
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            artifact_id: commitSha[0].id,
+            archive_format: 'zip',
+          });
+          fs.writeFileSync('commit_sha.zip', Buffer.from(download.data));
+
+    - id: parse_previous_artifacts
+      run: |
+        unzip codecov_report.zip
+        unzip pr_number.zip
+        unzip commit_sha.zip
+
+        echo "Detected PR is: $(<pr_number.txt)"
+        echo "Detected commit SHA is: $(<commit_sha.txt)"
+
+        echo "override_pr=$(<pr_number.txt)" >> "$GITHUB_OUTPUT"
+        echo "override_commit=$(<commit_sha.txt)" >> "$GITHUB_OUTPUT"
+
+    - name: Checkout repository
+      uses: actions/checkout@v5
+      with:
+        ref: ${{ steps.parse_previous_artifacts.outputs.override_commit || '' }}
+        path: repo_root
+
+    - name: Upload coverage to Codecov
+      uses: codecov/codecov-action@v5
+      with:
+        token: ${{ secrets.CODECOV_TOKEN }}
+        files: ${{ github.workspace }}/coverage.xml
+        fail_ci_if_error: true
+        # Manual overrides for these parameters are needed because automatic detection
+        # in codecov-action does not work for non-`pull_request` workflows.
+        # On `master` branch pushes, these default to empty strings since we want to run
+        # the analysis on HEAD.
+        override_commit: ${{ steps.parse_previous_artifacts.outputs.override_commit || '' }}
+        override_pr: ${{ steps.parse_previous_artifacts.outputs.override_pr || '' }}
+        working-directory: ${{ github.workspace }}/repo_root
+        # Location where coverage report files are searched for
+        directory: ${{ github.workspace }}
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100644
index fd5aca2..0000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,100 +0,0 @@
-name: CI
-
-on:
-  push:
-    branches: [master]
-    tags:
-      - 'v*'
-  pull_request:
-
-jobs:
-
-  pre-commit:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3.11
-    - name: Installation (deps and package)
-      run: pip install .
-    - uses: pre-commit/action@v2.0.0
-
-  tests:
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        python-version: [3.7, 3.8, 3.9, '3.10', 3.11]
-        os: [ubuntu-latest, windows-latest]
-
-    steps:
-    - uses: actions/checkout@v3
-
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
-      with:
-        python-version: ${{ matrix.python-version }}
-
-    - name: Installation (deps and package)
-      # we install with flit --pth-file,
-      # so that coverage will be recorded for the module
-      run: |
-        pip install flit
-        flit install --deps=production --extras=test --pth-file
-
-    - name: Run pytest
-      run: |
-        pytest --cov=mdformat_pyproject --cov-report=xml --cov-report=term-missing
-
-    - name: Upload to Codecov
-      if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.11
-      uses: codecov/codecov-action@v5
-      with:
-        fail_ci_if_error: true
-        files: ./coverage.xml
-        flags: pytests
-        name: pytests-py3.11
-        token: ${{ secrets.CODECOV_TOKEN }}
-
-  pre-commit-hook:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3.11
-
-    - name: Installation (deps and package)
-      run: |
-        pip install pre-commit
-        pip install .
-
-    - name: run pre-commit with plugin
-      run: |
-        pre-commit run --config .pre-commit-test.yaml --all-files --verbose --show-diff-on-failure
-
-  publish:
-    name: Publish to PyPi
-    needs: [pre-commit, tests, pre-commit-hook]
-    if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
-    runs-on: ubuntu-latest
-    steps:
-    - name: Checkout source
-      uses: actions/checkout@v3
-    - name: Set up Python 3.11
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3.11
-    - name: install flit
-      run: |
-        pip install flit~=3.0
-    - name: Build and publish
-      run: |
-        flit publish
-      env:
-        FLIT_USERNAME: __token__
-        FLIT_PASSWORD: ${{ secrets.PYPI_KEY }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a1b15e7..620deb7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
+  rev: v6.0.0
   hooks:
   - id: end-of-file-fixer
   - id: mixed-line-ending
@@ -8,19 +8,19 @@ repos:
   - id: check-yaml
   - id: check-toml
 - repo: https://github.com/pre-commit/pygrep-hooks
-  rev: v1.9.0
+  rev: v1.10.0
   hooks:
   - id: python-check-blanket-noqa
 - repo: https://github.com/timothycrosley/isort
-  rev: 5.13.2
+  rev: 7.0.0
   hooks:
   - id: isort
 - repo: https://github.com/psf/black
-  rev: 22.12.0
+  rev: 25.9.0
   hooks:
   - id: black
 - repo: https://github.com/pycqa/flake8
-  rev: 6.0.0
+  rev: 7.3.0
   hooks:
   - id: flake8
     additional_dependencies:
@@ -28,10 +28,9 @@ repos:
     - flake8-bugbear
     - flake8-builtins
    - flake8-comprehensions
-- repo: local
+- repo: https://github.com/hukkin/mdformat
+  rev: 0.7.21
   hooks:
   - id: mdformat
-    name: mdformat
-    entry: mdformat
-    language: python
-    types: [markdown]
+    additional_dependencies:
+    - mdformat-pyproject
diff --git a/README.md b/README.md
index d33aa12..aea8006 100644
--- a/README.md
+++ b/README.md
@@ -36,15 +36,15 @@ Add the following to your `.pre-commit-config.yaml`:
 
 ```yaml
 - repo: https://github.com/executablebooks/mdformat
-  rev: 0.7.13  # Use the ref you want to point at
+  rev: 1.0.0  # Use the ref you want to point at
   hooks:
   - id: mdformat
     additional_dependencies:
     - mdformat-pyproject
 ```
 
-[ci-badge]: https://github.com/csala/mdformat-pyproject/actions/workflows/tests.yml/badge.svg
-[ci-link]: https://github.com/csala/mdformat-pyproject/actions/workflows/tests.yml
+[ci-badge]: https://github.com/csala/mdformat-pyproject/actions/workflows/ci.yml/badge.svg
+[ci-link]: https://github.com/csala/mdformat-pyproject/actions/workflows/ci.yml
 [cov-badge]: https://codecov.io/gh/csala/mdformat-pyproject/branch/master/graph/badge.svg
 [cov-link]: https://codecov.io/gh/csala/mdformat-pyproject
 [mdformat]: https://github.com/executablebooks/mdformat
diff --git a/mdformat_pyproject/plugin.py b/mdformat_pyproject/plugin.py
index 7d11664..5ad664c 100644
--- a/mdformat_pyproject/plugin.py
+++ b/mdformat_pyproject/plugin.py
@@ -1,45 +1,37 @@
 """Main plugin module."""
-import pathlib
+from __future__ import annotations
+
 import sys
-from typing import TYPE_CHECKING, MutableMapping, Optional, Sequence, Union
+from functools import cache
+from pathlib import Path
 
 import markdown_it
 import mdformat
 
+TYPE_CHECKING = False
 if TYPE_CHECKING:
+    from collections.abc import Sequence
+
     from mdformat.renderer.typing import Render
 
+    _ConfigOptions = dict[str, int | str | Sequence[str]]
+
 if sys.version_info >= (3, 11):
     import tomllib
 else:
     import tomli as tomllib
 
-if sys.version_info >= (3, 9):
-    from functools import cache
-else:
-    from functools import lru_cache
-
-    cache = lru_cache()
-
-
-_ConfigOptions = MutableMapping[str, Union[int, str, Sequence[str]]]
-
 
 @cache
-def _find_pyproject_toml_path(search_path: str) -> Optional[pathlib.Path]:
-    """Find the pyproject.toml file that corresponds to the search path.
+def _find_pyproject_toml_path(search_path: Path) -> Path | None:
+    """Find the pyproject.toml file that applies to the search path.
 
     The search is done ascending through the folders tree until a pyproject.toml
     file is found in the same folder. If the root '/' is reached, None is returned.
-
-    The special path "-" used for stdin inputs is replaced with the current working
-    directory.
     """
-    if search_path == "-":
-        search_path = pathlib.Path.cwd()
-    else:
-        search_path = pathlib.Path(search_path).resolve()
+    if search_path.is_file():
+        search_path = search_path.parent
 
     for parent in (search_path, *search_path.parents):
         candidate = parent / "pyproject.toml"
@@ -50,13 +42,15 @@ def _find_pyproject_toml_path(search_path: str) -> Optional[pathlib.Path]:
 
 
 @cache
-def _parse_pyproject(pyproject_path: pathlib.Path) -> Optional[_ConfigOptions]:
+def _parse_pyproject(pyproject_path: Path) -> _ConfigOptions | None:
     """Extract and validate the mdformat options from the pyproject.toml file.
 
     The options are searched inside a [tool.mdformat] key within the toml file,
     and they are validated using the default functions from `mdformat._conf`.
     """
-    content = tomllib.loads(pyproject_path.read_text())
+    with pyproject_path.open(mode="rb") as pyproject_file:
+        content = tomllib.load(pyproject_file)
+
     options = content.get("tool", {}).get("mdformat")
     if options is not None:
         mdformat._conf._validate_keys(options, pyproject_path)
@@ -66,50 +60,26 @@
 
 
 @cache
-def _reload_cli_opts() -> _ConfigOptions:
-    """Re-parse the sys.argv array to deduce which arguments were used in the CLI.
-
-    If unknown arguments are found, we deduce that mdformat is being used as a
-    python library and therefore no mdformat command line arguments were passed.
+def read_toml_opts(conf_dir: Path) -> tuple[dict, Path | None]:
+    """Alternative read_toml_opts that reads from pyproject.toml instead of .mdformat.toml.
 
-    Notice that the strategy above does not fully close the door to situations
-    with colliding arguments with different meanings, but the rarity of the
-    situation and the complexity of a possible solution makes the risk worth taking.
+    Notice that if `.mdformat.toml` exists it is ignored.
""" - import mdformat._cli - - if hasattr(mdformat.plugins, "_PARSER_EXTENSION_DISTS"): - # New API, mdformat>=0.7.19 - arg_parser = mdformat._cli.make_arg_parser( - mdformat.plugins._PARSER_EXTENSION_DISTS, - mdformat.plugins._CODEFORMATTER_DISTS, - mdformat.plugins.PARSER_EXTENSIONS, - ) + pyproject_path = _find_pyproject_toml_path(conf_dir) + if pyproject_path: + pyproject_opts = _parse_pyproject(pyproject_path) else: - # Backwards compatibility, mdformat<0.7.19 - arg_parser = mdformat._cli.make_arg_parser( - mdformat.plugins.PARSER_EXTENSIONS, - mdformat.plugins.CODEFORMATTERS, - ) + pyproject_opts = {} - args, unknown = arg_parser.parse_known_args(sys.argv[1:]) - if unknown: - return {} - - return {key: value for key, value in vars(args).items() if value is not None} + return pyproject_opts, pyproject_path def update_mdit(mdit: markdown_it.MarkdownIt) -> None: - """Read the pyproject.toml file and re-create the mdformat options.""" - mdformat_options: _ConfigOptions = mdit.options["mdformat"] - file_path = mdformat_options.get("filename", "-") - pyproject_path = _find_pyproject_toml_path(file_path) - if pyproject_path: - pyproject_opts = _parse_pyproject(pyproject_path) - if pyproject_opts is not None: - cli_opts = _reload_cli_opts() - mdformat_options.update(**pyproject_opts) - mdformat_options.update(**cli_opts) + """No-op, since this plugin only monkey patches and does not modify mdit.""" + pass + +RENDERERS: dict[str, Render] = {} -RENDERERS: MutableMapping[str, "Render"] = {} +# Monkey patch mdformat._conf to use our own read_toml_opts version +mdformat._conf.read_toml_opts = read_toml_opts diff --git a/pyproject.toml b/pyproject.toml index 91c6f55..30b9130 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,16 +15,16 @@ classifiers = [ "Topic :: Software Development :: Libraries :: Python Modules", ] keywords = ["mdformat", "markdown", "markdown-it"] -requires-python = ">=3.7" +requires-python = ">=3.10" dependencies = [ - "mdformat ~=0.7", + "mdformat ~= 1.0.0", "tomli ~=2.0; python_version < '3.11'" ] dynamic = ["version", "description"] [project.optional-dependencies] test = [ - "pytest~=6.0", + "pytest >= 8.4.0", "coverage", "pytest-cov", ] @@ -46,6 +46,7 @@ profile = "black" [tool.mdformat] wrap = 99 number = true +exclude = [".tox/**", ".venv/**"] [tool.coverage.report] exclude_lines = [ diff --git a/tests/test_plugin.py b/tests/test_plugin.py index e1ea5f1..1a2e911 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -6,11 +6,12 @@ import markdown_it import pytest -from mdformat._conf import InvalidConfError from mdformat_pyproject import plugin -THIS_MODULE_PATH = pathlib.Path(__file__).parent +THIS_MODULE_PATH = pathlib.Path(__file__) +THIS_MODULE_PARENT = THIS_MODULE_PATH.parent +PYPROJECT_PATH = THIS_MODULE_PARENT.parent / "pyproject.toml" def setup_function(): @@ -21,29 +22,36 @@ def setup_function(): @pytest.fixture -def fake_filename(): +def nonexistent_path(): fake_parent = "/fake" while pathlib.Path(fake_parent).exists(): fake_parent += "e" - return str(pathlib.Path(fake_parent) / "path" / "to" / "a" / "file.md") + return pathlib.Path(fake_parent) / "path" / "to" / "a" / "file.md" -@unittest.mock.patch("mdformat_pyproject.plugin.pathlib.Path.cwd", lambda: THIS_MODULE_PATH) -def test__find_pyproject_toml_path_cwd(): - """Test _find_pyproject_toml_path when search_path is `-`. +def test__find_pyproject_toml_path_directory_inside_project(): + """Test _find_pyproject_toml_path when search_path points at a directory within the project. 
-    Setup:
-    - Patch Path.cwd to return the path of this module, to ensure
-      that the `cwd` points at a subfolder of the project regardless
-      of where the `pytest` command was executed.
     Input:
-    - search_path="-"
+    - search_path=THIS_MODULE_PATH -> directory is inside the project
     Expected output:
     - pyproject.toml of this project.
     """
-    returned = plugin._find_pyproject_toml_path("-")
-    assert returned == THIS_MODULE_PATH.parent / "pyproject.toml"
+    returned = plugin._find_pyproject_toml_path(THIS_MODULE_PARENT)
+    assert returned == PYPROJECT_PATH
+
+
+def test__find_pyproject_toml_path_directory_outside_project(nonexistent_path):
+    """Test _find_pyproject_toml_path when search_path points at a directory outside the project.
+
+    Input:
+    - search_path=nonexistent_path.parent -> directory is outside the project
+    Expected output:
+    - None
+    """
+    returned = plugin._find_pyproject_toml_path(nonexistent_path.parent)
+    assert returned is None
 
 
 def test__find_pyproject_toml_path_file_inside_project():
@@ -54,131 +62,80 @@
     Expected output:
     - pyproject.toml of this project.
     """
-    returned = plugin._find_pyproject_toml_path(__file__)
-    assert returned == THIS_MODULE_PATH.parent / "pyproject.toml"
+    returned = plugin._find_pyproject_toml_path(THIS_MODULE_PATH)
+    assert returned == PYPROJECT_PATH
 
 
-def test__find_pyproject_toml_path_file_outside_of_project(fake_filename):
+def test__find_pyproject_toml_path_file_outside_of_project(nonexistent_path):
     """Test _find_pyproject_toml_path when search_path points at a file outside of a project.
 
     Input:
-    - search_path="/fake/folder/path" -> A madeup path to an inexisting folder.
+    - search_path="/fake/folder/path" -> A made-up path to a nonexistent folder.
     Expected output:
     - None
     """
-    returned = plugin._find_pyproject_toml_path(fake_filename)
+    returned = plugin._find_pyproject_toml_path(nonexistent_path)
     assert returned is None
 
 
-def get_mdit(filename, **kwargs):
-    mdit = unittest.mock.Mock(spec_set=markdown_it.MarkdownIt())
-    mdformat_options = {
-        "check": False,
-        "end_of_line": "lf",
-        "filename": str(pathlib.Path(filename).resolve()),
-        "number": False,
-        "paths": [filename],
-        "wrap": 80,
-    }
-    mdit.options = {"mdformat": {**mdformat_options, **kwargs}}
-    return mdit
-
-
-def test_update_mdit_no_config(fake_filename):
-    """Test update_mdit when there is no pyproject.toml.
-
-    Input:
-    - mdit with the default opts and a filename located inside a fake folder
-    Excepted Side Effect:
-    - mdit options should remain untouched
-    """
-    mdit = get_mdit(fake_filename)
-    expected_options = copy.deepcopy(mdit.options["mdformat"])
-
-    plugin.update_mdit(mdit)
-
-    assert mdit.options["mdformat"] == expected_options
-
-
-def test_update_mdit_pyproject():
-    """Test update_mdit when there is configuration inside the pyproject.toml file.
+def test_read_toml_opts_with_pyproject():
+    """Test read_toml_opts when there is a pyproject.toml file.
 
     Input:
-    - mdit with the default opts and a filename located inside the current project.
-    Excepted Side Effect:
-    - mdit options should be updated to the pyproject values
+    - conf_dir pointing to this module's folder
+    Expected Output:
+    - Tuple containing:
+      - Dict with the mdformat options from pyproject.toml
+      - Path to the pyproject.toml file
     """
-    mdit = get_mdit(__file__)
-
-    plugin.update_mdit(mdit)
-
-    mdformat_options = mdit.options["mdformat"]
-    assert mdformat_options["wrap"] == 99
-    assert mdformat_options["number"] is True
-    assert mdformat_options["end_of_line"] == "lf"
-
+    # run
+    opts, path = plugin.read_toml_opts(THIS_MODULE_PATH)
 
-_BROKEN_OPTS = {"tool": {"mdformat": {"invalid": "option"}}}
+    # assert
+    assert opts == {"wrap": 99, "number": True, "exclude": [".tox/**", ".venv/**"]}
+    assert path == PYPROJECT_PATH
 
 
-@unittest.mock.patch("mdformat_pyproject.plugin.tomllib.loads", lambda _: _BROKEN_OPTS)
-def test_update_mdit_invalid_pyproject():
-    """Test update_mdit when there are invlid options inside the pyproject.toml file.
+def test_read_toml_opts_without_pyproject(nonexistent_path):
+    """Test read_toml_opts when there is no pyproject.toml file.
 
-    Setup:
-    - Mock tomllib.loads to return an invalid pyproject.toml file.
-    - Also ensure that the loads cache is clear
     Input:
-    - mdit with the default opts and a filename located inside the current project.
-    Excepted Side Effect:
-    - _validate_keys should raise an exception.
-
+    - conf_dir pointing to a non-existent folder
+    Expected Output:
+    - Tuple containing:
+      - Empty dict
+      - None
     """
-    mdit = get_mdit(__file__)
+    # run
+    opts, path = plugin.read_toml_opts(nonexistent_path)
 
-    with pytest.raises(InvalidConfError):
-        plugin.update_mdit(mdit)
+    # assert
+    assert opts == {}
+    assert path is None
 
 
-@unittest.mock.patch("mdformat_pyproject.plugin.sys.argv", ["mdformat", "--wrap", "70", __file__])
-def test_update_mdit_pyproject_and_cli():
-    """Test update_mdit when there are conflicting pyproject.toml configuration and cli argumnents.
+def test_update_mdit_no_config():
+    """Test update_mdit which is now a no-op.
 
-    Setup:
-    - Patch sys.argv to inject cli options different than the pyproject.toml.
     Input:
-    - mdit with the default opts and a filename located inside the current project.
-    Excepted Side Effect:
-    - mdit options should be updated, with the cli options having priority over the
-      pyproject ones.
+    - mdit with arbitrary configuration
+    Expected Side Effect:
+    - mdit options should remain untouched
     """
-    mdit = get_mdit(__file__)
-    expected_options = copy.deepcopy(mdit.options["mdformat"])
-
-    plugin.update_mdit(mdit)
-
-    expected_options["wrap"] = 70
-    expected_options["number"] = True
-    assert mdit.options["mdformat"] == expected_options
-
-
-@unittest.mock.patch("mdformat_pyproject.plugin.sys.argv", ["fake", "--wrap", "70", "--unknown"])
-def test_update_mdit_unknown_cli_arguments():
-    """Test update_mdit when there are unknown arguments passed in the command line.
+    filename = "/some/file/name.toml"
+    mdformat_options = {
+        "check": False,
+        "end_of_line": "lf",
+        "filename": filename,
+        "number": False,
+        "paths": [filename],
+        "wrap": 80,
+    }
+    mdit = unittest.mock.Mock(spec_set=markdown_it.MarkdownIt())
+    mdit.options = {"mdformat": mdformat_options}
 
-    Setup:
-    - Mock sys.argv to inject unknown cli options.
-    Input:
-    - mdit with the default opts and a filename located inside the current project.
-    Excepted Side Effect:
-    - The CLI arguments are discarded and only the pyproject.toml options are
-      injected into the mdit options.
- """ - mdit = get_mdit(__file__) - expected_options = copy.deepcopy(mdit.options["mdformat"]) + expected_options = copy.deepcopy(mdformat_options) plugin.update_mdit(mdit) - expected_options["wrap"] = 99 # Still from pyproject - expected_options["number"] = True assert mdit.options["mdformat"] == expected_options diff --git a/tox.ini b/tox.ini index be12d45..921ebf4 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,8 @@ [tox] -envlist = py3{7,8,9,10,11}, coverage, pre-commit, hook +envlist = py3{10,11,12,13,14}, coverage, pre-commit, hook isolated_build = True -[testenv:py3{7,8,9,10,11}] +[testenv:py3{10,11,12,13,14}] extras = test commands = pytest {posargs} @@ -19,7 +19,7 @@ extras = dev commands = pre-commit run --config .pre-commit-test.yaml {posargs:--all-files --verbose --show-diff-on-failure} [flake8] -max-line-length = 88 +max-line-length = 99 max-complexity = 10 # These checks violate PEP8 so let's ignore them extend-ignore = E203