diff --git a/.circleci/config.yml b/.circleci/config.yml
index ab70c684e4d..c0780cd047f 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -11,498 +11,514 @@
version: 2.1
+_check_skip: &check_skip
+ name: Check-skip
+ command: |
+ set -e
+ export COMMIT_MESSAGE=$(git log --format=oneline -n 1);
+ if [[ "$CIRCLE_PULL_REQUEST" != "" ]] && ([[ "$COMMIT_MESSAGE" == *"[skip circle]"* ]] || [[ "$COMMIT_MESSAGE" == *"[circle skip]"* ]]); then
+ echo "Skip detected, exiting job ${CIRCLE_JOB} for PR ${CIRCLE_PULL_REQUEST}."
+ circleci-agent step halt;
+ fi
+
jobs:
- build_docs:
- parameters:
- scheduled:
- type: string
- default: "false"
- docker:
- - image: cimg/base:current-22.04
- # medium 2 vCPUs, 4GB mem; medium+ 3vCPUs 6GB mem; large 4 vCPUs 8GB mem
- # https://circleci.com/docs/configuration-reference#resourceclass
- resource_class: medium+
- steps:
- - restore_cache:
- keys:
- - source-cache
- - checkout
- - run:
- name: Complete checkout
- command: |
- set -e
- if ! git remote -v | grep upstream; then
- git remote add upstream https://github.com/mne-tools/mne-python.git
+ build_docs:
+ parameters:
+ scheduled:
+ type: string
+ default: "false"
+ machine:
+ image: ubuntu-2404:current
+ # large 4 vCPUs 15GB mem
+ # https://discuss.circleci.com/t/changes-to-remote-docker-reporting-pricing/47759
+ resource_class: large
+ steps:
+ - restore_cache:
+ keys:
+ - source-cache
+ - checkout:
+ method: full
+ - run:
+ name: Complete checkout
+ command: |
+ set -e
+ if ! git remote -v | grep upstream; then
+ git remote add upstream https://github.com/mne-tools/mne-python.git
+ fi
+ git remote set-url upstream https://github.com/mne-tools/mne-python.git
+ git fetch upstream
+ - save_cache:
+ key: source-cache
+ paths:
+ - ".git"
+ - run:
+ <<: *check_skip
+ - run:
+ name: Merge with upstream and triage run
+ command: |
+ set -e
+ echo $(git log -1 --pretty=%B) | tee gitlog.txt
+ echo ${CI_PULL_REQUEST//*pull\//} | tee merge.txt
+ if [[ $(cat merge.txt) != "" ]]; then
+ echo "Merging $(cat merge.txt)";
+ git pull --ff-only upstream "refs/pull/$(cat merge.txt)/merge";
+ else
+ if [[ "$CIRCLE_BRANCH" == "main" ]]; then
+ KIND=dev
+ else
+ KIND=stable
fi
- git remote set-url upstream https://github.com/mne-tools/mne-python.git
- git fetch upstream
- - save_cache:
- key: source-cache
- paths:
- - ".git"
- - run:
- name: Check-skip
- command: |
- set -e
export COMMIT_MESSAGE=$(git log --format=oneline -n 1);
- if [[ -v CIRCLE_PULL_REQUEST ]] && ([[ "$COMMIT_MESSAGE" == *"[skip circle]"* ]] || [[ "$COMMIT_MESSAGE" == *"[circle skip]"* ]]); then
- echo "Skip detected, exiting job ${CIRCLE_JOB} for PR ${CIRCLE_PULL_REQUEST}."
- circleci-agent step halt;
- fi
- - run:
- name: Merge with upstream and triage run
- command: |
- set -e
- echo $(git log -1 --pretty=%B) | tee gitlog.txt
- echo ${CI_PULL_REQUEST//*pull\//} | tee merge.txt
- if [[ $(cat merge.txt) != "" ]]; then
- echo "Merging $(cat merge.txt)";
- git pull --ff-only upstream "refs/pull/$(cat merge.txt)/merge";
- else
- if [[ "$CIRCLE_BRANCH" == "main" ]]; then
- KIND=dev
+ if [[ "<< parameters.scheduled >>" == "true" ]]; then
+ echo "Scheduled full build detected, checking if it's required."
+ wget https://mne.tools/${KIND}/_version.txt;
+ REMOTE_VERSION=$(cat _version.txt)
+ THIS_VERSION=$(git rev-parse HEAD)
+ echo "Current ${KIND} SHA: ${REMOTE_VERSION}"
+ echo "This ${KIND} SHA: ${THIS_VERSION}"
+ if [[ "${THIS_VERSION}" != "${REMOTE_VERSION}" ]]; then
+ echo "Rebuild required."
else
- KIND=stable
- fi
- export COMMIT_MESSAGE=$(git log --format=oneline -n 1);
- if [[ "<< parameters.scheduled >>" == "true" ]]; then
- echo "Scheduled full build detected, checking if it's required."
- wget https://mne.tools/${KIND}/_version.txt;
- REMOTE_VERSION=$(cat _version.txt)
- THIS_VERSION=$(git rev-parse HEAD)
- echo "Current ${KIND} SHA: ${REMOTE_VERSION}"
- echo "This ${KIND} SHA: ${THIS_VERSION}"
- if [[ "${THIS_VERSION}" != "${REMOTE_VERSION}" ]]; then
- echo "Rebuild required."
- else
- echo "Rebuild skipped."
- circleci-agent step halt;
- fi
- elif [[ "$COMMIT_MESSAGE" == *"[circle deploy]"* ]]; then
- echo "Forced deployed build detected, building and deploying docs";
- else
- echo "Waiting until scheduled run to build ${KIND} docs, exiting job ${CIRCLE_JOB}."
+ echo "Rebuild skipped."
circleci-agent step halt;
fi
+ elif [[ "$COMMIT_MESSAGE" == *"[circle deploy]"* ]]; then
+            echo "Forced deploy build detected, building and deploying docs";
+ else
+ echo "Waiting until scheduled run to build ${KIND} docs, exiting job ${CIRCLE_JOB}."
+ circleci-agent step halt;
fi
+ fi
- - run:
- name: Set BASH_ENV
- command: |
- set -e
- set -o pipefail
- ./tools/setup_xvfb.sh
- sudo apt install -qq graphviz optipng python3.10-venv python3-venv libxft2 ffmpeg
- python3.10 -m venv ~/python_env
- echo "set -e" >> $BASH_ENV
- echo "export OPENBLAS_NUM_THREADS=4" >> $BASH_ENV
- echo "export XDG_RUNTIME_DIR=/tmp/runtime-circleci" >> $BASH_ENV
- echo "export MNE_FULL_DATE=true" >> $BASH_ENV
- source tools/get_minimal_commands.sh
- echo "export MNE_3D_BACKEND=pyvistaqt" >> $BASH_ENV
- echo "export MNE_3D_OPTION_MULTI_SAMPLES=1" >> $BASH_ENV
- echo "export MNE_BROWSER_BACKEND=qt" >> $BASH_ENV
- echo "export MNE_BROWSER_PRECOMPUTE=false" >> $BASH_ENV
- echo "export PATH=~/.local/bin/:$PATH" >> $BASH_ENV
- echo "export DISPLAY=:99" >> $BASH_ENV
- echo "source ~/python_env/bin/activate" >> $BASH_ENV
- mkdir -p ~/.local/bin
- ln -s ~/python_env/bin/python ~/.local/bin/python
- echo "BASH_ENV:"
- cat $BASH_ENV
- mkdir -p ~/mne_data
- touch pattern.txt
- - run:
- name: check neuromag2ft
- command: |
- neuromag2ft --version
+ - run:
+ name: Set BASH_ENV
+ command: ./tools/circleci_bash_env.sh
- - run:
- name: Install fonts needed for diagrams
- command: |
- mkdir -p $HOME/.fonts
- curl https://codeload.github.com/adobe-fonts/source-code-pro/tar.gz/2.038R-ro/1.058R-it/1.018R-VAR | tar xz -C $HOME/.fonts
- curl https://codeload.github.com/adobe-fonts/source-sans-pro/tar.gz/3.028R | tar xz -C $HOME/.fonts
- fc-cache -f
+ - run:
+ name: Install fonts needed for diagrams
+ command: |
+ mkdir -p $HOME/.fonts
+ echo "Source Code Pro"
+ curl https://codeload.github.com/adobe-fonts/source-code-pro/tar.gz/2.038R-ro/1.058R-it/1.018R-VAR | tar xz -C $HOME/.fonts
+ echo "Source Sans Pro"
+ curl https://codeload.github.com/adobe-fonts/source-sans/tar.gz/3.028R | tar xz -C $HOME/.fonts
+ fc-cache -f
- # Load pip cache
- - restore_cache:
- keys:
- - pip-cache
- - restore_cache:
- keys:
- - user-install-bin-cache-310
+ # Load pip cache
+ - restore_cache:
+ keys:
+ - pip-cache-0
+ - restore_cache:
+ keys:
+ - user-install-bin-cache-310
- # Hack in uninstalls of libraries as necessary if pip doesn't do the right thing in upgrading for us...
- - run:
- name: Get Python running
- command: |
- ./tools/circleci_dependencies.sh
+ # Hack in uninstalls of libraries as necessary if pip doesn't do the right thing in upgrading for us...
+ - run:
+ name: Get Python running
+ command: |
+ ./tools/circleci_dependencies.sh
- - save_cache:
- key: pip-cache
- paths:
- - ~/.cache/pip
- - save_cache:
- key: user-install-bin-cache-310
- paths:
- - ~/.local/lib/python3.10/site-packages
- - ~/.local/bin
+ - save_cache:
+ key: pip-cache-0
+ paths:
+ - ~/.cache/pip
+ - save_cache:
+ key: user-install-bin-cache-310
+ paths:
+ - ~/.local/lib/python3.10/site-packages
+ - ~/.local/bin
- - run:
- name: Check Qt
- command: |
- ./tools/check_qt_import.sh PyQt6
- # Load tiny cache so that ~/.mne does not need to be created below
- - restore_cache:
- keys:
- - data-cache-tiny-0
+ - run:
+ name: Check Qt
+ command: |
+ ./tools/check_qt_import.sh PyQt6
+ # Load tiny cache so that ~/.mne does not need to be created below
+ - restore_cache:
+ keys:
+ - data-cache-tiny-0
- # Look at what we have and fail early if there is some library conflict
- - run:
- name: Check installation
- command: |
- which python
- QT_DEBUG_PLUGINS=1 mne sys_info -pd
- python -c "import numpy; numpy.show_config()"
- LIBGL_DEBUG=verbose python -c "import pyvistaqt; pyvistaqt.BackgroundPlotter(show=True)"
- python -c "import mne; mne.set_config('MNE_USE_CUDA', 'false')" # this is needed for the config tutorial
- python -c "import mne; mne.set_config('MNE_LOGGING_LEVEL', 'info')"
- python -c "import mne; level = mne.get_config('MNE_LOGGING_LEVEL'); assert level.lower() == 'info', repr(level)"
- - run:
- name: List packages
- command: python -m pip list
+ # Look at what we have and fail early if there is some library conflict
+ - run:
+ name: Check installation
+ command: |
+ which python
+ QT_DEBUG_PLUGINS=1 mne sys_info -pd
+ python -c "import numpy; numpy.show_config()"
+ python -c "import dipy.align.metrics"
+ LIBGL_DEBUG=verbose python -c "import pyvistaqt; pyvistaqt.BackgroundPlotter(show=True)"
+ python -c "import mne; mne.set_config('MNE_USE_CUDA', 'false')" # this is needed for the config tutorial
+ python -c "import mne; mne.set_config('MNE_LOGGING_LEVEL', 'info')"
+ python -c "import mne; level = mne.get_config('MNE_LOGGING_LEVEL'); assert level.lower() == 'info', repr(level)"
+ - run:
+ name: List packages
+ command: python -m pip list
- # Figure out if we should run a full, pattern, or noplot version
- - restore_cache:
- keys:
- - data-cache-tiny-1
- - restore_cache:
- keys:
- - data-cache-multimodal
- - restore_cache:
- keys:
- - data-cache-limo
- - restore_cache:
- keys:
- - data-cache-fsaverage
- - restore_cache:
- keys:
- - data-cache-bst-phantom-ctf
- - restore_cache:
- keys:
- - data-cache-bst-raw
- - restore_cache:
- keys:
- - data-cache-bst-phantom-elekta
- - restore_cache:
- keys:
- - data-cache-bst-auditory
- - restore_cache:
- keys:
- - data-cache-bst-resting
- - restore_cache:
- keys:
- - data-cache-fieldtrip
- - restore_cache:
- keys:
- - data-cache-somato
- - restore_cache:
- keys:
- - data-cache-hf-sef
- - restore_cache:
- keys:
- - data-cache-opm
- - restore_cache:
- keys:
- - data-cache-sample
- - restore_cache:
- keys:
- - data-cache-spm-face
- - restore_cache:
- keys:
- - data-cache-testing
- - restore_cache:
- keys:
- - data-cache-visual
- - restore_cache:
- keys:
- - data-cache-ucl-opm-auditory
- - run:
- name: Get data
- command: |
- ./tools/circleci_download.sh
- - run:
- name: Verify build type
- command: |
- echo "PATTERN=$(cat pattern.txt)"
- echo "BUILD=$(cat build.txt)"
- ls -al ~/mne_data;
+ # Figure out if we should run a full build or specify a pattern
+ - restore_cache:
+ keys:
+ - data-cache-tiny-1
+ - restore_cache:
+ keys:
+ - data-cache-multimodal
+ - restore_cache:
+ keys:
+ - data-cache-limo
+ - restore_cache:
+ keys:
+ - data-cache-fsaverage
+ - restore_cache:
+ keys:
+ - data-cache-bst-raw
+ - restore_cache:
+ keys:
+ - data-cache-bst-phantom-ctf
+ - restore_cache:
+ keys:
+ - data-cache-bst-phantom-elekta
+ - restore_cache:
+ keys:
+ - data-cache-bst-phantom-kernel
+ - restore_cache:
+ keys:
+ - data-cache-bst-auditory
+ - restore_cache:
+ keys:
+ - data-cache-bst-resting
+ - restore_cache:
+ keys:
+ - data-cache-fieldtrip
+ - restore_cache:
+ keys:
+ - data-cache-somato
+ - restore_cache:
+ keys:
+ - data-cache-hf-sef
+ - restore_cache:
+ keys:
+ - data-cache-opm
+ - restore_cache:
+ keys:
+ - data-cache-sample
+ - restore_cache:
+ keys:
+ - data-cache-spm-face
+ - restore_cache:
+ keys:
+ - data-cache-testing
+ - restore_cache:
+ keys:
+ - data-cache-visual
+ - restore_cache:
+ keys:
+ - data-cache-ucl-opm-auditory
+ - restore_cache:
+ keys:
+ - data-cache-phantom-kit
+ - restore_cache:
+ keys:
+ - data-cache-ds004388
+ - run:
+ name: Get data
+ # This limit could be increased, but this is helpful for finding slow ones
+ # (even ~2GB datasets should be downloadable in this time from good
+ # providers)
+ no_output_timeout: 10m
+ command: |
+ ./tools/circleci_download.sh
+ - run:
+ name: Verify build type
+ command: |
+ echo "PATTERN=$(cat pattern.txt)"
+ echo "BUILD=$(cat build.txt)"
+ ls -al ~/mne_data;
- # Run doctest (if it's full or front) before building the docs
- - run:
- name: make test-doc
- command: |
- if [[ $(cat gitlog.txt) == *"[circle front]"* ]] || [[ $(cat build.txt) == "html_dev-memory" ]] || [[ $(cat build.txt) == "html_stable-memory" ]]; then
- make test-doc;
- mkdir -p doc/_build/test-results/test-doc;
- cp junit-results.xml doc/_build/test-results/test-doc/junit.xml;
- fi;
- # Build docs
- - run:
- name: make html
- command: |
- PATTERN=$(cat pattern.txt) make -C doc $(cat build.txt);
- - run:
- name: Show profiling output
- when: always
- command: |
- if compgen -G "doc/*.dat" > /dev/null; then
- mkdir -p doc/generated
- mprof plot doc/*.dat --output doc/generated/memory.png
- else
- echo "No profile data found in doc/"
- fi
- - run:
- name: Sanity check system state
- command: |
- python -c "import mne; level = mne.get_config('MNE_LOGGING_LEVEL'); assert level.lower() == 'info', repr(level)"
+ # Run doctest (if it's full or front) before building the docs
+ - run:
+ name: make test-doc
+ command: |
+ if [[ $(cat gitlog.txt) == *"[circle front]"* ]] || [[ $(cat build.txt) == "html-memory" ]] ; then
+ make test-doc;
+ mkdir -p doc/_build/test-results/test-doc;
+ cp junit-results.xml doc/_build/test-results/test-doc/junit.xml;
+ cp coverage.xml doc/_build/test-results/test-doc/coverage.xml;
+ fi;
+ # Build docs
+ - run:
+ name: make html
+            command: | # we have -o pipefail in $BASH_ENV so we should be okay
+ set -x
+ PATTERN=$(cat pattern.txt) make -C doc $(cat build.txt) 2>&1 | tee sphinx_log.txt
+ - run:
+ name: Check sphinx log for warnings (which are treated as errors)
+ when: always
+ command: |
+ ! grep "^.*\(WARNING\|ERROR\): " sphinx_log.txt
+ - run:
+ name: Show profiling output
+ when: always
+ command: |
+ if compgen -G "doc/*.dat" > /dev/null; then
+ mkdir -p doc/generated
+ mprof plot doc/*.dat --output doc/generated/memory.png
+ else
+ echo "No profile data found in doc/"
+ fi
+ - run:
+ name: Sanity check system state
+ command: |
+ python -c "import mne; level = mne.get_config('MNE_LOGGING_LEVEL'); assert level.lower() == 'info', repr(level)"
- # Reduce upload time of artifacts we will (almost) never look at
- - run:
- name: Reduce artifact upload time
- command: |
- if grep -q html_dev-pattern-memory build.txt || grep -q html_dev-noplot build.txt; then
- zip -rm doc/_build/html/_downloads.zip doc/_build/html/_downloads
- fi
- for NAME in generated auto_tutorials auto_examples; do
- zip -rm doc/${NAME}.zip doc/${NAME}
- done
+ # Reduce upload time of artifacts we will (almost) never look at
+ - run:
+ name: Reduce artifact upload time
+ command: |
+ if grep -q html-pattern-memory build.txt; then
+ zip -rm doc/_build/html/_downloads.zip doc/_build/html/_downloads
+ fi
+ for NAME in generated auto_tutorials auto_examples; do
+ zip -rm doc/${NAME}.zip doc/${NAME}
+ done
- # Save the JUnit file
- - store_test_results:
- path: doc/_build/test-results
- - store_artifacts:
- path: doc/_build/test-results
- destination: test-results
- # Save the SG RST
- - store_artifacts:
- path: doc/auto_examples.zip
- - store_artifacts:
- path: doc/auto_tutorials.zip
- - store_artifacts:
- path: doc/generated.zip
- # Save the HTML
- - store_artifacts:
- path: doc/_build/html/
- destination: dev
- - store_artifacts:
- path: doc/_build/html_stable/
- destination: stable
- - persist_to_workspace:
- root: doc/_build
- paths:
- - html
- - html_stable
+ # Save the JUnit file
+ - store_test_results:
+ path: doc/_build/test-results
+ - store_artifacts:
+ path: doc/_build/test-results
+ destination: test-results
+ # Upload test results to Codecov
+ - run:
+ name: Upload test results to Codecov
+ environment:
+ CODECOV_TOKEN: fb4c4a94-72d7-4743-bb08-af25b623a29a
+ command: |
+ if [[ -f doc/_build/test-results/test-doc/coverage.xml ]]; then
+ bash <(curl -s https://codecov.io/bash) -f doc/_build/test-results/test-doc/coverage.xml || true
+ fi
+ # Save the SG RST
+ - store_artifacts:
+ path: doc/auto_examples.zip
+ - store_artifacts:
+ path: doc/auto_tutorials.zip
+ - store_artifacts:
+ path: doc/generated.zip
+ # Save the HTML
+ - store_artifacts:
+ path: doc/_build/html/
+ destination: html
+ - persist_to_workspace:
+ root: doc/_build
+ paths:
+ - html
- # Keep these separate, maybe better in terms of size limitations (?)
- - save_cache:
- key: data-cache-tiny-0 # < 100 M, might as well combine
- paths:
- - ~/.mne
- - ~/mne_data/MNE-kiloword-data # (28 M)
- - ~/mne_data/MNE-eegbci-data # (35 M)
- - ~/mne_data/MNE-misc-data # (39 M)
- - ~/mne_data/mTRF_1.5 # (56 M)
- - ~/mne_data/MNE-phantom-4DBTi # (77 M)
- - save_cache:
- key: data-cache-tiny-1 # more to combine
- paths:
- - ~/mne_data/MNE-fNIRS-motor-data # (71 M)
- - ~/mne_data/MNE-refmeg-noise-data # (93 M)
- - ~/mne_data/physionet-sleep-data # (95 M)
- - save_cache:
- key: data-cache-multimodal
- paths:
- - ~/mne_data/MNE-multimodal-data # (240 M)
- - save_cache:
- key: data-cache-limo
- paths:
- - ~/mne_data/MNE-limo-data # (244 M)
- - save_cache:
- key: data-cache-fsaverage
- paths:
- - ~/mne_data/MNE-fsaverage-data # (762 M)
- - save_cache:
- key: data-cache-bst-phantom-ctf
- paths:
- - ~/mne_data/MNE-brainstorm-data/bst_phantom_ctf # (177 M)
- - save_cache:
- key: data-cache-bst-raw
- paths:
- - ~/mne_data/MNE-brainstorm-data/bst_raw # (830 M)
- - save_cache:
- key: data-cache-bst-phantom-elekta
- paths:
- - ~/mne_data/MNE-brainstorm-data/bst_phantom_elekta # (1.4 G)
- - save_cache:
- key: data-cache-bst-auditory
- paths:
- - ~/mne_data/MNE-brainstorm-data/bst_auditory # (2.9 G)
- - save_cache:
- key: data-cache-bst-resting
- paths:
- - ~/mne_data/MNE-brainstorm-data/bst_resting # (4.5 G)
- - save_cache:
- key: data-cache-fieldtrip
- paths:
- - ~/mne_data/MNE-fieldtrip_cmc-data # (699 M)
- - save_cache:
- key: data-cache-somato
- paths:
- - ~/mne_data/MNE-somato-data # (750 M)
- - save_cache:
- key: data-cache-hf-sef
- paths:
- - ~/mne_data/HF_SEF # (1.3 G)
- - save_cache:
- key: data-cache-opm
- paths:
- - ~/mne_data/MNE-OPM-data # (1.9 G)
- - save_cache:
- key: data-cache-sample
- paths:
- - ~/mne_data/MNE-sample-data # (3.2 G)
- - save_cache:
- key: data-cache-spm-face
- paths:
- - ~/mne_data/MNE-spm-face # (1.5 G)
- - save_cache:
- key: data-cache-testing
- paths:
- - ~/mne_data/MNE-testing-data # (2.5 G)
- - save_cache:
- key: data-cache-visual
- paths:
- - ~/mne_data/MNE-visual_92_categories-data # (6 G)
- - save_cache:
- key: data-cache-ucl-opm-auditory
- paths:
- - ~/mne_data/auditory_OPM_stationary # (4 G)
+ # Keep these separate, maybe better in terms of size limitations (?)
+ - save_cache:
+ key: data-cache-tiny-0 # < 100 M, might as well combine
+ paths:
+ - ~/.mne
+ - ~/mne_data/MNE-kiloword-data # (28 M)
+ - ~/mne_data/MNE-eegbci-data # (35 M)
+ - ~/mne_data/MNE-misc-data # (39 M)
+ - ~/mne_data/mTRF_1.5 # (56 M)
+ - ~/mne_data/MNE-phantom-4DBTi # (77 M)
+ - save_cache:
+ key: data-cache-tiny-1 # more to combine
+ paths:
+ - ~/mne_data/MNE-fNIRS-motor-data # (71 M)
+ - ~/mne_data/MNE-refmeg-noise-data # (93 M)
+ - ~/mne_data/physionet-sleep-data # (95 M)
+ - save_cache:
+ key: data-cache-multimodal
+ paths:
+ - ~/mne_data/MNE-multimodal-data # (240 M)
+ - save_cache:
+ key: data-cache-limo
+ paths:
+ - ~/mne_data/MNE-limo-data # (244 M)
+ - save_cache:
+ key: data-cache-fsaverage
+ paths:
+ - ~/mne_data/MNE-fsaverage-data # (762 M)
+ - save_cache:
+ key: data-cache-bst-raw
+ paths:
+ - ~/mne_data/MNE-brainstorm-data/bst_raw # (830 M)
+ - save_cache:
+ key: data-cache-bst-phantom-ctf
+ paths:
+ - ~/mne_data/MNE-brainstorm-data/bst_phantom_ctf # (177 M)
+ - save_cache:
+ key: data-cache-bst-phantom-elekta
+ paths:
+ - ~/mne_data/MNE-brainstorm-data/bst_phantom_elekta # (1.4 G)
+ - save_cache:
+ key: data-cache-bst-phantom-kernel
+ paths:
+ - ~/mne_data/MNE-phantom-kernel-data # (362 M)
+ - save_cache:
+ key: data-cache-bst-auditory
+ paths:
+ - ~/mne_data/MNE-brainstorm-data/bst_auditory # (2.9 G)
+ - save_cache:
+ key: data-cache-bst-resting
+ paths:
+ - ~/mne_data/MNE-brainstorm-data/bst_resting # (4.5 G)
+ - save_cache:
+ key: data-cache-fieldtrip
+ paths:
+ - ~/mne_data/MNE-fieldtrip_cmc-data # (699 M)
+ - save_cache:
+ key: data-cache-somato
+ paths:
+ - ~/mne_data/MNE-somato-data # (750 M)
+ - save_cache:
+ key: data-cache-hf-sef
+ paths:
+ - ~/mne_data/HF_SEF # (1.3 G)
+ - save_cache:
+ key: data-cache-opm
+ paths:
+ - ~/mne_data/MNE-OPM-data # (1.9 G)
+ - save_cache:
+ key: data-cache-sample
+ paths:
+ - ~/mne_data/MNE-sample-data # (3.2 G)
+ - save_cache:
+ key: data-cache-spm-face
+ paths:
+ - ~/mne_data/MNE-spm-face # (1.5 G)
+ - save_cache:
+ key: data-cache-testing
+ paths:
+ - ~/mne_data/MNE-testing-data # (2.5 G)
+ - save_cache:
+ key: data-cache-visual
+ paths:
+ - ~/mne_data/MNE-visual_92_categories-data # (6 G)
+ - save_cache:
+ key: data-cache-ucl-opm-auditory
+ paths:
+ - ~/mne_data/auditory_OPM_stationary # (4 G)
+ - save_cache:
+ key: data-cache-phantom-kit
+ paths:
+ - ~/mne_data/MNE-phantom-KIT-data # (1 G)
+ - save_cache:
+ key: data-cache-ds004388
+ paths:
+ - ~/mne_data/ds004388 # (1.8 G)
- linkcheck:
- # there are a few files excluded from this for expediency, see Makefile
- parameters:
- scheduled:
- type: string
- default: "false"
- docker:
- - image: circleci/python:3.9.2-buster
- steps:
- - restore_cache:
- keys:
- - source-cache
- - checkout
- - run:
- name: Set BASH_ENV
- command: |
- set -e
- echo "set -e" >> $BASH_ENV
- echo "export PATH=~/.local/bin/:$PATH" >> $BASH_ENV
- - run:
- name: Check-skip
- command: |
- export COMMIT_MESSAGE=$(git log --format=oneline -n 1);
- if [[ "$COMMIT_MESSAGE" != *"[circle linkcheck]"* ]] && [ "<< parameters.scheduled >>" != "true" ]; then
- echo "Skip detected, exiting job ${CIRCLE_JOB}."
- circleci-agent step halt;
- fi
- - restore_cache:
- keys:
- - pip-cache
- - run:
- name: Get Python running
- command: |
- ./tools/circleci_dependencies.sh
- - run:
- name: Check installation
- command: |
- mne sys_info -pd
- - run:
- name: make linkcheck
- command: |
- make -C doc linkcheck
- - run:
- name: make linkcheck-grep
- when: always
- command: |
- make -C doc linkcheck-grep
- - store_artifacts:
- path: doc/_build/linkcheck
- destination: linkcheck
+ linkcheck:
+ # there are a few files excluded from this for expediency, see Makefile
+ parameters:
+ scheduled:
+ type: string
+ default: "false"
+ machine:
+ image: ubuntu-2404:current
+ resource_class: large
+ steps:
+ - restore_cache:
+ keys:
+ - source-cache
+ - checkout
+ - run:
+ name: Check-skip
+ command: |
+ export COMMIT_MESSAGE=$(git log --format=oneline -n 1);
+ if [[ "$COMMIT_MESSAGE" != *"[circle linkcheck]"* ]] && [ "<< parameters.scheduled >>" != "true" ]; then
+ echo "Skip detected, exiting job ${CIRCLE_JOB}."
+ circleci-agent step halt;
+ fi
+ - run:
+ name: Set BASH_ENV
+ command: ./tools/circleci_bash_env.sh
+ - restore_cache:
+ keys:
+ - pip-cache-0
+ - run:
+ name: Get Python running
+ command: |
+ ./tools/circleci_dependencies.sh
+ - run:
+ name: Check installation
+ command: |
+ mne sys_info -pd
+ - run:
+ name: make linkcheck
+ no_output_timeout: 40m
+ command: |
+ make -C doc linkcheck
+ - store_artifacts:
+ path: doc/_build/linkcheck
+ destination: linkcheck
- deploy:
- machine:
- image: ubuntu-2004:202111-01
- steps:
- - attach_workspace:
- at: /tmp/build
- - restore_cache:
- keys:
- - website-cache
- - run:
- name: Set BASH_ENV
- command: |
- set -e
- echo "set -e" >> $BASH_ENV
- # Don't try to deploy if nothing is there or not on the right branch
- - run:
- name: Check docs
- command: |
- if [ ! -f /tmp/build/html/index.html ] && [ ! -f /tmp/build/html_stable/index.html ]; then
- echo "No files found to upload (build: ${CIRCLE_BRANCH}).";
- circleci-agent step halt;
- fi;
- - run:
- name: Fetch docs
- command: |
- mkdir -p ~/.ssh
- echo -e "Host *\nStrictHostKeyChecking no" > ~/.ssh/config
- chmod og= ~/.ssh/config
- if [ ! -d ~/mne-tools.github.io ]; then
- git clone git@github.com:/mne-tools/mne-tools.github.io.git ~/mne-tools.github.io --depth=1
- fi
- - run:
- name: Deploy docs
- command: |
- git config --global user.email "circle@mne.com";
- git config --global user.name "Circle CI";
- cd ~/mne-tools.github.io;
- git checkout main
- git remote -v
- git fetch origin
- git reset --hard origin/main
- git clean -xdf
- if [ "${CIRCLE_BRANCH}" == "main" ]; then
- echo "Deploying dev docs for ${CIRCLE_BRANCH}.";
- rm -Rf dev;
- cp -a /tmp/build/html dev;
- git add -A;
- git commit -m "CircleCI update of dev docs (${CIRCLE_BUILD_NUM}).";
- else
- echo "Deploying stable docs for ${CIRCLE_BRANCH}.";
- rm -Rf stable;
- cp -a /tmp/build/html_stable stable;
- git add -A;
- git commit -m "CircleCI update of stable docs (${CIRCLE_BUILD_NUM}).";
- fi;
- git push origin main;
- - save_cache:
- key: website-cache
- paths:
- - ~/mne_data/MNE-visual_92_categories-data
+ deploy:
+ machine:
+ image: ubuntu-2404:current
+ steps:
+ - attach_workspace:
+ at: /tmp/build
+ - restore_cache:
+ keys:
+ - website-cache-1
+ - add_ssh_keys:
+ fingerprints:
+ # SHA256:N4qvp6MSbXcTz/27xz96VPsNuTDRT92zoRP8EW0I/8I
+ - "19:fe:1d:c3:c7:af:7e:16:94:4c:e1:e7:0a:56:13:bd"
+ - run:
+ name: Set BASH_ENV
+ command: |
+ set -e
+ echo "set -e" >> $BASH_ENV
+ # Don't try to deploy if nothing is there or not on the right branch
+ - run:
+ name: Check docs
+ command: |
+ if [ ! -f /tmp/build/html/index.html ] ; then
+ echo "No files found to upload (build: ${CIRCLE_BRANCH}).";
+ circleci-agent step halt;
+ fi;
+ - run:
+ name: Fetch docs
+ command: |
+ mkdir -p ~/.ssh
+ echo -e "Host *\nStrictHostKeyChecking no" > ~/.ssh/config
+ chmod og= ~/.ssh/config
+ if [ ! -d ~/mne-tools.github.io ]; then
+ git clone git@github.com:/mne-tools/mne-tools.github.io.git ~/mne-tools.github.io --depth=1
+ fi
+ - run:
+ name: Deploy docs
+ command: |
+ git config --global user.email "circle@mne.tools";
+ git config --global user.name "Circle CI";
+ ssh-add -D && ssh-add ~/.ssh/id_rsa_19fe1dc3c7af7e16944ce1e70a5613bd
+ cd ~/mne-tools.github.io;
+ git checkout main
+ git remote -v
+ git fetch origin
+ git reset --hard origin/main
+ git clean -xdf
+ if [ "${CIRCLE_BRANCH}" == "main" ]; then
+ echo "Deploying dev docs for ${CIRCLE_BRANCH}.";
+ rm -Rf dev;
+ cp -a /tmp/build/html dev;
+ git add -A;
+ git commit -m "CircleCI update of dev docs (${CIRCLE_BUILD_NUM}).";
+ else
+ echo "Deploying stable docs for ${CIRCLE_BRANCH}.";
+ rm -Rf stable;
+ cp -a /tmp/build/html stable;
+ git add -A;
+ git commit -m "CircleCI update of stable docs (${CIRCLE_BUILD_NUM}).";
+ fi;
+ git push origin main;
+ - save_cache:
+ key: website-cache-1
+ paths:
+ - ~/mne-tools.github.io
workflows:
default:
@@ -519,7 +535,7 @@ workflows:
branches:
only:
- main
- - maint/1.3
+ - /maint\/.*/
main:
jobs:
@@ -539,14 +555,14 @@ workflows:
only:
- main
- weekly:
+ monthly:
jobs:
- linkcheck:
- name: linkcheck_weekly
+ name: linkcheck_monthly
scheduled: "true"
triggers:
- schedule:
- # "At 6:00 AM GMT on the first day of each month" is often enough
+ # "At 6:00 AM GMT on the first day of each month"
cron: "0 6 1 * *"
filters:
branches:
diff --git a/.cirrus.yml b/.cirrus.yml
deleted file mode 100644
index 84386c2aa85..00000000000
--- a/.cirrus.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-# Test on macOS M1 silicon
-#
-# cibuildwheel FAQ helps (search "cirrus"):
-# https://cibuildwheel.readthedocs.io/en/stable/faq/#apple-silicon
-# https://cirrus-ci.org/guide/writing-tasks/#supported-instructions
-# In commit messages, [skip ci] or [skip cirrus] is supported natively.
-macos_arm64_task:
- name: macOS arm64 (M1) tests
- # Auto-cancel PR builds
- auto_cancellation: $CIRRUS_PR != ''
- only_if: $CIRRUS_REPO_FULL_NAME == "mne-tools/mne-python"
- macos_instance:
- image: ghcr.io/cirruslabs/macos-monterey-base:latest
- env:
- PATH: /opt/homebrew/opt/python@3.10/libexec/bin:$PATH
- python_install_script:
- - ./tools/cirrus_install_python.sh
- pip_cache:
- folder: ~/Library/Caches/pip
- fingerprint_script:
- - echo $PYTHON_VERSION && cat requirements.txt && cat requirements_testing.txt && cat requirements_testing_extra.txt
- populate_script:
- - ./tools/cirrus_dependencies.sh
- pip_install_script: # now we actually run it (possibly another time) to install deps
- - ./tools/cirrus_dependencies.sh
- mne_install_script:
- - pip install -ve .
- mne_sys_info_script:
- - mne sys_info -pd
- testing_cache:
- folder: ~/mne_data
- fingerprint_script:
- - ./tools/get_testing_version.sh
- populate_script:
- - python -c "import mne; mne.datasets.testing.data_path(force_update=True, verbose=True)"
- pytest_script:
- - pytest -m "not slowtest" --tb=short --cov=mne --cov-report=xml -vv mne
- # https://cirrus-ci.org/guide/writing-tasks/#artifact-parsing
- always:
- upload_results_artifacts:
- path: ./*.xml
- format: junit
- type: text/xml
- codecov_script: bash <(curl -s https://codecov.io/bash)
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000000..054b0c65924
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,16 @@
+# PR number should follow the commit number so that our code credit
+# can parse this file correctly:
+d71e497dcf6f98e19eb81e82e641404a71d2d663 # 1420, split up viz.py
+203a96cbba2732d2e349a8f96065e74bbfd2a53b # 5862, split utils.py
+ff349f356edb04e1b5f0db13deda8d1a20aca351 # 6767, move around manual parts
+31a83063557fbd54d898f00f9527ffc547888395 # 10407, alphabetize docdict
+e81ec528a42ac687f3d961ed5cf8e25f236925b0 # 11667, black
+12395f9d9cf6ea3c72b225b62e052dd0d17d9889 # 11868, YAML indentation
+d6d2f8c6a2ed4a0b27357da9ddf8e0cd14931b59 # 12097, isort
+e7dd1588013179013a50d3f6b8e8f9ae0a185783 # 12261, ruff format
+940ac9553ce42c15b4c16ecd013824ca3ea7244a # 12533, whitespace
+e39995d9be6fc831c7a4a59f09b7a7c0a41ae315 # 12588, percent formatting
+1c5b39ff1d99bbcb2fc0e0071a989b3f3845ff30 # 12603, ruff UP028
+b8b168088cb474f27833f5f9db9d60abe00dca83 # 12779, PR JSONs
+ee64eba6f345e895e3d5e7d2804fa6aa2dac2e6d # 12781, Header unification
+362f9330925fb79a6adc19a42243672676dec63e # 12799, UP038
diff --git a/.git_archival.txt b/.git_archival.txt
new file mode 100644
index 00000000000..7c5100942aa
--- /dev/null
+++ b/.git_archival.txt
@@ -0,0 +1,3 @@
+node: $Format:%H$
+node-date: $Format:%cI$
+describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000000..00a7b00c94e
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+.git_archival.txt export-subst
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index b39bd6b1d3d..e7db9e4f94c 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -13,10 +13,115 @@
# This file was adapted from SciPy.
# Each line is a file pattern followed by one or more owners.
+# Syntax follows .gitignore, see https://git-scm.com/docs/gitignore#_pattern_format
+
+# Headings ideally should correspond to domains of knowledge/expertise, starting with
+# algorithms implementations (generally main codebase mne/ and related docs) followed by
+# project-level and miscellaneous items (CI, general documentation, etc.).
+
+#################################################
+# Algorithms, implementations, and related docs #
+#################################################
+
+# Artifact regression
+/mne/preprocessing/_regress.py @wmvanvliet
+/mne/preprocessing/tests/test_regress.py @wmvanvliet
+
+# Beamforming
+/mne/beamformer @britta-wstnr
+*dics*.py @britta-wstnr @wmvanvliet # related tutorials and examples
+*lcmv*.py @britta-wstnr
+
+# Channels
+/mne/channels @agramfort @mscheltienne @dengemann @jasmainak
+
+# Core sensor-space classes
+/mne/epochs.py @drammock @agramfort @mscheltienne @dengemann
+/mne/evoked.py @drammock @agramfort @mscheltienne @dengemann
+/mne/io/*.* @drammock @agramfort @mscheltienne @dengemann
+
+# Current-source density
+/mne/preprocessing/_csd.py @alexrockhill @dengemann
+
+# Decoding
+/mne/decoding/csp.py @agramfort @dengemann
+/mne/decoding/*.py @jasmainak
+
+# fNIRS
+/mne/preprocessing/nirs @rob-luke
+*fnirs*.py @rob-luke
+
+# forward
+/mne/forward/ @agramfort @jasmainak
+*forward*.py @agramfort
+
+# Intracranial
+/mne/preprocessing/ieeg @alexrockhill @adam2392
+*seeg*.py @alexrockhill @adam2392
+*ecog*.py @alexrockhill @adam2392
+
+# Inverse Sparse
+/mne/inverse_sparse @agramfort
+*mxne*.py @agramfort # related tutorials and examples
+*gamma*.py @agramfort # related tutorials and examples
# IO
-/mne/io/brainvision @sappelhoff
+/mne/io/brainvision @sappelhoff @adam2392
+/mne/io/nirx @rob-luke
+/mne/io/snirf @rob-luke
/mne/export @sappelhoff
+/mne/io/eeglab.py @jasmainak
+/mne/io/eeglab/tests/test_eeglab.py @jasmainak
-# Beamforming
-/mne/beamformer/ @britta-wstnr
+# Minimum Norm
+/mne/minimum_norm @agramfort
+
+# Preprocessing
+/mne/preprocessing/ica.py @adam2392 @agramfort @mscheltienne @dengemann
+/mne/preprocessing/infomax_.py @adam2392 @mscheltienne @dengemann
+/mne/preprocessing/*annotate*.py @mscheltienne
+/mne/preprocessing/bads.py @mscheltienne
+/mne/preprocessing/e*g.py @mscheltienne
+
+# Report
+/mne/report @dengemann @jasmainak
+
+# Simulation
+/mne/simulation/ @agramfort
+*simulate*.py @agramfort
+
+# Surface + transforms
+/mne/surface.py @alexrockhill @larsoner
+/mne/transforms.py @alexrockhill @larsoner
+/mne/_freesurfer.py @alexrockhill @larsoner
+
+# TFR
+/mne/time_frequency @drammock @adam2392 @mscheltienne
+
+# Viz
+/mne/viz @drammock @dengemann
+/mne/viz/_brain @larsoner @wmvanvliet
+/mne/viz/ui_events.py @wmvanvliet
+/tutorials/visualization @larsoner @wmvanvliet @dengemann
+/examples/visualization @larsoner @dengemann
+
+# Datasets
+/mne/datasets/brainstorm @jasmainak
+
+#########################
+# Project-level / other #
+#########################
+
+# Examples and tutorials
+/examples @drammock @agramfort @dengemann
+/tutorials @drammock @agramfort @dengemann
+
+# Non-tutorial documentation text and infrastructure
+/doc @larsoner @drammock @agramfort @dengemann
+/logo @drammock
+
+# Project infrastructure and CIs
+/*.* @larsoner @drammock # files in the root directory
+/.circleci @larsoner
+/.github @larsoner
+/tools @larsoner @drammock
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 53e02d49867..b7ab58dc917 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -10,4 +10,4 @@ This project and everyone participating in it is governed by the [MNE-Python's C
## How to contribute
-Before contributing make sure you are familiar with [our contributing guide](https://mne.tools/dev/install/contributing.html).
+Before contributing make sure you are familiar with [our contributing guide](https://mne.tools/dev/development/contributing.html).
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 00000000000..d0c3b59f68d
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,15 @@
+# These are supported funding model platforms
+
+# github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
+# patreon: # Replace with a single Patreon username
+open_collective: mne-python
+# ko_fi: # Replace with a single Ko-fi username
+# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+# liberapay: # Replace with a single Liberapay username
+# issuehunt: # Replace with a single IssueHunt username
+# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
+# polar: # Replace with a single Polar username
+# buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
+# thanks_dev: # Replace with a single thanks.dev username
+# custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 6ec575d28e8..ddd5834e533 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -29,8 +29,8 @@ body:
Paste here a code snippet or minimal working example
([MWE](https://en.wikipedia.org/wiki/Minimal_Working_Example))
to replicate your problem, using one of the
- [datasets shipped with MNE-Python](https://mne.tools/dev/overview/datasets_index.html),
- preferably the one called [sample](https://mne.tools/dev/overview/datasets_index.html#sample).
+ [datasets shipped with MNE-Python](https://mne.tools/stable/documentation/datasets.html#datasets),
+ preferably the one called [sample](https://mne.tools/stable/documentation/datasets.html#sample).
render: Python
validations:
required: true
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index ea102484a7f..231488d2d47 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,5 +1,7 @@
+
+
+#### Reference issue (if any)
+
+
#### What does this implement/fix?
-Explain your changes.
+
+
#### Additional information
-Any additional information you think is important.
+
+
diff --git a/.github/actions/rename_towncrier/rename_towncrier.py b/.github/actions/rename_towncrier/rename_towncrier.py
new file mode 100755
index 00000000000..72d4f4be272
--- /dev/null
+++ b/.github/actions/rename_towncrier/rename_towncrier.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+
+# Authors: The MNE-Python contributors.
+# License: BSD-3-Clause
+# Copyright the MNE-Python contributors.
+
+# Adapted from action-towncrier-changelog
+import json
+import os
+import re
+import subprocess
+import sys
+from pathlib import Path
+
+from github import Github
+from tomllib import loads
+
+event_name = os.getenv("GITHUB_EVENT_NAME", "pull_request")
+if not event_name.startswith("pull_request"):
+ print(f"No-op for {event_name}")
+ sys.exit(0)
+if "GITHUB_EVENT_PATH" in os.environ:
+ with open(os.environ["GITHUB_EVENT_PATH"], encoding="utf-8") as fin:
+ event = json.load(fin)
+ pr_num = event["number"]
+ basereponame = event["pull_request"]["base"]["repo"]["full_name"]
+ real = True
+else: # local testing
+ pr_num = 12318 # added some towncrier files
+ basereponame = "mne-tools/mne-python"
+ real = False
+
+g = Github(os.environ.get("GITHUB_TOKEN"))
+baserepo = g.get_repo(basereponame)
+
+# Grab config from upstream's default branch
+toml_cfg = loads(Path("pyproject.toml").read_text("utf-8"))
+
+config = toml_cfg["tool"]["towncrier"]
+pr = baserepo.get_pull(pr_num)
+modified_files = [f.filename for f in pr.get_files()]
+
+# Get types from config
+types = [ent["directory"] for ent in toml_cfg["tool"]["towncrier"]["type"]]
+type_pipe = "|".join(types)
+
+# Get files that potentially match the types
+directory = toml_cfg["tool"]["towncrier"]["directory"]
+assert directory.endswith("/"), directory
+
+file_re = re.compile(rf"^{directory}({type_pipe})\.rst$")
+found_stubs = [f for f in modified_files if file_re.match(f)]
+for stub in found_stubs:
+ fro = stub
+ to = file_re.sub(rf"{directory}{pr_num}.\1.rst", fro)
+ print(f"Renaming {fro} to {to}")
+ if real:
+ subprocess.check_call(["mv", fro, to])
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 5ace4600a1f..8b0675e7df0 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -4,3 +4,11 @@ updates:
directory: "/"
schedule:
interval: "weekly"
+ groups:
+ actions:
+ patterns:
+ - "*"
+ labels:
+ - no-changelog-entry-needed
+ cooldown:
+ default-days: 7
diff --git a/.github/release.yaml b/.github/release.yaml
new file mode 100644
index 00000000000..386ce981916
--- /dev/null
+++ b/.github/release.yaml
@@ -0,0 +1,6 @@
+changelog:
+ exclude:
+ authors:
+ - dependabot
+ - pre-commit-ci
+ - github-actions
diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml
new file mode 100644
index 00000000000..b41e636bf5f
--- /dev/null
+++ b/.github/workflows/autofix.yml
@@ -0,0 +1,24 @@
+name: autofix.ci
+
+on: # yamllint disable-line rule:truthy
+ pull_request:
+ types: [opened, synchronize, labeled, unlabeled]
+
+permissions:
+ contents: read
+
+jobs:
+ autofix:
+ name: Autoupdate changelog entry and headers
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.12'
+ - run: pip install --upgrade towncrier pygithub gitpython numpy
+ - run: python ./.github/actions/rename_towncrier/rename_towncrier.py
+ - run: python ./tools/dev/ensure_headers.py
+ - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
diff --git a/.github/workflows/automerge.yml b/.github/workflows/automerge.yml
new file mode 100644
index 00000000000..68720eaaa34
--- /dev/null
+++ b/.github/workflows/automerge.yml
@@ -0,0 +1,17 @@
+name: Bot auto-merge
+on: pull_request # yamllint disable-line rule:truthy
+
+jobs:
+ autobot:
+ permissions:
+ contents: write
+ pull-requests: write
+ runs-on: ubuntu-latest
+ # Names can be found with gh api /repos/mne-tools/mne-python/pulls/12998 -q .user.login for example
+ if: (github.event.pull_request.user.login == 'dependabot[bot]' || github.event.pull_request.user.login == 'pre-commit-ci[bot]' || github.event.pull_request.user.login == 'github-actions[bot]') && github.repository == 'mne-tools/mne-python'
+ steps:
+ - name: Enable auto-merge for bot PRs
+ run: gh pr merge --auto --squash "$PR_URL"
+ env:
+ PR_URL: ${{github.event.pull_request.html_url}}
+ GH_TOKEN: ${{secrets.GITHUB_TOKEN}}
diff --git a/.github/workflows/check_changelog.yml b/.github/workflows/check_changelog.yml
new file mode 100644
index 00000000000..7f8c0d28d7f
--- /dev/null
+++ b/.github/workflows/check_changelog.yml
@@ -0,0 +1,22 @@
+name: Changelog
+
+on: # yamllint disable-line rule:truthy
+ pull_request:
+ types: [opened, synchronize, labeled, unlabeled]
+ branches: ["main"]
+
+permissions:
+ contents: read
+
+jobs:
+ changelog_checker:
+ name: Check towncrier entry in doc/changes/dev/
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: false
+ - uses: scientific-python/action-towncrier-changelog@v2
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ BOT_USERNAME: changelog-bot
diff --git a/.github/workflows/circle_artifacts.yml b/.github/workflows/circle_artifacts.yml
index c444a1e9bae..7bce0a59688 100644
--- a/.github/workflows/circle_artifacts.yml
+++ b/.github/workflows/circle_artifacts.yml
@@ -1,14 +1,18 @@
-on: [status]
+on: [status] # yamllint disable-line rule:truthy
+permissions:
+ contents: read
+ statuses: write
jobs:
circleci_artifacts_redirector_job:
if: "${{ startsWith(github.event.context, 'ci/circleci: build_docs') }}"
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-latest
name: Run CircleCI artifacts redirector
steps:
- name: GitHub Action step
- uses: larsoner/circleci-artifacts-redirector-action@master
+ uses: scientific-python/circleci-artifacts-redirector-action@master
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- artifact-path: 0/dev/index.html
+ api-token: ${{ secrets.CIRCLECI_TOKEN }}
+ artifact-path: 0/html/sg_execution_times.html
circleci-jobs: build_docs,build_docs_main
job-title: Check the rendered docs here!
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 843a05e0cdb..c6a8ceee797 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -38,30 +38,32 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v5
+ with:
+ persist-credentials: false
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v4
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
-
+
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
-
+
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
- uses: github/codeql-action/autobuild@v2
+ uses: github/codeql-action/autobuild@v4
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- # If the Autobuild fails above, remove it and uncomment the following three lines.
+ # If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
@@ -69,4 +71,4 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v4
diff --git a/.github/workflows/codespell_and_flake.yml b/.github/workflows/codespell_and_flake.yml
deleted file mode 100644
index e191caa25d1..00000000000
--- a/.github/workflows/codespell_and_flake.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: 'codespell_and_flake'
-# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
-# https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types#pullrequestevent
-# workflow name, PR number (empty on push), push ref (empty on PR)
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
- cancel-in-progress: true
-on:
- push:
- branches:
- - '*'
- pull_request:
- branches:
- - '*'
-
-jobs:
- style:
- name: 'codespell and flake'
- runs-on: ubuntu-20.04
- env:
- CODESPELL_DIRS: 'mne/ doc/ tutorials/ examples/'
- CODESPELL_SKIPS: 'doc/_build,doc/auto_*,*.fif,*.eve,*.gz,*.tgz,*.zip,*.mat,*.stc,*.label,*.w,*.bz2,*.annot,*.sulc,*.log,*.local-copy,*.orig_avg,*.inflated_avg,*.gii,*.pyc,*.doctree,*.pickle,*.inv,*.png,*.edf,*.touch,*.thickness,*.nofix,*.volume,*.defect_borders,*.mgh,lh.*,rh.*,COR-*,FreeSurferColorLUT.txt,*.examples,.xdebug_mris_calc,bad.segments,BadChannels,*.hist,empty_file,*.orig,*.js,*.map,*.ipynb,searchindex.dat,install_mne_c.rst,plot_*.rst,*.rst.txt,c_EULA.rst*,*.html,gdf_encodes.txt,*.svg,references.bib,*.css,*.edf,*.bdf,*.vhdr'
-
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
- with:
- python-version: '3.9'
- architecture: 'x64'
- - run: |
- python -m pip install --upgrade pip setuptools wheel
- python -m pip install flake8
- name: 'Install dependencies'
- - uses: rbialon/flake8-annotations@v1
- name: 'Setup flake8 annotations'
- - run: make flake
- name: 'Run flake8'
- - uses: codespell-project/actions-codespell@v1.0
- with:
- path: ${{ env.CODESPELL_DIRS }}
- skip: ${{ env.CODESPELL_SKIPS }}
- builtin: 'clear,rare,informal,names'
- ignore_words_file: 'ignore_words.txt'
- uri_ignore_words_list: 'bu'
- name: 'Run codespell'
diff --git a/.github/workflows/compat_minimal.yml b/.github/workflows/compat_minimal.yml
deleted file mode 100644
index a0027f46397..00000000000
--- a/.github/workflows/compat_minimal.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-name: 'compat / minimal'
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
- cancel-in-progress: true
-on:
- push:
- branches:
- - '*'
- pull_request:
- branches:
- - '*'
-
-jobs:
- # Minimal (runs with and without testing data)
- job:
- name: 'minimal 3.8'
- runs-on: ubuntu-20.04
- defaults:
- run:
- shell: bash
- env:
- CONDA_DEPENDENCIES: 'numpy scipy matplotlib'
- DEPS: 'minimal'
- DISPLAY: ':99.0'
- MNE_DONTWRITE_HOME: true
- MNE_FORCE_SERIAL: true
- MNE_LOGGING_LEVEL: 'warning'
- MNE_SKIP_NETWORK_TEST: 1
- OPENBLAS_NUM_THREADS: '1'
- PYTHONUNBUFFERED: '1'
- PYTHON_VERSION: '3.8'
- steps:
- - uses: actions/checkout@v3
- with:
- fetch-depth: 0
- - run: ./tools/setup_xvfb.sh
- name: 'Setup xvfb'
- - uses: conda-incubator/setup-miniconda@v2
- with:
- python-version: ${{ env.PYTHON_VERSION }}
- miniforge-version: latest
- miniforge-variant: Mambaforge
- use-mamba: true
- - shell: bash -el {0}
- run: |
- ./tools/github_actions_dependencies.sh
- source tools/get_minimal_commands.sh
- name: 'Install dependencies'
- - shell: bash -el {0}
- run: ./tools/github_actions_install.sh
- name: 'Install MNE'
- - shell: bash -el {0}
- run: ./tools/github_actions_infos.sh
- name: 'Show infos'
- - shell: bash -el {0}
- run: ./tools/get_testing_version.sh
- name: 'Get testing version'
- - shell: bash -el {0}
- run: MNE_SKIP_TESTING_DATASET_TESTS=true pytest -m "not (ultraslowtest or pgtest)" --tb=short --cov=mne --cov-report xml -vv -rfE mne/
- name: Run tests with no testing data
- - uses: actions/cache@v3
- with:
- key: ${{ env.TESTING_VERSION }}
- path: ~/mne_data
- name: 'Cache testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_download.sh
- name: 'Download testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_locale.sh
- name: 'Print locale'
- - shell: bash -el {0}
- run: ./tools/github_actions_test.sh
- name: 'Run tests'
- - uses: codecov/codecov-action@v3
- if: success()
- name: 'Upload coverage to CodeCov'
diff --git a/.github/workflows/compat_old.yml b/.github/workflows/compat_old.yml
deleted file mode 100644
index 36e47774231..00000000000
--- a/.github/workflows/compat_old.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-name: 'compat / old'
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
- cancel-in-progress: true
-on:
- push:
- branches:
- - '*'
- pull_request:
- branches:
- - '*'
-
-jobs:
- job:
- name: 'old 3.8'
- runs-on: ubuntu-20.04
- defaults:
- run:
- shell: bash
- env:
- CONDA_DEPENDENCIES: 'numpy=1.20.2 scipy=1.6.3 matplotlib=3.4 pandas=1.2.4 scikit-learn=0.24.2'
- DISPLAY: ':99.0'
- MNE_LOGGING_LEVEL: 'warning'
- OPENBLAS_NUM_THREADS: '1'
- PYTHONUNBUFFERED: '1'
- PYTHON_VERSION: '3.8'
- MNE_IGNORE_WARNINGS_IN_TESTS: 'true'
- steps:
- - uses: actions/checkout@v3
- - run: ./tools/setup_xvfb.sh
- name: 'Setup xvfb'
- - uses: conda-incubator/setup-miniconda@v2
- with:
- python-version: ${{ env.PYTHON_VERSION }}
- miniforge-version: latest
- miniforge-variant: Mambaforge
- use-mamba: true
- - shell: bash -el {0}
- run: |
- ./tools/github_actions_dependencies.sh
- source tools/get_minimal_commands.sh
- name: 'Install dependencies'
- - shell: bash -el {0}
- run: ./tools/github_actions_install.sh
- name: 'Install MNE'
- - shell: bash -el {0}
- run: ./tools/github_actions_infos.sh
- name: 'Show infos'
- - shell: bash -el {0}
- run: ./tools/get_testing_version.sh
- name: 'Get testing version'
- - uses: actions/cache@v3
- with:
- key: ${{ env.TESTING_VERSION }}
- path: ~/mne_data
- name: 'Cache testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_download.sh
- name: 'Download testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_locale.sh
- name: 'Print locale'
- - shell: bash -el {0}
- run: ./tools/github_actions_test.sh
- name: 'Run tests'
- - uses: codecov/codecov-action@v3
- if: success()
- name: 'Upload coverage to CodeCov'
diff --git a/.github/workflows/credit.yml b/.github/workflows/credit.yml
new file mode 100644
index 00000000000..3ceb98a185d
--- /dev/null
+++ b/.github/workflows/credit.yml
@@ -0,0 +1,45 @@
+name: Contributor credit
+
+on: # yamllint disable-line rule:truthy
+ # Scheduled actions only run on the main repo branch, which is exactly what we want
+ schedule:
+ - cron: '0 0 1 * *' # At 00:00 on day-of-month 1
+ workflow_dispatch:
+
+jobs:
+ update_credit:
+ permissions:
+ contents: write
+ pull-requests: write
+ name: Update
+ runs-on: ubuntu-latest
+ env:
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_TOKEN: ${{ github.token }}
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: true
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.12'
+ - run: pip install pygithub -e .
+ - run: python tools/dev/update_credit_json.py
+ - run: git add -f doc/sphinxext/prs/*.json
+ - run: |
+ git diff && git status --porcelain
+ if [[ $(git status --porcelain) ]]; then
+ echo "dirty=true" >> $GITHUB_OUTPUT
+ fi
+ id: status
+ - name: Create PR
+ run: |
+ set -xeo pipefail
+ git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ git checkout -b credit
+ git commit -am "MAINT: Update code credit"
+ git push origin credit
+ PR_NUM=$(gh pr create --base main --head credit --title "MAINT: Update code credit" --body "Created by credit [GitHub action](https://github.com/mne-tools/mne-python/actions/runs/${{ github.run_id }}).
*Adjustments may need to be made to \`doc/changes/credit_tools.py\` or \`.mailmap\` etc. to get CircleCI to pass.*" --label "no-changelog-entry-needed")
+ echo "Opened https://github.com/mne-tools/mne-python/pull/${PR_NUM}" >> $GITHUB_STEP_SUMMARY
+ if: steps.status.outputs.dirty == 'true'
diff --git a/.github/workflows/linux_conda.yml b/.github/workflows/linux_conda.yml
deleted file mode 100644
index 9822254ecee..00000000000
--- a/.github/workflows/linux_conda.yml
+++ /dev/null
@@ -1,125 +0,0 @@
-name: 'linux / conda'
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
- cancel-in-progress: true
-on:
- push:
- branches:
- - '*'
- pull_request:
- branches:
- - '*'
-
-jobs:
- py310:
- runs-on: ubuntu-20.04
- name: 'linux conda 3.10'
- defaults:
- run:
- shell: bash
- env:
- CONDA_ENV: 'environment.yml'
- DISPLAY: ':99.0'
- MNE_LOGGING_LEVEL: 'warning'
- MKL_NUM_THREADS: '1'
- PYTHONUNBUFFERED: '1'
- PYTHON_VERSION: '3.10'
- steps:
- - uses: actions/checkout@v3
- - run: ./tools/setup_xvfb.sh
- name: 'Setup xvfb'
- - uses: conda-incubator/setup-miniconda@v2
- with:
- activate-environment: 'mne'
- python-version: ${{ env.PYTHON_VERSION }}
- environment-file: ${{ env.CONDA_ENV }}
- # No mamba for this one job (use conda itself!)
- - shell: bash -el {0}
- run: |
- ./tools/github_actions_dependencies.sh
- source tools/get_minimal_commands.sh
- name: 'Install dependencies'
- - shell: bash -el {0}
- run: mne_surf2bem --version
- name: 'Check minimal commands'
- - shell: bash -el {0}
- run: ./tools/github_actions_install.sh
- name: 'Install MNE'
- - shell: bash -el {0}
- run: |
- QT_QPA_PLATFORM=xcb LIBGL_DEBUG=verbose LD_DEBUG=libs python -c "import pyvistaqt; pyvistaqt.BackgroundPlotter(show=True)"
- name: 'Check Qt GL'
- - shell: bash -el {0}
- run: ./tools/github_actions_infos.sh
- name: 'Show infos'
- - shell: bash -el {0}
- run: ./tools/get_testing_version.sh
- name: 'Get testing version'
- - uses: actions/cache@v3
- with:
- key: ${{ env.TESTING_VERSION }}
- path: ~/mne_data
- name: 'Cache testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_download.sh
- name: 'Download testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_locale.sh
- name: 'Print locale'
- - shell: bash -el {0}
- run: ./tools/github_actions_test.sh
- name: 'Run tests'
- - uses: codecov/codecov-action@v3
- if: success()
- name: 'Upload coverage to CodeCov'
-
- notebook:
- timeout-minutes: 90
- runs-on: ubuntu-20.04
- defaults:
- run:
- shell: bash
- env:
- CONDA_ENV: 'environment.yml'
- PYTHON_VERSION: '3.10'
- steps:
- - uses: actions/checkout@v3
- - uses: conda-incubator/setup-miniconda@v2
- with:
- activate-environment: 'mne'
- python-version: ${{ env.PYTHON_VERSION }}
- environment-file: ${{ env.CONDA_ENV }}
- miniforge-version: latest
- miniforge-variant: Mambaforge
- use-mamba: true
- - shell: bash -el {0}
- run: |
- # TODO: As of 2023/02/28, notebook tests need a pinned mesalib
- mamba install -c conda-forge "vtk>=9.2=*osmesa*" "mesalib=21.2.5"
- mamba list
- name: 'Install OSMesa VTK variant'
- - shell: bash -el {0}
- run: |
- pip uninstall -yq mne
- pip install --progress-bar off -ve .[test]
- name: 'Install dependencies'
- - shell: bash -el {0}
- run: ./tools/github_actions_infos.sh
- name: 'Show infos'
- - shell: bash -el {0}
- run: ./tools/get_testing_version.sh
- name: 'Get testing version'
- - uses: actions/cache@v3
- with:
- key: ${{ env.TESTING_VERSION }}
- path: ~/mne_data
- name: 'Cache testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_download.sh
- name: 'Download testing data'
- - shell: bash -el {0}
- run: pytest --tb=short -m "not pgtest" --cov=mne --cov-report=xml --cov-report=html -vv mne/viz
- name: 'Run viz tests'
- - uses: codecov/codecov-action@v3
- if: success()
- name: 'Upload coverage to CodeCov'
diff --git a/.github/workflows/linux_pip.yml b/.github/workflows/linux_pip.yml
deleted file mode 100644
index fff47e31508..00000000000
--- a/.github/workflows/linux_pip.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-name: 'linux / pip-pre'
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
- cancel-in-progress: true
-on:
- push:
- branches:
- - '*'
- pull_request:
- branches:
- - '*'
-
-permissions:
- contents: read
-
-jobs:
- # PIP-pre + non-default stim channel + log level info
- job:
- name: 'linux pip 3.10'
- runs-on: ubuntu-20.04
- defaults:
- run:
- shell: bash
- env:
- DISPLAY: ':99.0'
- MNE_LOGGING_LEVEL: 'info'
- MNE_STIM_CHANNEL: 'STI101'
- OPENBLAS_NUM_THREADS: '1'
- PYTHONUNBUFFERED: '1'
- PYTHON_VERSION: '3.10'
- steps:
- - uses: actions/checkout@v3
- - run: ./tools/setup_xvfb.sh
- name: 'Setup xvfb'
- - uses: actions/setup-python@v4
- with:
- python-version: ${{ env.PYTHON_VERSION }}
- name: 'Setup python'
- - shell: bash -el {0}
- run: |
- ./tools/github_actions_dependencies.sh
- source tools/get_minimal_commands.sh
- name: 'Install dependencies'
- - shell: bash -el {0}
- run: mne_surf2bem --version
- name: 'Check minimal commands'
- - shell: bash -el {0}
- run: ./tools/github_actions_install.sh
- name: 'Install MNE'
- - shell: bash -el {0}
- run: ./tools/github_actions_infos.sh
- name: 'Show infos'
- - shell: bash -el {0}
- run: |
- ./tools/check_qt_import.sh PyQt6
- python -c "import matplotlib; matplotlib.use('QtAgg'); import matplotlib.pyplot as plt; plt.figure()"
- - shell: bash -el {0}
- run: ./tools/get_testing_version.sh
- name: 'Get testing version'
- - uses: actions/cache@v3
- with:
- key: ${{ env.TESTING_VERSION }}
- path: ~/mne_data
- name: 'Cache testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_download.sh
- name: 'Download testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_locale.sh
- name: 'Print locale'
- - shell: bash -el {0}
- run: ./tools/github_actions_test.sh
- name: 'Run tests'
- - uses: codecov/codecov-action@v3
- if: success()
- name: 'Upload coverage to CodeCov'
diff --git a/.github/workflows/macos_conda.yml b/.github/workflows/macos_conda.yml
deleted file mode 100644
index 3befcc0b32b..00000000000
--- a/.github/workflows/macos_conda.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-name: 'macos / conda'
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
- cancel-in-progress: true
-on:
- push:
- branches:
- - '*'
- pull_request:
- branches:
- - '*'
-
-jobs:
- job:
- name: 'macos 3.8'
- runs-on: macos-latest
- defaults:
- run:
- shell: bash
- env:
- PYTHON_VERSION: '3.8'
- MNE_LOGGING_LEVEL: 'warning'
- MNE_3D_OPTION_SMOOTH_SHADING: 'true'
- OPENBLAS_NUM_THREADS: '1'
- PYTHONUNBUFFERED: '1'
- CONDA_ENV: 'environment.yml'
- CI_OS_NAME: 'osx'
- steps:
- - uses: actions/checkout@v3
- - uses: conda-incubator/setup-miniconda@v2
- with:
- activate-environment: 'mne'
- python-version: ${{ env.PYTHON_VERSION }}
- environment-file: ${{ env.CONDA_ENV }}
- miniforge-version: latest
- miniforge-variant: Mambaforge
- use-mamba: true
- - shell: bash -el {0}
- run: |
- ./tools/github_actions_dependencies.sh
- name: 'Install dependencies'
- # https://github.com/mne-tools/mne-python/issues/10805
- # https://github.com/mne-tools/mne-python/runs/7042965701?check_suite_focus=true
- #- shell: bash -el {0}
- # run: |
- # source tools/get_minimal_commands.sh
- # name: 'Install minimal commands'
- #- shell: bash -el {0}
- # run: mne_surf2bem --version
- # name: 'Check minimal commands'
- - shell: bash -el {0}
- run: ./tools/github_actions_install.sh
- name: 'Install MNE'
- - shell: bash -el {0}
- run: ./tools/github_actions_infos.sh
- name: 'Show infos'
- - shell: bash -el {0}
- run: ./tools/get_testing_version.sh
- name: 'Get testing version'
- - uses: actions/cache@v3
- with:
- key: ${{ env.TESTING_VERSION }}
- path: ~/mne_data
- name: 'Cache testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_download.sh
- name: 'Download testing data'
- - shell: bash -el {0}
- run: ./tools/github_actions_locale.sh
- name: 'Print locale'
- - shell: bash -el {0}
- run: ./tools/github_actions_test.sh
- name: 'Run tests'
- - uses: codecov/codecov-action@v3
- if: success()
- name: 'Upload coverage to CodeCov'
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000000..85e8ea7ebc7
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,51 @@
+# Upload a Python Package using Twine when a release is created
+
+name: Build
+on: # yamllint disable-line rule:truthy
+ release:
+ types: [published]
+ push:
+ branches: ["main", "maint/*"]
+ pull_request:
+ branches: ["main", "maint/*"]
+
+permissions:
+ contents: read
+
+jobs:
+ package:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.10'
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install build twine
+ - run: python -m build --sdist --wheel
+ - run: twine check --strict dist/*
+ - uses: actions/upload-artifact@v5
+ with:
+ name: dist
+ path: dist
+
+ pypi-upload:
+ needs: package
+ runs-on: ubuntu-latest
+ if: github.event_name == 'release'
+ permissions:
+ id-token: write # for trusted publishing
+ environment:
+ name: pypi
+ url: https://pypi.org/p/mne
+ steps:
+ - uses: actions/download-artifact@v6
+ with:
+ name: dist
+ path: dist
+ - uses: pypa/gh-action-pypi-publish@release/v1
+ if: github.event_name == 'release'
diff --git a/.github/workflows/spec_zero.yml b/.github/workflows/spec_zero.yml
new file mode 100644
index 00000000000..6a45f2876b6
--- /dev/null
+++ b/.github/workflows/spec_zero.yml
@@ -0,0 +1,62 @@
+name: SPEC0
+
+on: # yamllint disable-line rule:truthy
+ schedule:
+ - cron: '0 0 * * 1' # At 00:00 every Monday
+ workflow_dispatch:
+ inputs:
+ ssh:
+ description: 'Enable ssh debugging'
+ required: false
+ default: false
+ type: boolean
+
+jobs:
+ update_versions:
+ permissions:
+ contents: write
+ pull-requests: write
+ name: Update dependency versions
+ runs-on: ubuntu-latest
+ env:
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_TOKEN: ${{ github.token }}
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: true
+ - name: Triage SSH
+ run: |
+ if [[ "${{ inputs.ssh }}" == "true" ]] || [[ "$COMMIT_MESSAGE" == *"[actions ssh]"* ]]; then
+ echo "ENABLE_SSH=true" | tee -a $GITHUB_ENV
+ else
+ echo "ENABLE_SSH=false" | tee -a $GITHUB_ENV
+ fi
+ - name: Setup Remote SSH Connection
+ if: env.ENABLE_SSH == 'true'
+ uses: mxschmitt/action-tmate@v3
+ timeout-minutes: 10
+ with:
+ detached: true
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.12'
+ - run: pip install packaging requests tomlkit
+ - run: python tools/dev/spec_zero_update_versions.py
+ - run: |
+ git diff && git status --porcelain
+ if [[ $(git status --porcelain) ]]; then
+ echo "dirty=true" >> $GITHUB_OUTPUT
+ fi
+ id: status
+ - name: Create PR
+ run: |
+ set -xeo pipefail
+ git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ git checkout -b spec_zero
+ git commit -am "MAINT: Update dependency specifiers"
+ git push origin spec_zero
+          PR_URL=$(gh pr create --base main --head spec_zero --title "MAINT: Update dependency specifiers" --body "Created by spec_zero [GitHub action](https://github.com/mne-tools/mne-python/actions/runs/${{ github.run_id }}).
+          *Adjustments may need to be made to shims in \`mne/fixes.py\` in this or another PR. \`git grep TODO VERSION\` is a good starting point for finding potential updates.*" --label "no-changelog-entry-needed")
+          echo "Opened ${PR_URL}" >> $GITHUB_STEP_SUMMARY
+ if: steps.status.outputs.dirty == 'true'
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 00000000000..507bf4ebfb1
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,177 @@
+name: 'Tests'
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
+ cancel-in-progress: true
+on: # yamllint disable-line rule:truthy
+ push:
+ branches: ["main", "maint/*"]
+ pull_request:
+ branches: ["main", "maint/*"]
+ # adapted from spyder-ide/spyder
+ workflow_dispatch:
+ inputs:
+ ssh:
+ description: 'Enable ssh debugging'
+ required: false
+ default: false
+ type: boolean
+
+permissions:
+ contents: read
+
+jobs:
+ style:
+ name: Style
+ runs-on: ubuntu-latest
+ timeout-minutes: 3
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.13'
+ - uses: pre-commit/action@v3.0.1
+ - run: pip install mypy numpy scipy vulture
+ - run: mypy
+ - run: vulture
+
+ bandit:
+ name: Bandit
+ needs: style
+ runs-on: ubuntu-latest
+ steps:
+ - uses: davidslusser/actions_python_bandit@v1.0.1
+ with:
+ src: "mne"
+ options: "-c pyproject.toml -ll -r"
+ pip_install_command: "pip install bandit[toml]"
+
+ pytest:
+ name: '${{ matrix.os }} / ${{ matrix.kind }} / ${{ matrix.python }}'
+ needs: style
+ timeout-minutes: 120
+ runs-on: ${{ matrix.os }}
+ defaults:
+ run:
+ shell: bash -el {0}
+ env:
+ PYTHON_VERSION: '${{ matrix.python }}'
+ MKL_NUM_THREADS: '1'
+ OPENBLAS_NUM_THREADS: '1'
+ OMP_NUM_THREADS: '1'
+ PYTHONUNBUFFERED: '1'
+ MNE_CI_KIND: '${{ matrix.kind }}'
+ CI_OS_NAME: '${{ matrix.os }}'
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - os: ubuntu-latest
+ python: '3.13'
+ kind: pip
+ - os: ubuntu-latest
+ python: '3.13'
+ kind: pip-pre
+ - os: ubuntu-latest
+ python: '3.13'
+ kind: conda
+ - os: macos-latest # arm64 (Apple Silicon): Sequoia
+ python: '3.13'
+ kind: mamba
+ - os: macos-15-intel # intel: Sequoia
+ python: '3.13'
+ kind: mamba
+ - os: windows-latest
+ python: '3.11'
+ kind: mamba
+ - os: ubuntu-latest
+ python: '3.12'
+ kind: minimal
+ - os: ubuntu-22.04
+ python: '3.10'
+ kind: old
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ fetch-depth: 0
+ persist-credentials: false
+ - name: Get commit message
+ run: echo "COMMIT_MESSAGE=$(git show -s --format=%s ${{ github.event.pull_request.head.sha || github.sha }})" | tee -a ${GITHUB_ENV}
+ - name: Triage SSH
+ run: |
+ if [[ "${{ inputs.ssh }}" == "true" ]] || [[ "$COMMIT_MESSAGE" == *"[actions ssh]"* ]]; then
+ echo "ENABLE_SSH=true" | tee -a $GITHUB_ENV
+ else
+ echo "ENABLE_SSH=false" | tee -a $GITHUB_ENV
+ fi
+ - name: Setup Remote SSH Connection
+ if: env.ENABLE_SSH == 'true'
+ uses: mxschmitt/action-tmate@v3
+ timeout-minutes: 80
+ with:
+ detached: true
+ - run: ./tools/github_actions_env_vars.sh
+ # Xvfb/OpenGL
+ - uses: pyvista/setup-headless-display-action@v4
+ with:
+ qt: true
+ pyvista: false
+ wm: false
+ # Python (if pip)
+ - uses: actions/setup-python@v6
+ with:
+ python-version: ${{ matrix.python }}
+ if: startswith(matrix.kind, 'pip')
+ # Python (if conda)
+ - name: Fixes for conda
+ run: |
+ # For some reason on Linux we get crashes
+ if [[ "$RUNNER_OS" == "Linux" ]]; then
+ sed -i "/numba/d" environment.yml
+ fi
+ # And on Windows and macOS PySide6.9.0 segfaults
+ if [[ "$RUNNER_OS" == "macOS" ]]; then
+ sed -i "" "s/ - PySide6 .*/ - PySide6 =6.9.2/g" environment.yml
+ sed -i "" "s/ - vtk .*/ - vtk =9.5.1/g" environment.yml
+
+ else
+ sed -i "s/ - PySide6 .*/ - PySide6 =6.9.2/g" environment.yml
+ sed -i "s/ - vtk .*/ - vtk =9.5.1/g" environment.yml
+ if [[ "$RUNNER_OS" == "Windows" ]]; then
+ echo "MNE_IS_OSMESA=true" | tee -a $GITHUB_ENV
+ fi
+ fi
+ if: matrix.kind == 'conda' || matrix.kind == 'mamba'
+ - uses: mamba-org/setup-micromamba@v2
+ with:
+ environment-file: ${{ env.CONDA_ENV }}
+ environment-name: mne
+ log-level: ${{ runner.debug == '1' && 'debug' || 'info' }}
+ create-args: >-
+ python=${{ env.PYTHON_VERSION }}
+ -v
+ if: ${{ !startswith(matrix.kind, 'pip') }}
+ timeout-minutes: 20
+ - run: bash ./tools/github_actions_dependencies.sh
+ # Minimal commands on Linux (macOS stalls)
+ - run: bash ./tools/get_minimal_commands.sh
+ if: startswith(matrix.os, 'ubuntu') && matrix.kind != 'minimal' && matrix.kind != 'old'
+ - run: bash ./tools/github_actions_infos.sh
+ # Check Qt
+ - run: bash ./tools/check_qt_import.sh $MNE_QT_BACKEND
+ if: env.MNE_QT_BACKEND != ''
+ - name: Run tests with no testing data
+ run: MNE_SKIP_TESTING_DATASET_TESTS=true pytest -m "not (ultraslowtest or pgtest)" --tb=short --cov=mne --cov-report xml -vv -rfE mne/
+ if: matrix.kind == 'minimal'
+ - run: ./tools/get_testing_version.sh
+ - uses: actions/cache@v4
+ with:
+ key: ${{ env.TESTING_VERSION }}
+ path: ~/mne_data
+ - run: bash ./tools/github_actions_download.sh
+ - run: bash ./tools/github_actions_test.sh # for some reason on macOS we need to run "bash X" in order for a failed test run to show up
+ - uses: codecov/codecov-action@v5
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ if: success() || failure()
diff --git a/.github/zizmor.yml b/.github/zizmor.yml
new file mode 100644
index 00000000000..00ea2bb64ba
--- /dev/null
+++ b/.github/zizmor.yml
@@ -0,0 +1,5 @@
+rules:
+ unpinned-uses:
+ config:
+ policies:
+ "*": ref-pin
diff --git a/.gitignore b/.gitignore
index 40c64c7bb65..d66fbef96de 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,6 +20,11 @@ junit-results.xml
*.tmproj
*.png
*.dat
+# make sure we ship data files
+!mne/data/**/*.dat
+!mne/data/**/*.fif
+!mne/data/**/*.fif.gz
+!mne/icons/**/*.png
.DS_Store
events.eve
foo-lh.label
@@ -27,7 +32,6 @@ foo.lout
bar.lout
foobar.lout
epochs_data.mat
-memmap*.dat
tmp-*.w
tmtags
auto_examples
@@ -41,7 +45,6 @@ MNE-brainstorm-data*
physionet-sleep-data*
MEGSIM*
build
-mne/_version.py
coverage
htmlcov
.cache/
@@ -63,13 +66,17 @@ tutorials/misc/report.h5
tutorials/io/fnirs.csv
pip-log.txt
.coverage*
+!.coveragerc
coverage.xml
tags
-doc/coverages
-doc/samples
-doc/*.dat
-doc/fil-result
-doc/optipng.exe
+/doc/coverages
+/doc/samples
+/doc/fil-result
+/doc/optipng.exe
+/doc/sphinxext/.joblib
+sg_execution_times.rst
+sg_api_usage.rst
+sg_api_unused.dot
cover
*.html
@@ -90,7 +97,8 @@ cover
.venv/
venv/
-*.json
+/*.json
+!codemeta.json
.hypothesis/
-
+.ruff_cache/
.ipynb_checkpoints/
diff --git a/.lgtm.yml b/.lgtm.yml
deleted file mode 100644
index 4a43aa25c57..00000000000
--- a/.lgtm.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-extraction:
- javascript:
- index:
- filters:
- - exclude: "**/*.js"
-queries:
- - exclude: py/missing-equals
- - exclude: py/import-and-import-from
diff --git a/.mailmap b/.mailmap
index 0655885fc35..f76582bc657 100644
--- a/.mailmap
+++ b/.mailmap
@@ -2,8 +2,9 @@ Adam Li Adam Li
Adam Li Adam Li
Alan Leggitt leggitta
Alessandro Tonin Lychfindel <58313635+Lychfindel@users.noreply.github.com>
-Alex Rockhill Alex
+Alex Lepauvre Alex lepauvre
Alex Rockhill Alex
+Alex Rockhill Alex
Alex Rockhill Alex Rockhill
Alex Rockhill Alex Rockhill
Alexander Rudiuk Alexander Rudiuk
@@ -14,16 +15,24 @@ Alexandre Gramfort Alexandre Gramfort Alexandre Gramfort
Alexandre Gramfort Alexandre Gramfort
Ana Radanovic anaradanovic <79697247+anaradanovic@users.noreply.github.com>
+Andres Rodriguez
Andrew Dykstra
Andrew Quinn AJQuinn
-Anna Padee <44297909+apadee@users.noreply.github.com> apadee <44297909+apadee@users.noreply.github.com>
+Andy Gilbert <7andy121@gmail.com> Andrew Gilbert
+Andy Gilbert <7andy121@gmail.com> Andrew Gilbert
+Anna Padee apadee <44297909+apadee@users.noreply.github.com>
Anne-Sophie Dubarry annesodub
Archit Singhal <43236121+architsinghal-mriirs@users.noreply.github.com> archit singhal
-Ashley Drew <33734402+ashdrew@users.noreply.github.com> ashdrew <33734402+ashdrew@users.noreply.github.com>
+Arne Pelzer aplzr <7202498+aplzr@users.noreply.github.com>
+Arne Pelzer pzr
+Ashley Drew ashdrew <33734402+ashdrew@users.noreply.github.com>
Asish Panda kaichogami
Basile Pinsard
Brad Buran Brad Buran
Britta Westner britta-wstnr
+Bruno Aristimunha
+btkcodedev
+buildqa
Burkhard Maess Burkhard Maess
Carina Forster Carina
Carlos de la Torre carlos
@@ -40,15 +49,18 @@ Christina Zhao ChristinaZhao
Christoph Dinh Christoph Dinh
Christopher J. Bailey Chris Bailey
Claire Braboszcz claire-braboszcz
+Clemens Brunner
Clément Moutard
-Cora Kim <41998428+kimcoco@users.noreply.github.com> kimcoco <41998428+kimcoco@users.noreply.github.com>
+Cora Kim kimcoco <41998428+kimcoco@users.noreply.github.com>
Cristóbal Moënne-Loccoz Cristóbal
Dan G. Wakeman Daniel G. Wakeman
Dan G. Wakeman Daniel Wakeman
Dan G. Wakeman dgwakeman
Dan G. Wakeman dgwakeman
-Daniel Carlström Schad Daniel C Schad
+Daniel C Schad Daniel C Schad
+Daniel C Schad Daniel Carlström Schad
Daniel McCloy Daniel McCloy
+Daniel McCloy Daniel McCloy
Daniel McCloy drammock
Daniel Strohmeier Daniel Strohmeier
Daniel Strohmeier joewalter
@@ -66,6 +78,8 @@ Denis A. Engemann Denis Engemann dmalt
Dominik Krzemiński dokato
Dominik Welke dominikwelke <33089761+dominikwelke@users.noreply.github.com>
+Dominik Welke dominikwelke
+Dominik Wetzel Dominik Wetzel
Eberhard Eich ebeich
Eduard Ort Eduard Ort
Eduard Ort eort
@@ -73,7 +87,8 @@ Eduard Ort examplename
Ellen Lau ellenlau
Emily Stephen Emily P. Stephen
Emily Stephen emilyps14
-Enrico Varano <69973551+enricovara@users.noreply.github.com> enricovara <69973551+enricovara@users.noreply.github.com>
+Emma Bailey <93327939+emma-bailey@users.noreply.github.com> emma-bailey <93327939+emma-bailey@users.noreply.github.com>
+Enrico Varano enricovara <69973551+enricovara@users.noreply.github.com>
Enzo Altamiranda enzo
Eric Larson Eric Larson
Eric Larson Eric Larson
@@ -89,7 +104,7 @@ Erkka Heinila Teekuningas
Etienne de Montalivet
Evgenii Kalenkovich kalenkovich
Evgeny Goldstein <84768107+evgenygoldstein@users.noreply.github.com> evgenygoldstein <84768107+evgenygoldstein@users.noreply.github.com>
-Ezequiel Mikulan <39155887+ezemikulan@users.noreply.github.com> ezemikulan <39155887+ezemikulan@users.noreply.github.com>
+Ezequiel Mikulan ezemikulan <39155887+ezemikulan@users.noreply.github.com>
Fahimeh Mamashli <33672431+fmamashli@users.noreply.github.com> fmamashli <33672431+fmamashli@users.noreply.github.com>
Fede Raimondo Fede
Fede Raimondo Fede Raimondo
@@ -100,32 +115,44 @@ Fede Raimondo Federico Raimondo
Federico Zamberlan <44038765+fzamberlan@users.noreply.github.com>
Felix Klotzsche eioe
Felix Klotzsche eioe
-Félix Raimundo Felix Raimundo
Frederik D. Weber Frederik-D-Weber
Fu-Te Wong foucault
Fu-Te Wong zuxfoucault
+Félix Raimundo Felix Raimundo
Gansheng Tan <49130176+GanshengT@users.noreply.github.com> Gansheng TAN <49130176+GanshengT@users.noreply.github.com>
+Gennadiy Belonosov <7503709+Genuster@users.noreply.github.com> Gennadiy <7503709+Genuster@users.noreply.github.com>
+Gennadiy Belonosov <7503709+Genuster@users.noreply.github.com> Genuster <7503709+Genuster@users.noreply.github.com>
Giorgio Marinato neurogima <76406896+neurogima@users.noreply.github.com>
+Giulio Gabrieli
Guillaume Dumas deep-introspection
Guillaume Dumas Guillaume Dumas
+Hakimeh Aslsardroud
Hamid Maymandi <46011104+HamidMandi@users.noreply.github.com> Hamid <46011104+HamidMandi@users.noreply.github.com>
+Hasrat Ali Arzoo hasrat17 <56307533+hasrat17@users.noreply.github.com>
+Hongjiang Ye YE Hongjiang
Hongjiang Ye YE Hongjiang
Hubert Banville hubertjb
+Hyonyoung Shin <55095699+mcvain@users.noreply.github.com> mcvain <55095699+mcvain@users.noreply.github.com>
Hüseyin Orkun Elmas Hüseyin
Ingoo Lee dlsrnsi
+Ivo de Jong ivopascal
Jaakko Leppakangas Jaakko Leppakangas
Jaakko Leppakangas jaeilepp
Jaakko Leppakangas jaeilepp
+Jacob Phelan
Jair Montoya jmontoyam
+Jan Ebert janEbert
+Jan Sedivy
Jan Sosulski jsosulski
Jean-Baptiste Schiratti Jean-Baptiste SCHIRATTI
-Jean-Remi King Jean-Rémi KING
-Jean-Remi King kingjr
-Jean-Remi King kingjr
-Jean-Remi King kingjr
-Jean-Remi King UMR9752
-Jean-Remi King UMR9752
+Jean-Rémi King Jean-Rémi KING
+Jean-Rémi King kingjr
+Jean-Rémi King kingjr
+Jean-Rémi King kingjr
+Jean-Rémi King UMR9752
+Jean-Rémi King UMR9752
Jeff Stout jstout211
+Jennifer Behnke
Jesper Duemose Nielsen jdue
Jevri Hanna Jeff Hanna
Jevri Hanna Jevri Hanna
@@ -142,30 +169,36 @@ Jona Sassenhagen jona-sassenhagen jona-sassenhagen@
Jona Sassenhagen jona.sassenhagen@gmail.com
Jona Sassenhagen sassenha
+Jonathan Kuziek
Jordan Drew <39603454+jadrew43@users.noreply.github.com> jadrew43 <39603454+jadrew43@users.noreply.github.com>
Joris Van den Bossche Joris Van den Bossche
+Joshua Calder-Travis <38797399+jCalderTravis@users.noreply.github.com> jCalderTravis <38797399+jCalderTravis@users.noreply.github.com>
+Joshua J Bear
+Joshua Teves Joshua Teves
José C. García Alanis Jose Alanis
José C. García Alanis Jose C. G. Alanis <12409129+JoseAlanis@users.noreply.github.com>
José C. García Alanis José C. G. Alanis <12409129+JoseAlanis@users.noreply.github.com>
José C. García Alanis José C. García Alanis <12409129+JoseAlanis@users.noreply.github.com>
-Joshua J Bear
-Joshua Teves Joshua Teves
Julius Welzel <52565341+JuliusWelzel@users.noreply.github.com> jwelzel <52565341+JuliusWelzel@users.noreply.github.com>
+Justus Schwabedal
Kaisu Lankinen <41806798+klankinen@users.noreply.github.com> klankinen <41806798+klankinen@users.noreply.github.com>
Kambiz Tabavi Kambiz Tavabi
Kambiz Tabavi kambysese
Katarina Slama katarinaslama
+Katia Al-Amir <129207373+katia-sentry@users.noreply.github.com> Katia <129207373+katia-sentry@users.noreply.github.com>
Kostiantyn Maksymenko kostiantyn maksymenko
Kostiantyn Maksymenko Maksymenko Kostiantyn
Kostiantyn Maksymenko Maksymenko Kostiantyn
Laetitia Grabot