diff --git a/.cruft.json b/.cruft.json
index b648b4a..af57888 100644
--- a/.cruft.json
+++ b/.cruft.json
@@ -1,7 +1,7 @@
 {
   "template": "https://github.com/scverse/cookiecutter-scverse",
-  "commit": "94ef9fb6f9ad8cfe65a3d9575679c03c80c49cd1",
-  "checkout": "v0.5.0",
+  "commit": "6ff5b92b5d44ea6d8a88e47538475718d467db95",
+  "checkout": "v0.7.0",
   "context": {
     "cookiecutter": {
       "project_name": "FlowSOM",
@@ -36,7 +36,7 @@
       "trim_blocks": true
     },
     "_template": "https://github.com/scverse/cookiecutter-scverse",
-    "_commit": "94ef9fb6f9ad8cfe65a3d9575679c03c80c49cd1"
+    "_commit": "6ff5b92b5d44ea6d8a88e47538475718d467db95"
   }
 },
 "directory": null
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 3ca1ccb..6104b9e 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -1,6 +1,6 @@
 name: Bug report
 description: Report something that is broken or incorrect
-labels: bug
+type: Bug
 body:
   - type: markdown
     attributes:
@@ -9,8 +9,7 @@ body:
         detailing how to provide the necessary information for us to reproduce your bug. In brief:
         * Please provide exact steps how to reproduce the bug in a clean Python environment.
         * In case it's not clear what's causing this bug, please provide the data or the data generation procedure.
-        * Sometimes it is not possible to share the data, but usually it is possible to replicate problems on publicly
-          available datasets or to share a subset of your data.
+        * Replicate problems on public datasets or share data subsets when full sharing isn't possible.

   - type: textarea
     id: report
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index c1048c6..3ba5aea 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -1,6 +1,6 @@
 name: Feature request
 description: Propose a new feature for FlowSOM
-labels: enhancement
+type: Enhancement
 body:
   - type: textarea
     id: description
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 83e01a1..c6ecc2f 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -10,23 +10,16 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

-defaults:
-  run:
-    # to fail on error in multiline statements (-e), in pipes (-o pipefail), and on unset variables (-u).
-    shell: bash -euo pipefail {0}
-
 jobs:
   package:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           filter: blob:none
           fetch-depth: 0
       - name: Install uv
-        uses: astral-sh/setup-uv@v5
-        with:
-          cache-dependency-glob: pyproject.toml
+        uses: astral-sh/setup-uv@v7
       - name: Build package
         run: uv build
       - name: Check package
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 0c1ebfa..59a43d7 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -4,11 +4,6 @@ on:
   release:
     types: [published]

-defaults:
-  run:
-    # to fail on error in multiline statements (-e), in pipes (-o pipefail), and on unset variables (-u).
-    shell: bash -euo pipefail {0}
-
 # Use "trusted publishing", see https://docs.pypi.org/trusted-publishers/
 jobs:
   release:
@@ -20,14 +15,12 @@ jobs:
     permissions:
       id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           filter: blob:none
           fetch-depth: 0
       - name: Install uv
-        uses: astral-sh/setup-uv@v5
-        with:
-          cache-dependency-glob: pyproject.toml
+        uses: astral-sh/setup-uv@v7
       - name: Build package
         run: uv build
       - name: Publish package distributions to PyPI
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index d5cfb2a..6bf473b 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -12,48 +12,92 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

-defaults:
-  run:
-    # to fail on error in multiline statements (-e), in pipes (-o pipefail), and on unset variables (-u).
-    shell: bash -euo pipefail {0}
-
 jobs:
+  # Get the test environment from hatch as defined in pyproject.toml.
+  # This ensures that the pyproject.toml is the single point of truth for test definitions and the same tests are
+  # run locally and on continuous integration.
+  # Check [[tool.hatch.envs.hatch-test.matrix]] in pyproject.toml and https://hatch.pypa.io/latest/environment/ for
+  # more details.
+  get-environments:
+    runs-on: ubuntu-latest
+    outputs:
+      envs: ${{ steps.get-envs.outputs.envs }}
+    steps:
+      - uses: actions/checkout@v5
+        with:
+          filter: blob:none
+          fetch-depth: 0
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
+      - name: Get test environments
+        id: get-envs
+        run: |
+          ENVS_JSON=$(uvx hatch env show --json | jq -c 'to_entries
+            | map(
+              select(.key | startswith("hatch-test"))
+              | {
+                name: .key,
+                label: (if (.key | contains("pre")) then .key + " (PRE-RELEASE DEPENDENCIES)" else .key end),
+                python: .value.python
+              }
+            )')
+          echo "envs=${ENVS_JSON}" | tee $GITHUB_OUTPUT
+
+  # Run tests through hatch. Spawns a separate runner for each environment defined in the hatch matrix obtained above.
   test:
-    runs-on: ${{ matrix.os }}
+    needs: get-environments
+    permissions:
+      id-token: write # for codecov OIDC
     strategy:
       fail-fast: false
       matrix:
-        include:
-          - os: ubuntu-latest
-            python: "3.10"
-          - os: ubuntu-latest
-            python: "3.12"
-          - os: ubuntu-latest
-            python: "3.12"
-            pip-flags: "--pre"
-            name: PRE-RELEASE DEPENDENCIES
-
-    name: ${{ matrix.name }} Python ${{ matrix.python }}
-
-    env:
-      OS: ${{ matrix.os }}
-      PYTHON: ${{ matrix.python }}
+        os: [ubuntu-latest]
+        env: ${{ fromJSON(needs.get-environments.outputs.envs) }}
+
+    name: ${{ matrix.env.label }}
+    runs-on: ${{ matrix.os }}
+    continue-on-error: ${{ contains(matrix.env.name, 'pre') }} # make "all-green" pass even if pre-release job fails
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           filter: blob:none
           fetch-depth: 0
       - name: Install uv
-        uses: astral-sh/setup-uv@v5
+        uses: astral-sh/setup-uv@v7
         with:
-          cache-dependency-glob: pyproject.toml
+          python-version: ${{ matrix.env.python }}
+      - name: create hatch environment
+        run: uvx hatch env create ${{ matrix.env.name }}
       - name: run tests using hatch
         env:
           MPLBACKEND: agg
           PLATFORM: ${{ matrix.os }}
           DISPLAY: :42
-        run: uvx hatch test --cover --python ${{ matrix.python }}
+        run: uvx hatch run ${{ matrix.env.name }}:run-cov -v --color=yes -n auto
+      - name: generate coverage report
+        run: |
+          # See https://coverage.readthedocs.io/en/latest/config.html#run-patch
+          test -f .coverage || uvx hatch run ${{ matrix.env.name }}:cov-combine
+          uvx hatch run ${{ matrix.env.name }}:cov-report # report visibly
+          uvx hatch run ${{ matrix.env.name }}:coverage xml # create report for upload
       - name: Upload coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
+        with:
+          fail_ci_if_error: true
+          use_oidc: true
+
+  # Check that all tests defined above pass. This makes it easy to set a single "required" test in branch
+  # protection instead of having to update it frequently. See https://github.com/re-actors/alls-green#why.
+  check:
+    name: Tests pass in all hatch environments
+    if: always()
+    needs:
+      - get-environments
+      - test
+    runs-on: ubuntu-latest
+    steps:
+      - uses: re-actors/alls-green@release/v1
+        with:
+          jobs: ${{ toJSON(needs) }}
diff --git a/.gitignore b/.gitignore
index 31e10b3..bd24e4e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,6 +14,7 @@ __pycache__/
 # Tests and coverage
 /data/
 /node_modules/
+/.coverage*

 # docs
 /docs/generated/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0fcce11..27d8a95 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,24 +7,24 @@ default_stages:
 minimum_pre_commit_version: 2.16.0
 repos:
   - repo: https://github.com/biomejs/pre-commit
-    rev: v1.9.4
+    rev: v2.3.10
     hooks:
       - id: biome-format
         exclude: ^\.cruft\.json$ # inconsistent indentation with cruft - file never to be modified manually.
   - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: v2.5.1
+    rev: v2.11.1
     hooks:
       - id: pyproject-fmt
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.2
+    rev: v0.14.10
     hooks:
-      - id: ruff
+      - id: ruff-check
        types_or: [python, pyi, jupyter]
        args: [--fix, --exit-non-zero-on-fix]
      - id: ruff-format
        types_or: [python, pyi, jupyter]
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v6.0.0
     hooks:
       - id: detect-private-key
       - id: check-ast
@@ -36,12 +36,3 @@ repos:
       # Check that there are no merge conflicts (could be generated by template sync)
       - id: check-merge-conflict
         args: [--assume-in-merge]
-  - repo: local
-    hooks:
-      - id: forbid-to-commit
-        name: Don't commit rej files
-        entry: |
-          Cannot commit .rej files. These indicate merge conflicts that arise during automated template updates.
-          Fix the merge conflicts manually and remove the .rej files.
-        language: fail
-        files: '.*\.rej$'
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 69897c3..6c28477 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,16 +1,16 @@
 # https://docs.readthedocs.io/en/stable/config-file/v2.html
 version: 2
 build:
-  os: ubuntu-20.04
+  os: ubuntu-24.04
   tools:
-    python: "3.10"
-sphinx:
-  configuration: docs/conf.py
-  # disable this for more lenient docs builds
-  fail_on_warning: true
-python:
-  install:
-    - method: pip
-      path: .
-      extra_requirements:
-        - doc
+    python: "3.13"
+    nodejs: latest
+  jobs:
+    create_environment:
+      - asdf plugin add uv
+      - asdf install uv latest
+      - asdf global uv latest
+    build:
+      html:
+        - uvx hatch run docs:build
+        - mv docs/_build $READTHEDOCS_OUTPUT
diff --git a/biome.jsonc b/biome.jsonc
index 2175c16..9f8f220 100644
--- a/biome.jsonc
+++ b/biome.jsonc
@@ -1,9 +1,10 @@
 {
-  "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
+  "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json",
+  "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true },
   "formatter": { "useEditorconfig": true },
   "overrides": [
     {
-      "include": ["./.vscode/*.json", "**/*.jsonc"],
+      "includes": ["./.vscode/*.json", "**/*.jsonc"],
       "json": {
         "formatter": { "trailingCommas": "all" },
         "parser": {
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index d4bb2cb..0000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = .
-BUILDDIR = _build
-
-# Put it first so that "make" without argument is like "make help".
-help:
-	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
-	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/conf.py b/docs/conf.py
index 5d4eb71..ca4245e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -5,11 +5,14 @@
 # https://www.sphinx-doc.org/en/master/usage/configuration.html

 # -- Path setup --------------------------------------------------------------
+import shutil
 import sys
 from datetime import datetime
 from importlib.metadata import metadata
 from pathlib import Path
+
+from sphinxcontrib import katex
+
 HERE = Path(__file__).parent
 sys.path.insert(0, str(HERE / "extensions"))

@@ -19,7 +22,7 @@
 # NOTE: If you installed your project in editable mode, this might be stale.
 # If this is the case, reinstall it to refresh the metadata
 info = metadata("FlowSOM")
-project_name = info["Name"]
+project = info["Name"]
 author = info["Author"]
 copyright = f"{datetime.now():%Y}, {author}."
 version = info["Version"]
@@ -37,7 +40,7 @@
 html_context = {
     "display_github": True,  # Integrate GitHub
     "github_user": "artuurC",
-    "github_repo": project_name,
+    "github_repo": project,
     "github_version": "main",
     "conf_py_path": "/docs/",
 }
@@ -54,9 +57,9 @@
     "sphinx.ext.autosummary",
     "sphinx.ext.napoleon",
     "sphinxcontrib.bibtex",
+    "sphinxcontrib.katex",
     "sphinx_autodoc_typehints",
     "sphinx_tabs.tabs",
-    "sphinx.ext.mathjax",
     "IPython.sphinxext.ipython_console_highlighting",
     "sphinxext.opengraph",
     *[p.stem for p in (HERE / "extensions").glob("*.py")],
@@ -92,7 +95,8 @@
 }

 intersphinx_mapping = {
-    "python": ("https://docs.python.org/3", None),
+    # TODO: replace `3.13` with `3` once ReadTheDocs supports building with Python 3.14
+    "python": ("https://docs.python.org/3.13", None),
     "anndata": ("https://anndata.readthedocs.io/en/stable/", None),
     "scanpy": ("https://scanpy.readthedocs.io/en/stable/", None),
     "numpy": ("https://numpy.org/doc/stable/", None),
@@ -113,7 +117,7 @@
 html_static_path = ["_static"]
 html_css_files = ["css/custom.css"]

-html_title = project_name
+html_title = project

 html_theme_options = {
     "repository_url": repository_url,
@@ -123,6 +127,7 @@
 }

 pygments_style = "default"
+katex_prerender = shutil.which(katex.NODEJS_BINARY) is not None

 nitpick_ignore = [
     # If building the documentation fails because of a missing link that is outside your control,
diff --git a/docs/contributing.md b/docs/contributing.md
index c22bb14..b172fd1 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -1,14 +1,33 @@
 # Contributing guide

-Scanpy provides extensive [developer documentation][scanpy developer guide], most of which applies to this project, too.
-This document will not reproduce the entire content from there.
-Instead, it aims at summarizing the most important information to get you started on contributing.
-
+This document aims at summarizing the most important information for getting you started on contributing to this project.
 We assume that you are already familiar with git and with making pull requests on GitHub.
-If not, please refer to the [scanpy developer guide][].
+For more extensive tutorials that also cover the absolute basics,
+please refer to other resources such as the [pyopensci tutorials][],
+the [scientific Python tutorials][], or the [scanpy developer guide][].
+
+[pyopensci tutorials]: https://www.pyopensci.org/learn.html
+[scientific Python tutorials]: https://learn.scientific-python.org/development/tutorials/

 [scanpy developer guide]: https://scanpy.readthedocs.io/en/latest/dev/index.html

+:::{tip} The *hatch* project manager
+
+We highly recommend familiarizing yourself with [`hatch`][hatch].
+Hatch is a Python project manager that
+
+- manages virtual environments, separately for development, testing and building the documentation.
+  Separating the environments is useful to avoid dependency conflicts.
+- allows running tests locally in different environments (e.g. different Python versions)
+- allows running tasks defined in `pyproject.toml`, e.g. to build the documentation.
+
+While the project is set up with `hatch` in mind,
+it is still possible to use different tools to manage dependencies, such as `uv` or `pip`.
+
+:::
+
+[hatch]: https://hatch.pypa.io/latest/
+
 ## Installing dev dependencies

 In addition to the packages needed to _use_ this package,
@@ -16,29 +35,103 @@ you need additional python packages to [run tests](#writing-tests) and [build th

 :::::{tabs}
 ::::{group-tab} Hatch
-The easiest way is to get familiar with [hatch environments][], with which these tasks are simply:
+
+On the command line, you typically interact with hatch through its command line interface (CLI).
+Running one of the following commands will automatically resolve the environments for testing and
+building the documentation in the background:

 ```bash
 hatch test  # defined in the table [tool.hatch.envs.hatch-test] in pyproject.toml
 hatch run docs:build  # defined in the table [tool.hatch.envs.docs]
 ```

+When using an IDE such as VS Code,
+you’ll have to point the editor at the paths to the virtual environments manually.
+The environment you typically want to use as your main development environment is the `hatch-test`
+environment with the latest Python version.
+
+To get a list of all environments for your project, run
+
+```bash
+hatch env show -i
+```
+
+This will list “Standalone” environments and a table of “Matrix” environments like the following:
+
+```
++------------+---------+--------------------------+----------+---------------------------------+-------------+
+| Name       | Type    | Envs                     | Features | Dependencies                    | Scripts     |
++------------+---------+--------------------------+----------+---------------------------------+-------------+
+| hatch-test | virtual | hatch-test.py3.11-stable | dev      | coverage-enable-subprocess==1.0 | cov-combine |
+|            |         | hatch-test.py3.14-stable | test     | coverage[toml]~=7.4             | cov-report  |
+|            |         | hatch-test.py3.14-pre    |          | pytest-mock~=3.12               | run         |
+|            |         |                          |          | pytest-randomly~=3.15           | run-cov     |
+|            |         |                          |          | pytest-rerunfailures~=14.0      |             |
+|            |         |                          |          | pytest-xdist[psutil]~=3.5       |             |
+|            |         |                          |          | pytest~=8.1                     |             |
++------------+---------+--------------------------+----------+---------------------------------+-------------+
+```
+
+From the `Envs` column, select the environment name you want to use for development.
+In this example, it would be `hatch-test.py3.14-stable`.
+
+Next, create the environment with
+
+```bash
+hatch env create hatch-test.py3.14-stable
+```
+
+Then, obtain the path to the environment using
+
+```bash
+hatch env find hatch-test.py3.14-stable
+```
+
+If you are using VS Code, open the command palette (Ctrl+Shift+P) and search for `Python: Select Interpreter`.
+Choose `Enter Interpreter Path` and paste the path to the virtual environment from above.
+
+In the future, this may become easier through a hatch VS Code extension.
+
+::::
+
+::::{group-tab} uv
+
+A popular choice for managing virtual environments is [uv][].
+The main disadvantage compared to hatch is that it supports only a single environment per project at a time,
+which requires you to mix the dependencies for running tests and building docs.
+This can have undesired side-effects,
+such as having to install a lower version of a library your project depends on,
+only because an outdated sphinx plugin pins an older version.
+
+To initialize a virtual environment in the `.venv` directory of your project, simply run
+
+```bash
+uv sync --all-extras
+```
+
+The `.venv` directory is typically automatically discovered by IDEs such as VS Code.
+
 ::::

 ::::{group-tab} Pip
-If you prefer managing environments manually, you can use `pip`:
+
+Pip is nowadays mostly superseded by environment managers such as [hatch][].
+However, for the sake of completeness, and since it’s ubiquitously available,
+we describe how you can manage environments manually using `pip`:

 ```bash
-cd FlowSOM
 python3 -m venv .venv
 source .venv/bin/activate
 pip install -e ".[dev,test,doc]"
 ```

+The `.venv` directory is typically automatically discovered by IDEs such as VS Code.
+
 ::::
 :::::

 [hatch environments]: https://hatch.pypa.io/latest/tutorials/environment/basic-usage/
+[uv]: https://docs.astral.sh/uv/

 ## Code-style
@@ -55,7 +148,7 @@ in the root of the repository.

 Pre-commit will automatically download all dependencies when it is run for the first time.
 Alternatively, you can rely on the [pre-commit.ci][] service enabled on GitHub.
-If you didn't run `pre-commit` before pushing changes to GitHub it will automatically commit fixes to your pull request, or show an error message.
+If you didn’t run `pre-commit` before pushing changes to GitHub it will automatically commit fixes to your pull request, or show an error message.

 If pre-commit.ci added a commit on a branch you still have been working on locally, simply use

@@ -102,6 +195,14 @@ hatch test --all  # test with all supported Python versions

 ::::

+::::{group-tab} uv
+
+```bash
+uv run pytest
+```
+
+::::
+
 ::::{group-tab} Pip

 ```bash
@@ -118,12 +219,17 @@ in the root of the repository.

 ### Continuous integration

-Continuous integration will automatically run the tests on all pull requests and test
+Continuous integration via GitHub Actions will automatically run the tests on all pull requests and test
 against the minimum and maximum supported Python version.

-Additionally, there's a CI job that tests against pre-releases of all dependencies (if there are any).
+Additionally, there’s a CI job that tests against pre-releases of all dependencies (if there are any).
 The purpose of this check is to detect incompatibilities of new package versions early on and
-gives you time to fix the issue or reach out to the developers of the dependency before the package is released to a wider audience.
+to give you time to fix the issue or reach out to the developers of the dependency before the package
+is released to a wider audience.
+
+The CI job is defined in `.github/workflows/test.yaml`;
+however, the single point of truth for CI jobs is the Hatch test matrix defined in `pyproject.toml`.
+This means that local testing via hatch and remote testing on CI test against the same Python versions and use the same environments.

 ## Publishing a release

@@ -189,7 +295,7 @@ please check out [this feature request][issue-render-notebooks] in the `cookiecu

 (docs-building)=

-#### Building the docs locally
+### Building the docs locally

 :::::{tabs}
 ::::{group-tab} Hatch
@@ -201,12 +307,22 @@ hatch run docs:open
 ```

 ::::

+::::{group-tab} uv
+
+```bash
+cd docs
+uv run sphinx-build -M html . _build -W
+(xdg-)open _build/html/index.html
+```
+
+::::
+
 ::::{group-tab} Pip

 ```bash
 source .venv/bin/activate
 cd docs
-make html
+sphinx-build -M html . _build -W
 (xdg-)open _build/html/index.html
 ```
diff --git a/pyproject.toml b/pyproject.toml
index a3a1b9f..4a5704f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,63 +14,81 @@ maintainers = [
 authors = [
   { name = "Artuur Couckuyt" },
 ]
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 classifiers = [
   "Programming Language :: Python :: 3 :: Only",
-  "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
   "Programming Language :: Python :: 3.13",
+  "Programming Language :: Python :: 3.14",
 ]
 dependencies = [
   "anndata",
   # for debug logging (referenced from the issue template)
   "session-info2",
 ]
-optional-dependencies.dev = [
+# https://docs.pypi.org/project_metadata/#project-urls
+urls.Documentation = "https://FlowSOM.readthedocs.io/"
+urls.Homepage = "https://github.com/artuurC/FlowSOM"
+urls.Source = "https://github.com/artuurC/FlowSOM"
+
+[dependency-groups]
+dev = [
   "pre-commit",
   "twine>=4.0.2",
 ]
-optional-dependencies.doc = [
-  "docutils>=0.8,!=0.18.*,!=0.19.*",
+test = [
+  "coverage>=7.10",
+  "pytest",
+  "pytest-cov", # For VS Code’s coverage functionality
+]
+doc = [
   "ipykernel",
   "ipython",
   "myst-nb>=1.1",
   "pandas",
-  # Until pybtex >0.24.0 releases: https://bitbucket.org/pybtex-devs/pybtex/issues/169/
-  "setuptools",
-  "sphinx>=4",
+  "sphinx>=8.1",
   "sphinx-autodoc-typehints",
   "sphinx-book-theme>=1",
   "sphinx-copybutton",
   "sphinx-tabs",
   "sphinxcontrib-bibtex>=1",
+  "sphinxcontrib-katex",
   "sphinxext-opengraph",
 ]
-optional-dependencies.test = [
-  "coverage",
-  "pytest",
-]
-# https://docs.pypi.org/project_metadata/#project-urls
-urls.Documentation = "https://FlowSOM.readthedocs.io/"
-urls.Homepage = "https://github.com/artuurC/FlowSOM"
-urls.Source = "https://github.com/artuurC/FlowSOM"

 [tool.hatch.build.targets.wheel]
 packages = [ "src/FlowSOM" ]

 [tool.hatch.envs.default]
 installer = "uv"
-features = [ "dev" ]
+dependency-groups = [ "dev" ]

 [tool.hatch.envs.docs]
-features = [ "doc" ]
-scripts.build = "sphinx-build -M html docs docs/_build {args}"
+dependency-groups = [ "doc" ]
+scripts.build = "sphinx-build -M html docs docs/_build -W {args}"
 scripts.open = "python -m webbrowser -t docs/_build/html/index.html"
 scripts.clean = "git clean -fdX -- {args:docs}"

+# Test the lowest and highest supported Python versions with normal deps
+[[tool.hatch.envs.hatch-test.matrix]]
+deps = [ "stable" ]
+python = [ "3.11", "3.14" ]
+
+# Test the newest supported Python version also with pre-release deps
+[[tool.hatch.envs.hatch-test.matrix]]
+deps = [ "pre" ]
+python = [ "3.14" ]
+
 [tool.hatch.envs.hatch-test]
-features = [ "test" ]
+dependency-groups = [ "dev", "test" ]
+
+[tool.hatch.envs.hatch-test.overrides]
+# If the matrix variable `deps` is set to "pre",
+# set the environment variable `UV_PRERELEASE` to "allow".
+matrix.deps.env-vars = [
+  { key = "UV_PRERELEASE", value = "allow", if = [ "pre" ] },
+]

 [tool.ruff]
 line-length = 120
@@ -112,15 +130,16 @@ lint.per-file-ignores."docs/*" = [ "I" ]
 lint.per-file-ignores."tests/*" = [ "D" ]
 lint.pydocstyle.convention = "numpy"

-[tool.pytest.ini_options]
+[tool.pytest]
+strict = true
 testpaths = [ "tests" ]
-xfail_strict = true
 addopts = [
   "--import-mode=importlib", # allow using test files with same name
 ]

 [tool.coverage.run]
 source = [ "FlowSOM" ]
+patch = [ "subprocess" ]
 omit = [
   "**/test_*.py",
 ]