From bcef2e5847e17e1cfed9682ef609d907cce77805 Mon Sep 17 00:00:00 2001
From: Dima Gerasimov
Date: Tue, 20 May 2025 20:46:56 +0100
Subject: [PATCH] general: update CI files

- set min version to python 3.9
- switch build backend to hatch
- publish via uv
---
 .ci/release-uv                  |  60 +++++++++++++++
 .ci/run                         |  16 ++--
 .github/workflows/main.yml      |  46 +++++++-----
 conftest.py                     |  11 ++-
 mypy.ini                        |  11 ++-
 pyproject.toml                  |  39 +++++-----
 pytest.ini                      |   7 ++
 ruff.toml                       | 127 ++++++++++++++++++++++++++++++--
 src/orgparse/extra.py           |   4 +-
 src/orgparse/tests/test_data.py |   2 +-
 tox.ini                         |  29 +++++---
 11 files changed, 276 insertions(+), 76 deletions(-)
 create mode 100755 .ci/release-uv

diff --git a/.ci/release-uv b/.ci/release-uv
new file mode 100755
index 0000000..c56697c
--- /dev/null
+++ b/.ci/release-uv
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+'''
+Deploys Python package onto [[https://pypi.org][PyPi]] or [[https://test.pypi.org][test PyPi]].
+
+- running manually
+
+  You'll need =UV_PUBLISH_TOKEN= env variable
+
+- running on Github Actions
+
+  Instead of env variable, relies on configuring github as Trusted publisher (https://docs.pypi.org/trusted-publishers/) -- both for test and regular pypi
+
+  It's running as =pypi= job in [[file:.github/workflows/main.yml][Github Actions config]].
+  Packages are deployed on:
+  - every master commit, onto test pypi
+  - every new tag, onto production pypi
+'''
+
+UV_PUBLISH_TOKEN = 'UV_PUBLISH_TOKEN'
+
+import argparse
+import os
+import shutil
+from pathlib import Path
+from subprocess import check_call
+
+is_ci = os.environ.get('CI') is not None
+
+def main() -> None:
+    p = argparse.ArgumentParser()
+    p.add_argument('--use-test-pypi', action='store_true')
+    args = p.parse_args()
+
+    publish_url = ['--publish-url', 'https://test.pypi.org/legacy/'] if args.use_test_pypi else []
+
+    root = Path(__file__).absolute().parent.parent
+    os.chdir(root)  # just in case
+
+    if is_ci:
+        # see https://github.com/actions/checkout/issues/217
+        check_call('git fetch --prune --unshallow'.split())
+
+    # TODO ok, for now uv won't remove dist dir if it already exists
+    # https://github.com/astral-sh/uv/issues/10293
+    dist = root / 'dist'
+    if dist.exists():
+        shutil.rmtree(dist)
+
+    # todo what is --force-pep517?
+    check_call(['uv', 'build'])
+
+    if not is_ci:
+        # CI relies on trusted publishers so doesn't need env variable
+        assert UV_PUBLISH_TOKEN in os.environ, f'no {UV_PUBLISH_TOKEN} passed'
+
+    check_call(['uv', 'publish', *publish_url])
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.ci/run b/.ci/run
index b2c184d..c881818 100755
--- a/.ci/run
+++ b/.ci/run
@@ -11,6 +11,8 @@ if ! command -v sudo; then
     }
 fi
 
+# --parallel-live to show outputs while it's running
+tox_cmd='run-parallel --parallel-live'
 if [ -n "${CI-}" ]; then
     # install OS specific stuff here
     case "$OSTYPE" in
@@ -20,7 +22,8 @@ if [ -n "${CI-}" ]; then
         ;;
     cygwin* | msys* | win*)
         # windows
-        :
+        # ugh. parallel stuff seems super flaky under windows, some random failures, "file used by other process" and crap like that
+        tox_cmd='run'
         ;;
     *)
         # must be linux?
@@ -29,12 +32,5 @@ if [ -n "${CI-}" ]; then
     esac
 fi
 
-
-PY_BIN="python3"
-# some systems might have python pointing to python3
-if ! command -v python3 &> /dev/null; then
-    PY_BIN="python"
-fi
-
-"$PY_BIN" -m pip install --user tox
-"$PY_BIN" -m tox --parallel --parallel-live "$@"
+# NOTE: expects uv installed
+uv tool run --with tox-uv tox $tox_cmd "$@"
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 80b66b2..3599e6a 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -6,7 +6,6 @@ on:
     branches: '*'
     tags: 'v[0-9]+.*' # only trigger on 'release' tags for PyPi
     # Ideally I would put this in the pypi job... but github syntax doesn't allow for regexes there :shrug:
-    # P.S. fuck made up yaml DSLs.
   pull_request: # needed to trigger on others' PRs
     # Note that people who fork it need to go to "Actions" tab on their fork and click "I understand my workflows, go ahead and enable them".
   workflow_dispatch: # needed to trigger workflows manually
@@ -25,23 +24,31 @@ jobs:
       fail-fast: false
       matrix:
         platform: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
+        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
         # vvv just an example of excluding stuff from matrix
         # exclude: [{platform: macos-latest, python-version: '3.6'}]
 
     runs-on: ${{ matrix.platform }}
 
+    # useful for 'optional' pipelines
+    # continue-on-error: ${{ matrix.platform == 'windows-latest' }}
+
     steps:
     # ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
     - run: echo "$HOME/.local/bin" >> $GITHUB_PATH
 
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
       with:
-        python-version: ${{ matrix.python-version }}
+        submodules: recursive
+        fetch-depth: 0 # nicer to have all git history when debugging/for tests
 
-    - uses: actions/checkout@v3
+    - uses: actions/setup-python@v5
       with:
-        submodules: recursive
+        python-version: ${{ matrix.python-version }}
+
+    - uses: astral-sh/setup-uv@v5
+      with:
+        enable-cache: false # we don't have lock files, so can't use them as cache key
 
     - uses: mxschmitt/action-tmate@v3
       if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}
@@ -50,8 +57,9 @@
     - run: bash .ci/run
 
     - if: matrix.platform == 'ubuntu-latest' # no need to compute coverage for other platforms
-      uses: actions/upload-artifact@v3
+      uses: actions/upload-artifact@v4
       with:
+        include-hidden-files: true
        name: .coverage.mypy_${{ matrix.platform }}_${{ matrix.python-version }}
        path: .coverage.mypy/
 
@@ -59,30 +67,32 @@
   pypi:
     runs-on: ubuntu-latest
     needs: [build] # add all other jobs here
-
+    permissions:
+      # necessary for Trusted Publishing
+      id-token: write
     steps:
     # ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
     - run: echo "$HOME/.local/bin" >> $GITHUB_PATH
 
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
       with:
-        python-version: '3.8'
+        submodules: recursive
 
-    - uses: actions/checkout@v3
+    - uses: actions/setup-python@v5
       with:
-        submodules: recursive
+        python-version: '3.10'
+
+    - uses: astral-sh/setup-uv@v5
+      with:
+        enable-cache: false # we don't have lock files, so can't use them as cache key
 
     - name: 'release to test pypi'
       # always deploy merged master to test pypi
       if: github.event_name != 'pull_request' && github.event.ref == 'refs/heads/master'
-      env:
-        TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD_TEST }}
-      run: pip3 install --user --upgrade build twine && .ci/release --test
+      run: .ci/release-uv --use-test-pypi
 
     - name: 'release to pypi'
       # always deploy tags to release pypi
       # NOTE: release tags are guarded by on: push: tags on the top
      if: github.event_name != 'pull_request' && startsWith(github.event.ref, 'refs/tags')
-      env:
-        TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
-      run: pip3 install --user --upgrade build twine && .ci/release
+      run: .ci/release-uv
diff --git a/conftest.py b/conftest.py
index 466da24..91a43a3 100644
--- a/conftest.py
+++ b/conftest.py
@@ -2,6 +2,7 @@
 # without it, pytest can't discover the package root for some reason
 # also see https://github.com/karlicoss/pytest_namespace_pkgs for more
 
+import os
 import pathlib
 from typing import Optional
 
@@ -24,6 +25,10 @@ def resolve_package_path(path: pathlib.Path) -> Optional[pathlib.Path]:
     for parent in result.parents:
         if str(parent) in namespace_pkg_dirs:
             return parent
+    if os.name == 'nt':
+        # ??? for some reason on windows it is trying to call this against conftest? but not on linux/osx
+        if path.name == 'conftest.py':
+            return resolve_pkg_path_orig(path)
     raise RuntimeError("Couldn't determine path for ", path)
 
 _pytest.pathlib.resolve_package_path = resolve_package_path
@@ -34,5 +39,9 @@ def resolve_package_path(path: pathlib.Path) -> Optional[pathlib.Path]:
 # not sure what are the consequences.. maybe it wouldn't be able to run against installed packages? not sure..
 search_pypath_orig = _pytest.main.search_pypath
 def search_pypath(module_name: str) -> str:
-    return str(root_dir)
+    mpath = root_dir / module_name.replace('.', os.sep)
+    if not mpath.is_dir():
+        mpath = mpath.with_suffix('.py')
+    assert mpath.exists(), mpath  # just in case
+    return str(mpath)
 _pytest.main.search_pypath = search_pypath
diff --git a/mypy.ini b/mypy.ini
index 5a21a85..7b1e535 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,14 +1,17 @@
 [mypy]
-namespace_packages = True
 pretty = True
 show_error_context = True
-show_error_codes = True
 show_column_numbers = True
 show_error_end = True
-warn_unused_ignores = True
+
 check_untyped_defs = True
-enable_error_code = possibly-undefined
+
+# see https://mypy.readthedocs.io/en/stable/error_code_list2.html
+warn_redundant_casts = True
 strict_equality = True
+warn_unused_ignores = True
+enable_error_code = deprecated,redundant-expr,possibly-undefined,truthy-bool,truthy-iterable,ignore-without-code,unused-awaitable
+
 
 # an example of suppressing
 # [mypy-my.config.repos.pdfannots.pdfannots]
diff --git a/pyproject.toml b/pyproject.toml
index 30719be..359a928 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,9 @@
 [project]
-dynamic = ["version"] # version is managed by setuptools_scm
+dynamic = ["version"] # version is managed by build backend
 name = "orgparse"
+dependencies = [
+]
+requires-python = ">=3.9"
 description = "orgparse - Emacs org-mode parser in Python"
 license = {file = "LICENSE"}
 authors = [
@@ -24,10 +27,8 @@ classifiers = [
 Homepage = "https://github.com/karlicoss/orgparse"
 
 [project.optional-dependencies]
+[dependency-groups]
 testing = [
-    "pytest",
-]
-linting = [
     "pytest",
     "ruff",
     "mypy",
@@ -35,26 +36,20 @@ linting = [
 ]
 
 
-[build-system]
-requires = ["setuptools", "setuptools-scm"]
-build-backend = "setuptools.build_meta"
-[tool.setuptools_scm]
-version_scheme = "python-simplified-semver"
-local_scheme = "dirty-tag"
+# workaround for error during uv publishing
+# see https://github.com/astral-sh/uv/issues/9513#issuecomment-2519527822
+[tool.setuptools]
+license-files = []
 
 
-# nice things about pyproject.toml
-# - zip_safe=False isn't neccessary anymore
-# - correctly discovers namespace packages by defuilt?
-# - correctly handles py.typed by default?
-# - handles src layout automatically https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
+[build-system]
+requires = ["hatchling", "hatch-vcs"]
+build-backend = "hatchling.build"
 
-# things I'm not sure about yet
-# - avoiding dupliation/variable reuse?
-# - file/git dependencies?
-# - unclear how to specify namespace package order https://github.com/seanbreckenridge/reorder_editable/issues/2
+# unfortunately have to duplicate project name here atm, see https://github.com/pypa/hatch/issues/1894
+[tool.hatch.build.targets.wheel]
+packages = ["src/orgparse"]
 
 
-# TODO
-# - maybe it has a nicer pypi upload system? not sure
-# e.g. possibly use hatch/flit/pdb/poetry -- but not sure what's the benefit tbh
+[tool.hatch.version]
+source = "vcs"
diff --git a/pytest.ini b/pytest.ini
index 20c3704..226488b 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,7 +1,14 @@
 [pytest]
 # discover files that don't follow test_ naming. Useful to keep tests along with the source code
 python_files = *.py
+
+# this setting only impacts package/module naming under pytest, not the discovery
+consider_namespace_packages = true
+
 addopts =
+    # prevent pytest cache from being created... it craps into project dir and I never use it anyway
+    -p no:cacheprovider
+
     # -rap to print tests summary even when they are successful
     -rap
     --verbose
diff --git a/ruff.toml b/ruff.toml
index 0be93e0..f6fd8b7 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,4 +1,54 @@
-ignore = [
+lint.extend-select = [
+    "F",      # flakes rules -- default, but extend just in case
+    "E",      # pycodestyle -- default, but extend just in case
+    "W",      # various warnings
+
+    "B",      # 'bugbear' set -- various possible bugs
+    "C4",     # flake8-comprehensions -- unnecessary list/map/dict calls
+    "COM",    # trailing commas
+    "EXE",    # various checks wrt executable files
+    "I",      # sort imports
+    "ICN",    # various import conventions
+    "FBT",    # detect use of boolean arguments
+    "FURB",   # various rules
+    "PERF",   # various potential performance speedups
+    "PD",     # pandas rules
+    "PIE",    # 'misc' lints
+    "PLC",    # pylint convention rules
+    "PLR",    # pylint refactor rules
+    "PLW",    # pylint warnings
+    "PT",     # pytest stuff
+    "PYI",    # various type hinting rules
+    "RET",    # early returns
+    "RUF",    # various ruff-specific rules
+    "TID",    # various imports suggestions
+    "TRY",    # various exception handling rules
+    "UP",     # detect deprecated python stdlib stuff
+    "FA",     # suggest using from __future__ import annotations
+    "PTH",    # pathlib migration
+    "ARG",    # unused argument checks
+    "A",      # builtin shadowing
+    "G",      # logging stuff
+    # "EM",   # TODO hmm could be helpful to prevent duplicate err msg in traceback.. but kinda annoying
+
+    # "ALL",  # uncomment this to check for new rules!
+]
+
+# Preserve types, even if a file imports `from __future__ import annotations`
+# we need this for cachew to work with HPI types on 3.9
+# can probably remove after 3.10?
+lint.pyupgrade.keep-runtime-typing = true
+
+lint.ignore = [
+    "D",    # annoying nags about docstrings
+    "N",    # pep naming
+    "TCH",  # type checking rules, mostly just suggests moving imports under TYPE_CHECKING
+    "S",    # bandit (security checks) -- tends to be not very useful, lots of nitpicks
+    "DTZ",  # datetimes checks -- complaining about missing tz and mostly false positives
+    "FIX",  # complains about fixmes/todos -- annoying
+    "TD",   # complains about todo formatting -- too annoying
+    "ANN",  # missing type annotations? seems way too strict though
+
 ### too opinionated style checks
     "E501",  # too long lines
     "E702",  # Multiple statements on one line (semicolon)
@@ -16,10 +66,75 @@
 ## might be nice .. but later and I don't wanna make it strict
     "E402",  # Module level import not at top of file
 
-### maybe consider these soon
-# sometimes it's useful to give a variable a name even if we don't use it as a documentation
-# on the other hand, often is a sign of error
-    "F841",  # Local variable `count` is assigned to but never used
-    "F401",  # imported but unused
-
+    "RUF100",  # unused noqa -- handle later
+    "RUF012",  # mutable class attrs should be annotated with ClassVar... ugh pretty annoying for user configs
+
+### these are just nitpicky, we usually know better
+    "PLR0911",  # too many return statements
+    "PLR0912",  # too many branches
+    "PLR0913",  # too many function arguments
+    "PLR0915",  # too many statements
+    "PLR1714",  # consider merging multiple comparisons
+    "PLR2044",  # line with empty comment
+    "PLR5501",  # use elif instead of else if
+    "PLR2004",  # magic value in comparison -- super annoying in tests
+###
+    "PLR0402",  # import X.Y as Y -- TODO maybe consider enabling it, but double check
+
+    "B009",  # calling getattr with constant attribute -- this is useful to convince mypy
+    "B010",  # same as above, but setattr
+    "B011",  # complains about assert False
+    "B017",  # pytest.raises(Exception)
+    "B023",  # seems to result in false positives?
+    "B028",  # suggest using explicit stacklevel? TODO double check later, but not sure it's useful
+
+    # complains about useless pass, but has sort of a false positive if the function has a docstring?
+    # this is common for click entrypoints (e.g. in __main__), so disable
+    "PIE790",
+
+    # a bit too annoying, offers to convert for loops to list comprehension
+    # , which may hurt readability
+    "PERF401",
+
+    # suggests not using exceptions in for loops
+    # we do use this technique a lot, plus in 3.11 happy path exception handling is "zero-cost"
+    "PERF203",
+
+    "RET504",  # unnecessary assignment before returning -- that can be useful for readability
+    "RET505",  # unnecessary else after return -- can hurt readability
+
+    "PLW0603",  # global variable update.. we usually know why we are doing this
+    "PLW2901",  # for loop variable overwritten, usually this is intentional
+
+    "PT011",  # pytest raises is too broad
+    "PT012",  # pytest raises should contain a single statement
+
+    "COM812",  # trailing comma missing -- mostly just being annoying with long multiline strings
+
+    "PD901",  # generic variable name df
+
+    "TRY003",  # suggests defining exception messages in exception class -- kinda annoying
+    "TRY004",  # prefer TypeError -- don't see the point
+    "TRY201",  # raise without specifying exception name -- sometimes hurts readability
+    "TRY400",  # TODO double check this, might be useful
+    "TRY401",  # redundant exception in logging.exception call? TODO double check, might result in excessive logging
+
+    "PGH",  # TODO force error code in mypy instead? although it also has blanket noqa rule
+
+    "TID252",  # Prefer absolute imports over relative imports from parent modules
+
+    "UP038",  # suggests using | (union) in isinstance checks.. but it results in slower code
+
+    ## too annoying
+    "T20",      # just complains about prints and pprints
+    "Q",        # flake quotes, too annoying
+    "C90",      # some complexity checking
+    "G004",     # logging statement uses f string
+    "ERA001",   # commented out code
+    "SLF001",   # private member accessed
+    "BLE001",   # do not catch 'blind' Exception
+    "INP001",   # complains about implicit namespace packages
+    "SIM",      # some if statements crap
+    "RSE102",   # complains about missing parens in exceptions
+    ##
 ]
diff --git a/src/orgparse/extra.py b/src/orgparse/extra.py
index cd51aba..5fefcd6 100644
--- a/src/orgparse/extra.py
+++ b/src/orgparse/extra.py
@@ -96,7 +96,7 @@ def emit() -> Rich:
         if last is Gap:
             res = Gap()
         elif last is Table:
-            res = Table(group)  # type: ignore
+            res = Table(group)  # type: ignore[assignment]
         else:
             raise RuntimeError(f'Unexpected type {last}')
         group = []
@@ -106,7 +106,7 @@ def emit() -> Rich:
         if RE_TABLE_ROW.match(line) or RE_TABLE_SEPARATOR.match(line):
             cur = Table
         else:
-            cur = Gap  # type: ignore
+            cur = Gap  # type: ignore[assignment]
         if cur is not last:
             if len(group) > 0:
                 yield emit()
diff --git a/src/orgparse/tests/test_data.py b/src/orgparse/tests/test_data.py
index f315878..60e4db0 100644
--- a/src/orgparse/tests/test_data.py
+++ b/src/orgparse/tests/test_data.py
@@ -12,7 +12,7 @@
 
 def load_data(path):
     """Load data from python file"""
-    ns = {}  # type: ignore
+    ns = {}  # type: ignore[var-annotated]
     # read_bytes() and compile hackery to avoid encoding issues (e.g. see 05_tags)
     exec(compile(Path(path).read_bytes(), path, 'exec'), ns)
     return ns['data']
diff --git a/tox.ini b/tox.ini
index dc04b0e..681618b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,8 @@
 [tox]
 minversion = 3.21
 # relies on the correct version of Python installed
-envlist = ruff,tests,mypy
+envlist = tests,mypy
+# FIXME will fix ruff in a later commit
 # https://github.com/tox-dev/tox/issues/20#issuecomment-247788333
 # hack to prevent .tox from crapping to the project directory
 toxworkdir = {env:TOXWORKDIR_BASE:}{toxinidir}/.tox
@@ -11,24 +12,28 @@ toxworkdir = {env:TOXWORKDIR_BASE:}{toxinidir}/.tox
 package_name = "orgparse"
 passenv =
     # useful for tests to know they are running under ci
-  CI
-  CI_*
+    CI
+    CI_*
     # respect user's cache dirs to prevent tox from crapping into project dir
-  MYPY_CACHE_DIR
-  PYTHONPYCACHEPREFIX
+    PYTHONPYCACHEPREFIX
+    MYPY_CACHE_DIR
+    RUFF_CACHE_DIR
+
+# default is 'editable', in which case tox builds a wheel first for some reason? not sure if it makes much sense
+package = uv-editable
 
 
 [testenv:ruff]
+skip_install = true
+dependency_groups = testing
 commands =
-    {envpython} -m pip install --use-pep517 -e .[linting]
-    {envpython} -m ruff src/
+    {envpython} -m ruff check src/ \
+        {posargs}
 
 
-# note: --use-pep517 here is necessary for tox --parallel flag to work properly
-# otherwise it seems that it tries to modify .eggs dir in parallel and it fails
 [testenv:tests]
+dependency_groups = testing
 commands =
-    {envpython} -m pip install --use-pep517 -e .[testing]
     # posargs allow test filtering, e.g. tox ... -- -k test_name
     {envpython} -m pytest \
         --pyargs {[testenv]package_name} \
 [testenv:mypy]
+dependency_groups = testing
 commands =
-    {envpython} -m pip install --use-pep517 -e .[linting]
-    {envpython} -m mypy --install-types --non-interactive \
+    {envpython} -m mypy --no-install-types \
         -p {[testenv]package_name} \
         # txt report is a bit more convenient to view on CI
         --txt-report .coverage.mypy \