diff --git a/.coveragerc b/.coveragerc
index 202ea23..13ad3b1 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,16 +1,5 @@
-# As of version 7.6.1 this is required to get "coverage combine" to do
-# the Right Thing for data from both Unix and Windows.
-[paths]
-merge =
-    pdr/
-    pdr\
-
-# This section needs to be last in this file because the CI scripts
-# need to add things to it.  The 'relative_files' and 'source' options
-# are also necessary to get "coverage combine" to do the Right Thing.
 [run]
-relative_files = True
-source = pdr
+source_pkgs = pdr
 omit =
     */formats/*
     */pvl_utils.py
diff --git a/.github/scripts/adjust-coverage-config b/.github/scripts/adjust-coverage-config
new file mode 100755
index 0000000..018f0fa
--- /dev/null
+++ b/.github/scripts/adjust-coverage-config
@@ -0,0 +1,112 @@
+#! /usr/bin/env python3
+
+"""
+Read a .coveragerc from stdin, adjust it for use in a CI build, and
+write it back out to stdout.
+
+If files are listed on the command line, they are assumed to be
+coverage databases, and a [paths] section is added to the .coveragerc
+(replacing any existing [paths] section) that instructs coverage.py
+to treat the common path prefix of each coverage database's files
+as equivalent.  When used this way, coverage.py must be importable.
+"""
+
+import sys
+
+from argparse import ArgumentParser
+from configparser import ConfigParser
+from pathlib import Path
+
+
+DATABASE_NAME = "coverage.dat"
+
+
+def remap_paths_for_databases(cfg, databases):
+    from coverage import CoverageData
+    from collections import defaultdict
+    from os.path import commonprefix
+    from pathlib import PurePosixPath, PureWindowsPath
+
+    prefixes = set()
+    for db_fname in databases:
+        db = CoverageData(basename=db_fname)
+        db.read()
+        prefixes.add(commonprefix(list(db.measured_files())))
+
+    packages = defaultdict(set)
+    for p in prefixes:
+        if '\\' in p or (len(p) >= 2 and p[0].isalpha() and p[1] == ':'):
+            name = PureWindowsPath(p).name
+        else:
+            name = PurePosixPath(p).name
+        packages[name].add(p)
+
+    pkg_names = sorted(packages.keys())
+
+    cfg["run"]["relative_files"] = "true"
+    cfg["run"]["source_pkgs"] = " ".join(pkg_names)
+
+    cfg["paths"] = {}
+    for pkg in pkg_names:
+        pkg_paths = ['', pkg + '/']
+        pkg_paths.extend(sorted(packages[pkg]))
+        cfg["paths"]["src_" + pkg] = "\n".join(pkg_paths)
+
+
+def adjust_omit(cfg):
+    """
+    Adjust the "omit" setting to be more appropriate for use in CI;
+    the stock .coveragerc is tailored for interactive use.
+    """
+    GLOBS_TO_DROP = (
+        "*/formats/*",
+        "*/pvl_utils.py",
+    )
+
+    run_section = cfg["run"]
+    pruned_omit_globs = []
+    for glob in run_section.get("omit", "").splitlines():
+        glob = glob.strip()
+        if glob not in GLOBS_TO_DROP:
+            pruned_omit_globs.append(glob)
+
+    if (
+        len(pruned_omit_globs) == 0
+        or len(pruned_omit_globs) == 1 and pruned_omit_globs[0] == ""
+    ):
+        del run_section["omit"]
+    else:
+        run_section["omit"] = "\n".join(pruned_omit_globs)
+
+
+def change_database_name(cfg):
+    """
+    Give the coverage database a more convenient name for use in
+    cross-platform CI.
+    """
+    cfg["run"]["data_file"] = str(Path.cwd() / DATABASE_NAME)
+
+
+def main():
+    ap = ArgumentParser(description=__doc__)
+    ap.add_argument("databases", nargs="*",
+                    help="Coverage databases to be merged")
+    args = ap.parse_args()
+
+    # this must match how coverage.py initializes ConfigParser
+    cfg = ConfigParser(interpolation=None)
+
+    with sys.stdin as ifp:
+        cfg.read_file(ifp, source="")
+
+    if args.databases:
+        remap_paths_for_databases(cfg, args.databases)
+
+    adjust_omit(cfg)
+    change_database_name(cfg)
+
+    with sys.stdout as ofp:
+        cfg.write(ofp)
+
+
+main()
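
To make the remapping concrete: suppose one coverage database was recorded under /home/runner/work/pdr/pdr/pdr/ on a Linux runner and another under D:\a\pdr\pdr\pdr\ on a Windows runner (both prefixes invented for illustration). remap_paths_for_databases() groups both prefixes under the package name "pdr" and emits a [paths] stanza equivalent to what this sketch prints; coverage.py treats every entry in a [paths] list as an alias for the first one, so "coverage combine" folds all of them into the relative pdr/ tree:

    # Sketch only: reproduces the [paths] stanza the script would generate
    # for two invented runner prefixes.
    import sys
    from configparser import ConfigParser

    cfg = ConfigParser(interpolation=None)
    cfg["paths"] = {
        "src_pdr": "\n".join([
            "",                                # leading blank line, as the script emits
            "pdr/",                            # canonical, relative form
            "/home/runner/work/pdr/pdr/pdr/",  # hypothetical Linux prefix
            "D:\\a\\pdr\\pdr\\pdr\\",          # hypothetical Windows prefix
        ]),
    }
    cfg.write(sys.stdout)
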
+ """ + cfg["run"]["data_file"] = str(Path.cwd() / DATABASE_NAME) + + +def main(): + ap = ArgumentParser(description=__doc__) + ap.add_argument("databases", nargs="*", + help="Coverage databases to be merged") + args = ap.parse_args() + + # this must match how coverage.py initializes ConfigParser + cfg = ConfigParser(interpolation=None) + + with sys.stdin as ifp: + cfg.read_file(ifp, source="") + + if args.databases: + remap_paths_for_databases(cfg, args.databases) + + adjust_omit(cfg) + change_database_name(cfg) + + with sys.stdout as ofp: + cfg.write(ofp) + + +main() diff --git a/.github/scripts/find-gnu-tar b/.github/scripts/find-gnu-tar new file mode 100755 index 0000000..c090532 --- /dev/null +++ b/.github/scripts/find-gnu-tar @@ -0,0 +1,90 @@ +#! /usr/bin/env python3 + +""" +Find GNU tar, whose pathname transformation options we need, and which +is named 'tar' on Github's Linux and Windows CI runners but 'gtar' on +their MacOS runners. +""" + +import os +import stat +import sys + +from argparse import ArgumentParser +from pathlib import Path + + +if os.name == "nt": + EXE_SUFFIX = ".exe" + def is_executable_mode(mode): + return True +else: + EXE_SUFFIX = "" + def is_executable_mode(mode): + return (stat.S_IMODE(mode) & 0o111) != 0 + + +def is_executable_file(path, debug): + if debug: + sys.stderr.write(f" {path}: ") + try: + st = os.stat(path) + except FileNotFoundError: + if debug: + sys.stderr.write("not found\n") + return False + + if not stat.S_ISREG(st.st_mode): + if debug: + sys.stderr.write("not a regular file (mode={})\n" + .format(stat.filemode(st.st_mode))) + return False + + if not is_executable_mode(st.st_mode): + if debug: + sys.stderr.write("not executable (mode={}, os={})\n" + .format(stat.filemode(st.st_mode, os.name))) + return False + + if debug: + sys.stderr.write(" ok\n") + return True + + + +def find_gnu_tar(debug=False): + GTAR_CMD = "gtar" + EXE_SUFFIX + TAR_CMD = "tar" + EXE_SUFFIX + candidate = None + for d in os.get_exec_path(): + # Resolve symlinks in the directory components of the path, + # but *not* the command name, because changing the command + # name might alter the behavior of the command. + p = Path(d).resolve() + if debug: + sys.stderr.write(f"checking {p}\n") + gtar = p / GTAR_CMD + tar = p / TAR_CMD + if is_executable_file(gtar, debug): + # gtar is preferred + return gtar + if is_executable_file(tar, debug): + # use tar only if we don't find a gtar later in the path + candidate = tar + if candidate is not None: + return candidate + sys.stderr.write(f"neither {GTAR_CMD} nor {TAR_CMD} found in PATH\n") + sys.exit(1) + + +def main(): + ap = ArgumentParser(description=__doc__) + ap.add_argument("--debug", action="store_true", + help="Print debugging information during the search") + args = ap.parse_args() + + sys.stdout.write(str(find_gnu_tar(args.debug)) + "\n") + sys.exit(0) + + +main() diff --git a/.github/workflows/ci-unittest.yml b/.github/workflows/ci-unittest.yml index 639cd03..26041cf 100644 --- a/.github/workflows/ci-unittest.yml +++ b/.github/workflows/ci-unittest.yml @@ -5,213 +5,395 @@ on: branches: - develop - main + - zack/ci-test-installed jobs: + # Inspired by https://github.com/hynek/build-and-inspect-python-package/ + # but revised for our needs. In particular, we have some auxiliary + # files, needed for later steps of this process, that don't belong + # in the sdist or wheel, that we copy into a (separate) job artifact + # so those later steps don't need to check out the source tree. 
diff --git a/.github/workflows/ci-unittest.yml b/.github/workflows/ci-unittest.yml
index 639cd03..26041cf 100644
--- a/.github/workflows/ci-unittest.yml
+++ b/.github/workflows/ci-unittest.yml
@@ -5,213 +5,395 @@ on:
     branches:
       - develop
       - main
+      - zack/ci-test-installed
 
 jobs:
+  # Inspired by https://github.com/hynek/build-and-inspect-python-package/
+  # but revised for our needs.  In particular, we have some auxiliary
+  # files, needed for later steps of this process, that don't belong
+  # in the sdist or wheel, that we copy into a (separate) job artifact
+  # so those later steps don't need to check out the source tree.
+  # In the future, we might want to build a conda package as well
+  # as a wheel.
+  build-package:
+    name: Build PDR packages
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out source tree
+        uses: actions/checkout@v4
+        with:
+          path: src
+      # Future: Any sort of validation of the source code itself,
+      # that only needs to run once, could go here.
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        id: py
+        with:
+          python-version: "3.x"  # latest stable
+          update-environment: false
+      - name: Prepare build environment
+        run: |
+          '${{ steps.py.outputs.python-path }}' -m venv venv
+          . venv/bin/activate
+          pip install --only-binary :all: --upgrade pip
+          pip install --only-binary :all: \
+              build check-wheel-contents twine wheel
+      - name: Build and verify sdist and wheel
+        working-directory: src
+        run: |
+          . ../venv/bin/activate
+          export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)
+          python -BIm build --outdir=../dist
+          check-wheel-contents ../dist/*.whl
+          python -Im twine check --strict ../dist/*
+      - name: Collect auxiliary files for later stages
+        working-directory: src/.github
+        run: |
+          zip -r ../../dist/build-scripts.zip scripts
+      - name: Upload packages
+        uses: actions/upload-artifact@v4
+        with:
+          name: packages
+          path: dist/pdr*
+
+      - name: Upload build helpers
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-scripts
+          path: dist/build-scripts.zip
+          # this artifact exists only to be input to later jobs,
+          # it does not need to be retained for long
+          retention-days: 1
+
+      - name: Report results
+        run: |
+          ( cd dist
+            printf '\n### Contents of the `packages` artifact\n\n```none\n'
+            shasum -a 256 pdr*
+            printf '```\n'
+          ) >> $GITHUB_STEP_SUMMARY
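
A note on SOURCE_DATE_EPOCH above: build backends that follow the reproducible-builds convention stamp the files inside the sdist and wheel with that timestamp instead of the current time, so rebuilding the same commit should yield bit-identical artifacts. The export is equivalent to this sketch (assuming it runs inside a git checkout):

    # Sketch of the SOURCE_DATE_EPOCH computation done in the build step:
    # the commit date of HEAD, as a Unix timestamp.
    import subprocess

    epoch = subprocess.run(
        ["git", "log", "-1", "--pretty=%ct"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    print(f"SOURCE_DATE_EPOCH={epoch}")
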
   conda-unit:
     name: |
-      ${{ format('conda python: {0} os: {1} env: {2}',
-                 matrix.python-version, matrix.os, matrix.environment-file) }}
+      Test: ${{matrix.python}}, conda, ${{matrix.os}}, ${{matrix.deps}}
     strategy:
       fail-fast: false
       matrix:
         # Conda-based installations are our recommendation, so we test
         # comprehensively.
-        os: [ubuntu-latest, macos-latest, windows-latest]
-        environment-file: ["environment.yml", "minimal_environment.yml"]
-        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13.0-rc.1"]
-    runs-on: ${{ matrix.os }}
+        os: [ubuntu, macos, windows]
+        deps: [minimal, full]
+        python: ["3.9", "3.10", "3.11", "3.12"]
+
+    needs: build-package
+    runs-on: "${{ matrix.os }}-latest"
     defaults:
       run:
-        # required for conda environments to work correctly
+        # bash --login is required for conda environments to work correctly;
         # see https://github.com/marketplace/actions/setup-miniconda#important
-        shell: bash -elo pipefail {0}
+        # something in conda's shell scripts is incompatible with -u
+        shell: bash -l -e -o pipefail {0}
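
For a sense of scale, "comprehensively" above works out to 3 OSes x 2 dependency sets x 4 Pythons = 24 conda jobs per push. A quick sketch that enumerates the job names the name: template will produce:

    # Enumerate the 24 job names generated by the conda matrix above.
    from itertools import product

    for py, os_, deps in product(
        ["3.9", "3.10", "3.11", "3.12"],
        ["ubuntu", "macos", "windows"],
        ["minimal", "full"],
    ):
        print(f"Test: {py}, conda, {os_}, {deps}")
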
     steps:
-      - name: Check out code
-        uses: actions/checkout@v4
+      - name: Get build helpers
+        uses: actions/download-artifact@v4
         with:
-          path: src
-      - name: Install Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+          name: build-scripts
+      - name: Extract build helpers
+        run: unzip build-scripts.zip
+
+      - name: Get just-built PDR package
+        uses: actions/download-artifact@v4
         with:
-          python-version: ${{ matrix.python-version }}
-      - name: Install dependencies using Conda
+          name: packages
+          path: dist
+
+      - name: Extract environment file
+        run: |
+          tar=$(scripts/find-gnu-tar)
+          if [[ "${{ matrix.deps }}" = minimal ]]; then
+              conda_env=minimal_environment.yml
+          else
+              conda_env=environment.yml
+          fi
+          "$tar" --extract --to-stdout -f dist/pdr-*.tar.gz \
+              --wildcards \*/$conda_env > environment.yml
+
+      - name: Install Python and dependencies using miniconda
         uses: conda-incubator/setup-miniconda@v3
         with:
+          python-version: ${{ matrix.python }}
           channels: conda-forge
-          environment-file: src/${{ matrix.environment-file }}
-      - name: Install test dependencies
+          environment-file: environment.yml
+
+      - name: Install test tools
         run: |
-          pip install pytest pytest-cov
-      - name: Prepare source tree for testing
-        # Run unit tests with a read-only source tree, to weed out
-        # places where the tests and/or pdr core write files into the
-        # source tree.
-        #
-        # This requires us to tell pytest-cov to write its data file
-        # somewhere outside the source tree (which in turn is why we
-        # have the source tree in a subdirectory).  That can only be
-        # done by adding options to .coveragerc, which of course has
-        # to be done *before* the tree is made read-only.  Caution:
-        # for this to work correctly, the [run] section must be the
-        # last section in .coveragerc.
-        #
-        # Fun fact: on Windows `attrib +r` has *no effect* on directories.
-        # You can have read-only directories but you have to drop down
-        # a level and mess directly with ACLs.
-        #
-        # The 'git config' command is just to squelch a warning from
-        # the "post check out code" step, which will try to run the
-        # same command if we don't do it now, and fail because it
-        # can't write .git/config.
+          conda install pytest pytest-cov
+
+      - name: Install PDR wheel
         run: |
-          printf 'data_file = %s\n' "$PWD/pytest-cov.dat" \
-              >> src/.coveragerc
-          (cd src && git config --local --unset-all \
-              'http.https://github.com/.extraheader')
-          case "$OSTYPE" in
-            (msys|cygwin|win32)
-              export MSYS2_ARG_CONV_EXCL='*'
-              icacls src /inheritance:r \
+          pip install --no-deps --only-binary :all: dist/pdr-*-py3-*.whl
+
+      - name: Prepare for testing
+        run: |
+          tar=$(scripts/find-gnu-tar)
+          mkdir src
+          "$tar" --extract -f dist/pdr-*.tar.gz \
+              --strip-components=1 --directory=src \
+              --wildcards \*/.coveragerc \*/tests
+
+          scripts/adjust-coverage-config < src/.coveragerc > coveragerc
+
+          # Make the directory tree containing the tests, and the directory
+          # tree containing the actual PDR .py files, read-only.  This is
+          # because we used to have a lot of code that used these locations
+          # for scratch space and we don't want it coming back.
+          readonly_dirs="dist src"
+
+          # TODO: Move this block to another helper script to avoid
+          # more repetition in the next job and also the MSYS2 gunk.
+          # (I wonder how hard it would be to reimplement the icacls
+          # invocations using direct system calls from Python.)
+          host_os=$(python3 -c 'import os; print(os.name)')
+          if [ $host_os = nt ]; then
+              # Because of backward compatibility with DOS, attrib +r has no
+              # effect on directories.  You have to use ACLs instead.
+              export MSYS2_ARG_CONV_EXCL='*'
+              for f in $readonly_dirs; do
+                  icacls "$f" /inheritance:r \
                       /grant 'CREATOR OWNER:(OI)(CI)(RX,D,DC,WDAC)' \
                       /grant 'BUILTIN\Users:(OI)(CI)RX'
-              icacls 'src\*.*' /reset /t /l /q
-              ;;
-            (*)
-              chmod -R a-w src
-              ;;
-          esac
+                  icacls "$f"'\*.*' /reset /t /l /q
+              done
+          else
+              # We only make $CONDA_PREFIX read-only on Unixy platforms,
+              # because doing the same on Windows makes setup-miniconda's
+              # cleanup pass hang.  I don't understand why actions even
+              # *have* a cleanup pass given that they run right before
+              # the entire VM is nuked from orbit, but so it goes.  We
+              # *probably* don't have any code that is both conda- and
+              # Windows-specific that writes to the PDR package directory.
+              chmod -R a-w $readonly_dirs $CONDA_PREFIX
+          fi
+
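
Regarding the TODO above about reimplementing the icacls invocations: the Unix half is straightforward in Python, as the sketch below shows; the Windows half is the hard part, because os.chmod there only toggles the per-file read-only attribute, which (as the step comments note) does not protect directories. make_tree_readonly is a hypothetical helper, not part of this patch:

    # Sketch of "chmod -R a-w root" in Python, POSIX only; Windows would
    # still need icacls or pywin32.
    import os
    import stat

    def make_tree_readonly(root):
        for dirpath, dirnames, filenames in os.walk(root, topdown=False):
            for name in filenames + dirnames:
                path = os.path.join(dirpath, name)
                mode = os.lstat(path).st_mode
                if not stat.S_ISLNK(mode):
                    os.chmod(path, stat.S_IMODE(mode) & ~0o222)
        mode = os.lstat(root).st_mode
        os.chmod(root, stat.S_IMODE(mode) & ~0o222)
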
       - name: Run unit tests
-        # Because the source tree is read-only we need to turn off
-        # .pyc generation and the pytest cache, neither of which is
-        # actually ever going to get used anyway.
-        #
-        # The -r fEsX option to pytest means show details of tests
-        # that failed, errored, skipped or passed when they were
-        # expected to fail.
         run: |
-          cd src
           export PYTHONDONTWRITEBYTECODE=1
-          pytest -p no:cacheprovider -r fEsX --cov=pdr --cov-branch --cov-report=
+          pytest -p no:cacheprovider --basetemp=$PWD/tmp --cov \
+              --cov-config=$PWD/coveragerc --cov-branch --cov-report=term \
+              --rootdir=$PWD/src --import-mode=importlib -r fEsX
+
       - name: Store coverage data
         uses: actions/upload-artifact@v4
         with:
-          name: coverage-data-conda-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.environment-file }}
-          path: pytest-cov.dat
+          name: coverage-data-conda-${{ matrix.os }}-${{ matrix.python }}-${{ matrix.deps }}
+          path: coverage.dat
           if-no-files-found: ignore
           # unmerged coverage analysis data can be ephemeral
           retention-days: 1
 
   pip-unit:
     name: |
-      ${{ format('pip python: {0} os: {1}',
-                 matrix.python-version, matrix.os) }}
+      Test: ${{matrix.python}}, pip, ${{matrix.os}}, ${{matrix.deps}}
+    needs: build-package
     strategy:
       fail-fast: false
       matrix:
         # This test exists to verify that the dependencies declared
         # for pip-based installs are accurate, and this should not be
-        # drastically affected by which python we have.  Only test
-        # with the oldest supported and latest stable python, but on
-        # all three available OSes.
-        os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: ["3.9", "3.12"]
-    runs-on: ${{ matrix.os }}
+        # drastically affected by which python we have, so we don't
+        # test all the python versions in the support range, just the
+        # ends.  Also, this is our opportunity to test against the
+        # upcoming python 3.13, which is not available via conda yet,
+        # but _is_ available from github's own snake collection.
+        os: [ubuntu, macos, windows]
+        python: ["3.9", "3.12", "3.13-dev"]
+        deps: [minimal, full]
+    runs-on: "${{ matrix.os }}-latest"
     defaults:
       run:
         # so I don't have to write nontrivial script fragments twice
-        shell: bash -eo pipefail {0}
+        shell: bash -e -u -o pipefail {0}
     steps:
-      - uses: actions/checkout@v4
-        with:
-          path: src
-      - name: Install Python ${{ matrix.python-version }}
+      - name: Install Python ${{ matrix.python }}
         uses: actions/setup-python@v5
+        id: py
+        with:
+          python-version: ${{ matrix.python }}
+          update-environment: false
+
+      - name: Get build helpers
+        uses: actions/download-artifact@v4
+        with:
+          name: build-scripts
+      - name: Extract build helpers
+        run: unzip build-scripts.zip
+
+      - name: Get just-built PDR package
+        uses: actions/download-artifact@v4
         with:
-          python-version: ${{ matrix.python-version }}
-      - name: Install dependencies using pip
+          name: packages
+          path: dist
+
+      - name: Install pdr from wheel using pip
+        id: deps
         run: |
-          python3 -m venv venv
-          case "$OSTYPE" in
-            (msys|cygwin|win32) source venv/Scripts/activate ;;
-            (*) source venv/bin/activate ;;
-          esac
-          python3 -m pip install --upgrade pip setuptools wheel
-          pip install pytest-cov
-          cd src && pip install -e '.[browsify,fits,tiff,pvl,tests]'
-      - name: Prepare source tree for testing
+          if [[ "${{ matrix.deps }}" = minimal ]]; then
+              features=
+          else
+              features="[browsify,fits,pvl,tiff]"
+          fi
+          # Need to expand the glob naming the wheel file _before_
+          # it is combined with $features.
+          wheel="$(echo dist/pdr-*-py3-*.whl)"
+
+          '${{ steps.py.outputs.python-path }}' -m venv venv
+          # I do not understand why the activation script is in a
+          # different location on Windows.
+          if [ ! -d venv/bin ]; then mv venv/Scripts venv/bin; fi
+          source venv/bin/activate
+
+          pip install --only-binary :all: pytest pytest-cov
+
+          # If this pip command fails *and* Python is a development
+          # version, the issue is probably that some of our
+          # dependencies aren't available; skip the rest of the steps
+          # but don't fail the job.  continue-on-error is
+          # insufficiently nuanced for this rule.
+          set +e +o pipefail
+          pip install --only-binary :all: "$wheel$features" 2>&1 |
+              tee pip-deps.log
+          pip_status=${PIPESTATUS[0]}
+          python_is_experimental=$(python3 -c 'import sys; print(
+              1 if sys.version_info.releaselevel != "final" else 0
+          )')
+          if [[ $pip_status -eq 0 ]]; then
+              echo "available=true" >> "$GITHUB_OUTPUT"
+              exit 0
+          else
+              echo "available=false" >> "$GITHUB_OUTPUT"
+              if [[ $python_is_experimental -eq 1 ]]; then
+                  annote=warning
+                  exitcode=0
+              else
+                  annote=error
+                  exitcode=1
+              fi
+              sed -ne 's/^ERROR: /::'"${annote}"'::/p' < pip-deps.log
+              exit $exitcode
+          fi
+
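
The sed at the end of that step rewrites pip's failure lines into GitHub Actions workflow commands so they surface as annotations in the run summary. Spelled out in Python (log filename as in the step above; annotation level assumed to be "warning"):

    # What the sed one-liner does: prefix each pip "ERROR: " line with a
    # ::warning:: or ::error:: marker so the Actions UI picks it up.
    annote = "warning"  # the step picks "error" on release-grade Pythons

    with open("pip-deps.log") as log:
        for line in log:
            if line.startswith("ERROR: "):
                print(f"::{annote}::{line[len('ERROR: '):]}", end="")
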
+      - name: Prepare for testing
+        if: ${{ fromJSON(steps.deps.outputs.available) }}
         # See comments on the matching step of the conda-unit job for
-        # explanation.  This step should be identical between the two jobs.
+        # explanation.  The only difference between that job and this
+        # one should be the value of readonly_dirs.
         run: |
-          printf 'data_file = %s\n' "$PWD/pytest-cov.dat" \
-              >> src/.coveragerc
-          (cd src && git config --local --unset-all \
-              'http.https://github.com/.extraheader')
-          case "$OSTYPE" in
-            (msys|cygwin|win32)
-              export MSYS2_ARG_CONV_EXCL='*'
-              icacls src /inheritance:r \
+          source venv/bin/activate
+          tar=$(scripts/find-gnu-tar)
+          mkdir src
+          "$tar" --extract -f dist/pdr-*.tar.gz \
+              --strip-components=1 --directory=src \
+              --wildcards \*/.coveragerc \*/tests
+
+          scripts/adjust-coverage-config < src/.coveragerc > coveragerc
+
+          readonly_dirs="dist src venv"
+          host_os=$(python3 -c 'import os; print(os.name)')
+          if [ $host_os = nt ]; then
+              export MSYS2_ARG_CONV_EXCL='*'
+              for f in $readonly_dirs; do
+                  icacls "$f" /inheritance:r \
                       /grant 'CREATOR OWNER:(OI)(CI)(RX,D,DC,WDAC)' \
                       /grant 'BUILTIN\Users:(OI)(CI)RX'
-              icacls 'src\*.*' /reset /t /l /q
-              ;;
-            (*)
-              chmod -R a-w src
-              ;;
-          esac
+                  icacls "$f"'\*.*' /reset /t /l /q
+              done
+          else
+              chmod -R a-w $readonly_dirs
+          fi
+
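
Note that src/ deliberately receives only .coveragerc and the tests; the pdr package itself is importable only from the virtualenv, so the suite exercises the installed wheel. A hypothetical self-check along these lines (not part of this patch) could make that explicit:

    # Sketch: assert that "import pdr" resolves to the installed wheel,
    # not to a stray source tree.  Illustration only.
    import pathlib
    import sysconfig

    import pdr

    purelib = pathlib.Path(sysconfig.get_paths()["purelib"]).resolve()
    pkg = pathlib.Path(pdr.__file__).resolve()
    assert purelib in pkg.parents, f"pdr imported from {pkg}, not {purelib}"
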
       - name: Run unit tests
+        if: ${{ fromJSON(steps.deps.outputs.available) }}
         # See comments on the matching step of the conda-unit job for
         # explanation.  This step should be the same as that step
         # *except* that when using pip we have to explicitly activate
         # the virtualenv.
         run: |
-          case "$OSTYPE" in
-            (msys|cygwin|win32) source venv/Scripts/activate ;;
-            (*) source venv/bin/activate ;;
-          esac
-          cd src
+          source venv/bin/activate
           export PYTHONDONTWRITEBYTECODE=1
-          pytest -p no:cacheprovider -r fEsX --cov=pdr --cov-branch --cov-report=
+          pytest -p no:cacheprovider --basetemp=$PWD/tmp --cov \
+              --cov-config=$PWD/coveragerc --cov-branch --cov-report=term \
+              --rootdir=$PWD/src --import-mode=importlib -r fEsX
+
       - name: Store coverage data
+        if: ${{ fromJSON(steps.deps.outputs.available) }}
         uses: actions/upload-artifact@v4
         with:
-          name: coverage-data-pip-${{ matrix.os }}-${{ matrix.python-version }}
-          path: pytest-cov.dat
-          if-no-files-found: ignore
+          name: coverage-data-pip-${{ matrix.os }}-${{ matrix.python }}-${{ matrix.deps }}
+          path: coverage.dat
           # unmerged coverage analysis data can be ephemeral
           retention-days: 1
 
   coverage:
-    name: combined coverage report
-    if: always()
+    name: Combine coverage
     needs: [conda-unit, pip-unit]
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        id: py
         with:
-          python-version: 3.12
-      - run: |
-          python3 -m pip install --upgrade coverage[toml]
-      - name: Download coverage data
+          python-version: "3.x"  # latest stable
+          update-environment: false
+
+      - name: Get build helpers
+        uses: actions/download-artifact@v4
+        with:
+          name: build-scripts
+      - name: Get just-built PDR package
+        uses: actions/download-artifact@v4
+        with:
+          name: packages
+          path: dist
+      - name: Get coverage data
         uses: actions/download-artifact@v4
         with:
           pattern: coverage-data-*
+          path: cov
           # this does the exact opposite of what it sounds like:
           # if true, all the artifacts that match the pattern are
           # unpacked in the *same location* and clobber each other
           # if false they all get renamed to not collide
           merge-multiple: false
+
+      - name: Extract build helpers
+        run: unzip build-scripts.zip
+
+      - name: Extract sources
+        # this is guaranteed to run on Linux so we can assume "tar" is GNU tar
+        run: |
+          tar -xz --strip-components=1 -f dist/pdr-*.tar.gz \
+              --wildcards \*/.coveragerc \*/pyproject.toml \*/pdr
+
+      - name: Prepare analysis environment
+        run: |
+          '${{ steps.py.outputs.python-path }}' -m venv venv
+          . venv/bin/activate
+          pip install --only-binary :all: --upgrade pip
+          pip install --only-binary :all: coverage
+
       - name: Crunch coverage data
         run: |
-          for dir in coverage-data-*/; do
-              dir=${dir%/}
-              tag=${dir#coverage-data-}
-              mv "$dir/pytest-cov.dat" ".coverage.$tag"
-              rmdir "$dir"
-          done
-          python3 -m coverage combine
-          python3 -m coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
-          python3 -m coverage xml
+          . venv/bin/activate
+          scripts/adjust-coverage-config cov/coverage-data-*/coverage.dat \
+              < .coveragerc > coveragerc.adjusted
+          mv coveragerc.adjusted .coveragerc
+
+          printf '### Combined coverage report\n\n' >> $GITHUB_STEP_SUMMARY
+
+          python3 -m coverage combine cov/coverage-data-*/coverage.dat
+          python3 -m coverage report --format=markdown --sort=cover \
+              >> $GITHUB_STEP_SUMMARY
+          python3 -m coverage xml
+
       - name: Store coverage report
         uses: actions/upload-artifact@v4
         with: