diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml
index 4cdc3f5e94..2657e1e42a 100644
--- a/.github/workflows/ci-tests.yml
+++ b/.github/workflows/ci-tests.yml
@@ -3,7 +3,7 @@
 # - https://github.com/actions/checkout
 # - https://github.com/marketplace/actions/setup-miniconda
 
-name: ci-tests 
+name: ci-tests
 
 on:
   push:
diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml
new file mode 100644
index 0000000000..265489883f
--- /dev/null
+++ b/.github/workflows/ci-wheels.yml
@@ -0,0 +1,166 @@
+# Reference:
+# - https://github.com/actions/checkout
+# - https://github.com/actions/download-artifact
+# - https://github.com/actions/upload-artifact
+# - https://github.com/pypa/build
+# - https://github.com/pypa/gh-action-pypi-publish
+# - https://test.pypi.org/help/#apitoken
+
+name: ci-wheels
+
+on:
+  pull_request:
+
+  push:
+    tags:
+      - "v*"
+    branches-ignore:
+      - "auto-update-lockfiles"
+      - "pre-commit-ci-update-config"
+      - "dependabot/*"
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build:
+    name: "build sdist & wheel"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: "building"
+        shell: bash
+        run: |
+          # require build with explicit --sdist and --wheel in order to
+          # get correct version associated with sdist and bdist artifacts
+          pipx run build --sdist --wheel
+
+      - uses: actions/upload-artifact@v3
+        with:
+          name: pypi-artifacts
+          path: ${{ github.workspace }}/dist/*
+
+  test-wheel:
+    needs: build
+    name: "test wheel (py${{ matrix.python-version }})"
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -l {0}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10"]
+        session: ["wheel"]
+    env:
+      ENV_NAME: "ci-wheels"
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - uses: actions/download-artifact@v3
+        with:
+          name: pypi-artifacts
+          path: ${{ github.workspace }}/dist
+
+      - name: "environment configure"
+        env:
+          # Maximum cache period (in weeks) before forcing a cache refresh.
+          CACHE_WEEKS: 2
+        run: |
+          echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV}
+          echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV}
+
+      - name: "conda package cache"
+        uses: ./.github/workflows/composite/conda-pkg-cache
+        with:
+          cache_build: 0
+          cache_period: ${{ env.CACHE_PERIOD }}
+          env_name: ${{ env.ENV_NAME }}
+
+      - name: "conda install"
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          miniforge-version: latest
+          channels: conda-forge,defaults
+          activate-environment: ${{ env.ENV_NAME }}
+          auto-update-conda: false
+          use-only-tar-bz2: true
+
+      - name: "conda environment cache"
+        uses: ./.github/workflows/composite/conda-env-cache
+        with:
+          cache_build: 0
+          cache_period: ${{ env.CACHE_PERIOD }}
+          env_name: ${{ env.ENV_NAME }}
+          install_packages: "nox pip"
+
+      - name: "nox cache"
+        uses: ./.github/workflows/composite/nox-cache
+        with:
+          cache_build: 0
+          env_name: ${{ env.ENV_NAME }}
+          lock_file: ${{ env.LOCK_FILE }}
+
+      - name: "nox install and test wheel"
+        env:
+          PY_VER: ${{ matrix.python-version }}
+        run: |
+          nox --session ${{ matrix.session }} -- --verbose
+
+  show-artifacts:
+    needs: build
+    name: "show artifacts"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          name: pypi-artifacts
+          path: ${{ github.workspace }}/dist
+
+      - shell: bash
+        run: |
+          ls -l ${{ github.workspace }}/dist
+
+  publish-artifacts-test-pypi:
+    needs: test-wheel
+    name: "publish to test.pypi"
+    runs-on: ubuntu-latest
+    # upload to Test PyPI for every commit on main branch
+    if: github.event_name == 'push' && github.event.ref == 'refs/heads/main'
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          name: pypi-artifacts
+          path: ${{ github.workspace }}/dist
+
+      - uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.TEST_PYPI_API_TOKEN }}
+          repository_url: https://test.pypi.org/legacy/
+          skip_existing: true
+          print_hash: true
+
+  publish-artifacts-pypi:
+    needs: test-wheel
+    name: "publish to pypi"
+    runs-on: ubuntu-latest
+    # upload to PyPI for every tag starting with 'v'
+    if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v')
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          name: pypi-artifacts
+          path: ${{ github.workspace }}/dist
+
+      - uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}
+          print_hash: true
\ No newline at end of file
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 8ec8ab145c..58d5b26769 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -6,6 +6,9 @@ build:
     python: mambaforge-4.10
   jobs:
     post_checkout:
+      # The SciTools/iris repository is shallow i.e., has a .git/shallow,
+      # therefore complete the repository with a full history in order
+      # to allow setuptools-scm to correctly auto-discover the version.
       - git fetch --unshallow
       - git fetch --all
diff --git a/MANIFEST.in b/MANIFEST.in
index 81d7165199..ad28df9c7c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,15 +1,13 @@
 # Top-level files
 include CHANGES COPYING COPYING.LESSER
+prune .github
+exclude .gitignore
 
-# Files from setup.py package_data that are not automatically added to source distributions
-recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json
-recursive-include lib/iris/etc *
-include lib/iris/tests/stock/file_headers/*
-
+# Files required for conda package management
 recursive-include requirements *
 
-# File required to build docs
-recursive-include docs Makefile *.js *.png *.py *.rst
+# Files required to build docs
+recursive-include docs *
 prune docs/src/_build
 prune docs/src/generated
 prune docs/gallery_tests
@@ -18,7 +16,5 @@ prune docs/gallery_tests
 include tools/generate_std_names.py
 include etc/cf-standard-name-table.xml
 
-global-exclude *.pyc
-global-exclude __pycache__
-global-exclude iris_image_test_output
-exclude lib/iris/_version.py
\ No newline at end of file
+global-exclude *.py[cod]
+global-exclude __pycache__
\ No newline at end of file
diff --git a/docs/src/conf.py b/docs/src/conf.py
index 4f1eae7403..6b11e83a8c 100644
--- a/docs/src/conf.py
+++ b/docs/src/conf.py
@@ -89,7 +89,9 @@ def autolog(message):
 version = get_version("scitools-iris")
 if version.endswith("+dirty"):
     version = version[: -len("+dirty")]
+release = version
 autolog(f"Iris Version = {version}")
+autolog(f"Iris Release = {release}")
 
 # -- General configuration ---------------------------------------------------
 
diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst
index 182ab482b3..25a426e20b 100644
--- a/docs/src/developers_guide/release.rst
+++ b/docs/src/developers_guide/release.rst
@@ -123,6 +123,14 @@ conda package on the `conda-forge Anaconda channel`_.
 Update PyPI
 -----------
 
+.. note::
+
+    As part of our Continuous-Integration (CI), the building and publishing of
+    PyPI artifacts is now automated by a dedicated GitHub Action.
+
+    The following instructions **no longer** need to be performed manually,
+    but remain part of the documentation for reference purposes only.
+
 Update the `scitools-iris`_ project on PyPI with the latest Iris release.
 
 To do this perform the following steps.
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index 761833ba15..7ff731d4cb 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -254,6 +254,9 @@ This document explains the changes made to Iris for this release
 #. `@bjlittle`_ and `@trexfeathers`_ (reviewer) adopted `setuptools-scm`_ for
    automated ``iris`` package versioning. (:pull:`4841`)
 
+#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) added building, testing and
+   publishing of ``iris`` PyPI ``sdist`` and binary ``wheels`` as part of
+   our GitHub Continuous-Integration. (:pull:`4849`)
 
 .. comment
     Whatsnew author names (@github name) in alphabetical order. Note that,
diff --git a/noxfile.py b/noxfile.py
index 2b1df8fb00..8aabf862fb 100755
--- a/noxfile.py
+++ b/noxfile.py
@@ -271,6 +271,36 @@ def linkcheck(session: nox.sessions.Session):
     )
 
 
+@nox.session(python=PY_VER, venv_backend="conda")
+def wheel(session: nox.sessions.Session):
+    """
+    Perform iris local wheel install and import test.
+
+    Parameters
+    ----------
+    session: object
+        A `nox.sessions.Session` object.
+ + """ + prepare_venv(session) + session.cd("dist") + fname = list(Path(".").glob("scitools_iris-*.whl")) + if len(fname) == 0: + raise ValueError("Cannot find wheel to install.") + if len(fname) > 1: + emsg = ( + f"Expected to find 1 wheel to install, found {len(fname)} instead." + ) + raise ValueError(emsg) + session.install(fname[0].name) + session.run( + "python", + "-c", + "import iris; print(f'{iris.__version__=}')", + external=True, + ) + + @nox.session @nox.parametrize( "run_type", diff --git a/setup.cfg b/setup.cfg index 66e865572e..a60d107835 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,7 +54,7 @@ install_requires = numpy>=1.19 scipy xxhash -packages = find: +packages = find_namespace: package_dir = =lib python_requires =