diff --git a/.github/actions/pull-caches/action.yml b/.github/actions/pull-caches/action.yml
index 2f7f29ea0cc..c3c79ab93e9 100644
--- a/.github/actions/pull-caches/action.yml
+++ b/.github/actions/pull-caches/action.yml
@@ -18,6 +18,14 @@ inputs:
     description: cache restore/dump key
     required: false
     default: "pypi-packages"
+  cache-torch-HF:
+    description: "cache torch and HF"
+    required: false
+    default: "true"
+  cache-references:
+    description: "cache metrics references"
+    required: false
+    default: "false"

 runs:
   using: "composite"
@@ -67,6 +75,7 @@ runs:
       shell: bash

     - name: Cache Torch & HF
+      if: inputs.cache-torch-HF == 'true' # since the input is a string
       continue-on-error: true
       uses: actions/cache/restore@v3
       with:
@@ -75,6 +84,7 @@ runs:
         key: ci-caches

     - name: Restored Torch & HF
+      if: inputs.cache-torch-HF == 'true' # since the input is a string
       run: |
         mkdir -p $CACHES_DIR
         pip install -q py-tree
@@ -83,7 +93,7 @@ runs:

     - name: Cache References
       # do not use this cache for dispatch and cron, to enable rebuilding the caches when needed
-      if: github.event_name != 'workflow_dispatch' && github.event_name != 'schedule'
+      if: github.event_name != 'workflow_dispatch' && github.event_name != 'schedule' && inputs.cache-references == 'true'
       continue-on-error: true
       uses: actions/cache/restore@v3
       with:
@@ -91,6 +101,7 @@ runs:
         key: cache-references

     - name: Restored References
+      if: inputs.cache-references == 'true' # since the input is a string
       continue-on-error: true
       working-directory: tests/
       run: |
diff --git a/.github/actions/push-caches/action.yml b/.github/actions/push-caches/action.yml
index 8f5db36b6dd..85c7378a69e 100644
--- a/.github/actions/push-caches/action.yml
+++ b/.github/actions/push-caches/action.yml
@@ -14,6 +14,18 @@ inputs:
     description: location to pull PyTorch from
     required: false
     default: "https://download.pytorch.org/whl/cpu/torch_stable.html"
+  cache-artifact-appendix:
+    description: "unique name or running index"
+    required: false
+    default: ""
+  cache-torch-HF:
+    description: "cache torch and HF"
+    required: false
+    default: "true"
+  cache-references:
+    description: "cache metrics references"
+    required: false
+    default: "false"

 runs:
   using: "composite"
@@ -23,66 +35,50 @@ runs:
       shell: bash

     - name: Freeze local env.
+      if: inputs.cache-artifact-appendix != ''
       run: |
         pip freeze > requirements.dump
         cat requirements.dump
       shell: bash

-    #- name: Filter self pkg
-    #  run: |
-    #    import os
-    #    fp = 'requirements.dump'
-    #    with open(fp) as fopen:
-    #        lines = [ln.strip() for ln in fopen.readlines()]
-    #    lines = [ln.split('+')[0] for ln in lines if '-e ' not in ln]
-    #    with open(fp, 'w') as fwrite:
-    #        fwrite.writelines([ln + os.linesep for ln in lines])
-    #  shell: python

     - name: Dump wheels
+      if: inputs.cache-artifact-appendix != ''
       run: |
         pip wheel -r requirements/_devel.txt --prefer-binary \
-          --wheel-dir=.pip-wheels \
+          --wheel-dir=_pip-wheels \
           -f ${{ inputs.torch-url }} -f ${{ inputs.pypi-dir }}
-        ls -lh .pip-wheels
+        ls -lh _pip-wheels
       shell: bash

-    - name: Cache pull packages
-      uses: actions/cache/restore@v3
-      with:
-        enableCrossOsArchive: true
-        path: ${{ inputs.pypi-dir }}
-        key: ${{ inputs.pypi-key }}
-
-    - name: Find diff
-      id: wheels-diff
+    - name: Move new packages to staging
+      if: inputs.cache-artifact-appendix != ''
       run: |
-        import os, glob
-        wheels = [os.path.basename(p) for p in glob.glob(".pip-wheels/*")]
-        pkgs = [os.path.basename(p) for p in glob.glob("${{ inputs.pypi-dir }}/*")]
-        diff = [w for w in wheels if w not in pkgs]
-        print(diff)
-        with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
-            print(f'count-new={len(diff)}', file=fh)
-      shell: python
-
-    - run: cp .pip-wheels/* ${{ inputs.pypi-dir }}
-      if: ${{ steps.wheels-diff.outputs.count-new != 0 }}
+        mkdir -p _pip-staging
+        python .github/assistant.py move_new_packages \
+          --dir_cache="${{ inputs.pypi-dir }}" \
+          --dir_local="_pip-wheels" \
+          --dir_staging="_pip-staging"
+        ls -lh _pip-staging/
+        # count files in the staging dir
+        file_count=$(ls -1 "_pip-staging/" | wc -l)
+        echo "NUM_PACKAGES=$file_count" >> $GITHUB_ENV
       shell: bash

-    - name: Cache push packages
-      if: ${{ steps.wheels-diff.outputs.count-new != 0 }}
-      uses: actions/cache/save@v3
+    - name: Upload new packages
+      if: inputs.cache-artifact-appendix != '' && env.NUM_PACKAGES != 0
+      uses: actions/upload-artifact@v4
       with:
-        enableCrossOsArchive: true
-        path: ${{ inputs.pypi-dir }}
-        key: ${{ inputs.pypi-key }}
+        name: ${{ inputs.pypi-key }}-run-${{ inputs.cache-artifact-appendix }}
+        path: _pip-staging
+        retention-days: 1

     - name: Post Torch & HF
+      if: inputs.cache-torch-HF == 'true' # since the input is a string
       run: py-tree $CACHES_DIR
       shell: bash

     - name: Cache Torch & HF
+      if: inputs.cache-torch-HF == 'true' # since the input is a string
       continue-on-error: true
       uses: actions/cache/save@v3
       with:
@@ -91,6 +87,7 @@ runs:
         key: ci-caches

     - name: Cache references
+      if: inputs.cache-references == 'true' # since the input is a string
       continue-on-error: true
       uses: actions/cache/save@v3
       with:
@@ -98,6 +95,8 @@ runs:
         path: tests/_cache-references
         key: cache-references

-    - name: Post References
-      run: py-tree tests/_cache-references/ --show_hidden
-      shell: bash
+    #- name: Post References
+    #  # this print takes too many lines, so it is commented out
+    #  if: inputs.cache-references == 'true' # since the input is a string
+    #  run: py-tree tests/_cache-references/ --show_hidden
+    #  shell: bash
diff --git a/.github/assistant.py b/.github/assistant.py
index 694a6cf7e13..d2564603f2f 100644
--- a/.github/assistant.py
+++ b/.github/assistant.py
@@ -179,6 +179,23 @@ def _crop_path(fname: str, paths: list[str]) -> str:
             raise ValueError(f"Missing following paths: {not_exists}")
         return " ".join(test_modules)

+    @staticmethod
+    def move_new_packages(dir_cache: str, dir_local: str, dir_staging: str) -> None:
"""Move unique packages from local folder to staging.""" + assert os.path.isdir(dir_cache), f"Missing folder with saved packages: '{dir_cache}'" # noqa: S101 + assert os.path.isdir(dir_local), f"Missing folder with local packages: '{dir_local}'" # noqa: S101 + assert os.path.isdir(dir_staging), f"Missing folder for staging: '{dir_staging}'" # noqa: S101 + + import shutil + + for pkg in os.listdir(dir_local): + if not os.path.isfile(pkg): + continue + if pkg in os.listdir(dir_cache): + continue + logging.info(f"Moving '{pkg}' to staging...") + shutil.move(os.path.join(dir_cache, pkg), os.path.join(dir_staging, pkg)) + if __name__ == "__main__": logging.basicConfig(level=logging.INFO) diff --git a/.github/workflows/_merge_cache.yml b/.github/workflows/_merge_cache.yml new file mode 100644 index 00000000000..7581f232b88 --- /dev/null +++ b/.github/workflows/_merge_cache.yml @@ -0,0 +1,57 @@ +name: Collect new packages and upload cache + +on: + workflow_call: + inputs: + pypi-key: + description: cache restore/dump key + required: false + type: string + default: "pypi-packages" + pypi-dir: + description: location of local PyPI cache + required: false + type: string + default: "_ci-cache_PyPI" + cache-artifact-appendix: + description: "unique name for the job" + required: true + type: string + +jobs: + merge-caches: + runs-on: ubuntu-latest + steps: + - name: Download 📥 artifacts + uses: actions/download-artifact@v4 + with: + pattern: ${{ inputs.pypi-key }}-run-${{ inputs.cache-artifact-appendix }}* + merge-multiple: true + path: _local-packages + - name: Cache pull packages + uses: actions/cache/restore@v3 + with: + enableCrossOsArchive: true + path: ${{ inputs.pypi-dir }} + key: ${{ inputs.pypi-key }} + + - name: show 📦 + run: | + # create the directory if it doesn't exist - no artifact were found + mkdir -p _local-packages + ls -lh _local-packages + ls -lh ${{ inputs.pypi-dir }} + # count files in the staging dir + file_count=$(ls -1 "_local-packages/" | wc -l) + echo "NUM_PACKAGES=$file_count" >> $GITHUB_ENV + - name: Move collected 📦 + if: env.NUM_PACKAGES != 0 + run: mv _local-packages/* ${{ inputs.pypi-dir }} + + - name: Cache push packages + if: env.NUM_PACKAGES != 0 + uses: actions/cache/save@v3 + with: + enableCrossOsArchive: true + path: ${{ inputs.pypi-dir }} + key: ${{ inputs.pypi-key }} diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 210f2f2195b..22e5ca5563a 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -20,6 +20,9 @@ defaults: run: shell: bash +env: + PYPI_CACHE_DIR: "_ci-cache_PyPI" + jobs: check-diff: if: github.event.pull_request.draft == false @@ -59,10 +62,10 @@ jobs: - { os: "windows-2022", python-version: "3.11", pytorch-version: "2.6.0" } env: FREEZE_REQUIREMENTS: ${{ ! 
-      PYPI_CACHE_DIR: "_ci-cache_PyPI"
       TOKENIZERS_PARALLELISM: false
       TEST_DIRS: ${{ needs.check-diff.outputs.test-dirs }}
-      PIP_EXTRA_INDEX_URL: "--find-links https://download.pytorch.org/whl/cpu/torch_stable.html"
+      PIP_EXTRA_INDEX_URL: "--find-links=https://download.pytorch.org/whl/cpu/torch_stable.html"
+      UNITTEST_TIMEOUT: "" # not set by default

     # Timeout: https://stackoverflow.com/a/59076067/4521646
     # it seems that macOS jobs take much longer than jobs on the other OSes
@@ -98,6 +101,7 @@ jobs:
           requires: ${{ matrix.requires }}
           pytorch-version: ${{ matrix.pytorch-version }}
           pypi-dir: ${{ env.PYPI_CACHE_DIR }}
+          cache-references: true

       - name: Switch to PT test URL
         if: ${{ matrix.pytorch-version == '2.6.0' }}
@@ -107,7 +111,7 @@ jobs:
         run: |
           pip --version
           pip install -e . -U "setuptools==69.5.1" -r requirements/_doctest.txt \
-            $PIP_EXTRA_INDEX_URL --find-links $PYPI_CACHE_DIR
+            $PIP_EXTRA_INDEX_URL --find-links="$PYPI_CACHE_DIR"
           pip list

       - name: DocTests
@@ -126,7 +130,7 @@ jobs:
             python adjust-torch-versions.py $fpath
           done
           pip install --requirement requirements/_devel.txt -U \
-            $PIP_EXTRA_INDEX_URL --find-links $PYPI_CACHE_DIR
+            $PIP_EXTRA_INDEX_URL --find-links="$PYPI_CACHE_DIR"
           pip list

       - name: set special vars for PR
@@ -201,7 +205,7 @@ jobs:
         uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
-          file: tests/coverage.xml
+          files: "tests/coverage.xml"
           flags: cpu,${{ runner.os }},python${{ matrix.python-version }},torch${{ steps.info.outputs.TORCH }}
           env_vars: OS,PYTHON
           name: codecov-umbrella
@@ -213,6 +217,8 @@ jobs:
         uses: ./.github/actions/push-caches
         with:
           pypi-dir: ${{ env.PYPI_CACHE_DIR }}
+          cache-artifact-appendix: ${{ github.run_id }}-${{ strategy.job-index }}
+          cache-references: true

   testing-guardian:
     runs-on: ubuntu-latest
@@ -227,3 +233,11 @@ jobs:
       if: contains(fromJSON('["cancelled", "skipped"]'), needs.pytester.result)
       timeout-minutes: 1
       run: sleep 90
+
+  merge-pkg-artifacts:
+    needs: pytester
+    if: success()
+    uses: ./.github/workflows/_merge_cache.yml
+    with:
+      pypi-dir: "_ci-cache_PyPI"
+      # the trailing '*' in the download pattern matches all per-job artifact suffixes
+      cache-artifact-appendix: ${{ github.run_id }}
diff --git a/tests/unittests/_helpers/wrappers.py b/tests/unittests/_helpers/wrappers.py
index 157dd1db2b8..2a1d54d8152 100644
--- a/tests/unittests/_helpers/wrappers.py
+++ b/tests/unittests/_helpers/wrappers.py
@@ -10,7 +10,6 @@
     "We couldn't connect to",
     "Connection error",
     "Can't load",
-    "`nltk` resource `punkt` is",
 )

diff --git a/tests/unittests/utilities/test_auc.py b/tests/unittests/classification/test_auc.py
similarity index 99%
rename from tests/unittests/utilities/test_auc.py
rename to tests/unittests/classification/test_auc.py
index 887f4c2d14b..8644de105a0 100644
--- a/tests/unittests/utilities/test_auc.py
+++ b/tests/unittests/classification/test_auc.py
@@ -19,6 +19,7 @@
 from sklearn.metrics import auc as _sk_auc
 from torch import Tensor, tensor
 from torchmetrics.utilities.compute import auc
+
 from unittests import NUM_BATCHES
 from unittests._helpers import seed_all
 from unittests._helpers.testers import MetricTester
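
The diff lands the `move_new_packages` helper without an accompanying test. A minimal smoke test along the following lines can be run locally; it is a sketch, not part of the PR: the standalone function mirrors the logic added to `.github/assistant.py`, and all folder and wheel names are hypothetical.

import logging
import os
import shutil
import tempfile

logging.basicConfig(level=logging.INFO)


def move_new_packages(dir_cache: str, dir_local: str, dir_staging: str) -> None:
    """Move wheels that exist locally but are not yet cached into the staging folder."""
    cached_pkgs = set(os.listdir(dir_cache))
    for pkg in os.listdir(dir_local):
        src = os.path.join(dir_local, pkg)
        # move only regular files that are not already in the cache
        if not os.path.isfile(src) or pkg in cached_pkgs:
            continue
        logging.info(f"Moving '{pkg}' to staging...")
        shutil.move(src, os.path.join(dir_staging, pkg))


# smoke test with throwaway folders standing in for the cache/local/staging dirs
with tempfile.TemporaryDirectory() as tmp:
    dirs = {name: os.path.join(tmp, name) for name in ("cache", "local", "staging")}
    for path in dirs.values():
        os.makedirs(path)
    # one wheel is already cached, one is new
    open(os.path.join(dirs["cache"], "old-1.0-py3-none-any.whl"), "w").close()
    open(os.path.join(dirs["local"], "old-1.0-py3-none-any.whl"), "w").close()
    open(os.path.join(dirs["local"], "new-2.0-py3-none-any.whl"), "w").close()
    move_new_packages(dirs["cache"], dirs["local"], dirs["staging"])
    assert os.listdir(dirs["staging"]) == ["new-2.0-py3-none-any.whl"]
    assert os.listdir(dirs["local"]) == ["old-1.0-py3-none-any.whl"]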
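
The merge workflow collects the per-job artifacts through the trailing `*` in its `pattern:` input, which `actions/download-artifact@v4` matches glob-style. Assuming that matching behavior, the naming contract between the matrix jobs and the merge job can be sketched as follows (run id and job count are made up):

from fnmatch import fnmatch

run_id = "9876543210"  # hypothetical github.run_id
# artifact names produced by the pytester matrix jobs via push-caches
job_artifacts = [f"pypi-packages-run-{run_id}-{idx}" for idx in range(4)]
# pattern used by _merge_cache.yml when cache-artifact-appendix is the bare run id
pattern = f"pypi-packages-run-{run_id}*"
assert all(fnmatch(name, pattern) for name in job_artifacts)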