From 3c5ceeb045da8e301b891bf688adbb1ccdbfdbb3 Mon Sep 17 00:00:00 2001
From: Jirka Borovec <6035284+Borda@users.noreply.github.com>
Date: Tue, 19 Mar 2024 16:40:06 +0100
Subject: [PATCH] docs: pin version in links to external docs (#2435)
* docs: pin versions in links to external docs (NumPy, PyTorch, matplotlib, scikit-learn)
* bump lightning-utilities to >=0.11.0, <0.12.0
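* the pinning is done at build time by `adjust_linked_external_docs` from lightning-utilities and is
  gated by the new `SPHINX_PIN_RELEASE_VERSIONS` env variable, which the workflow sets only for full
  (non-PR) builds; a rough sketch of the intended rewrite follows below

As a sketch only (the real logic lives in `lightning_utilities.docs.adjust_linked_external_docs`; the
helper below is hypothetical), the rewrite of a "stable" docs link is expected to look roughly like this:

```python
# Hypothetical illustration of the link pinning, NOT the lightning-utilities implementation.
from importlib import import_module


def pin_stable_link(text: str, stable_url: str, versioned_url: str, version_digits: int = 2) -> str:
    """Replace `stable_url` with `versioned_url`, filled with the installed package version."""
    pkg = versioned_url.split("{")[1].split(".")[0]  # e.g. "{numpy.__version__}" -> "numpy"
    version = import_module(pkg).__version__  # version installed at docs-build time
    short = ".".join(version.split(".")[:version_digits])  # e.g. "1.26.4" -> "1.26"
    return text.replace(stable_url, versioned_url.replace(f"{{{pkg}.__version__}}", short))


# pin_stable_link("https://numpy.org/doc/stable/", "https://numpy.org/doc/stable/",
#                 "https://numpy.org/doc/{numpy.__version__}/")
# -> "https://numpy.org/doc/1.26/"  (assuming numpy 1.26.x is installed when the docs are built)
```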
---
.github/workflows/docs-build.yml | 4 +++-
README.md | 2 +-
docs/source/conf.py | 24 ++++++++++++++++++++++--
docs/source/index.rst | 2 +-
docs/source/pages/lightning.rst | 6 +++---
docs/source/pages/overview.rst | 2 +-
docs/source/pages/quickstart.rst | 2 +-
requirements/_docs.txt | 2 +-
requirements/base.txt | 2 +-
src/torchmetrics/__about__.py | 2 +-
10 files changed, 35 insertions(+), 13 deletions(-)
diff --git a/.github/workflows/docs-build.yml b/.github/workflows/docs-build.yml
index 5751a98784c..812352c6617 100644
--- a/.github/workflows/docs-build.yml
+++ b/.github/workflows/docs-build.yml
@@ -69,7 +69,9 @@ jobs:
- name: Full build for deployment
if: github.event_name != 'pull_request'
- run: echo "SPHINX_FETCH_ASSETS=1" >> $GITHUB_ENV
+ run: |
+ echo "SPHINX_FETCH_ASSETS=1" >> $GITHUB_ENV
+ echo "SPHINX_PIN_RELEASE_VERSIONS=1" >> $GITHUB_ENV
- name: make ${{ matrix.target }}
working-directory: ./docs
run: make ${{ matrix.target }} --debug --jobs $(nproc) SPHINXOPTS="-W --keep-going"
diff --git a/README.md b/README.md
index 325f512a36d..2144d89e010 100644
--- a/README.md
+++ b/README.md
@@ -97,7 +97,7 @@ TorchMetrics is a collection of 100+ PyTorch metrics implementations and an easy
- Metrics optimized for distributed-training
- Automatic synchronization between multiple devices
-You can use TorchMetrics with any PyTorch model or with [PyTorch Lightning](https://pytorch-lightning.readthedocs.io/en/stable/) to enjoy additional features such as:
+You can use TorchMetrics with any PyTorch model or with [PyTorch Lightning](https://lightning.ai/docs/pytorch/stable/) to enjoy additional features such as:
- Module metrics are automatically placed on the correct device.
- Native support for logging metrics in Lightning to reduce even more boilerplate.
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 461f24bc598..0fce257321e 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -20,7 +20,7 @@
import lai_sphinx_theme
import torchmetrics
-from lightning_utilities.docs import fetch_external_assets
+from lightning_utilities.docs import adjust_linked_external_docs, fetch_external_assets
from lightning_utilities.docs.formatting import _transform_changelog
_PATH_HERE = os.path.abspath(os.path.dirname(__file__))
@@ -30,6 +30,7 @@
FOLDER_GENERATED = "generated"
SPHINX_MOCK_REQUIREMENTS = int(os.environ.get("SPHINX_MOCK_REQUIREMENTS", True))
SPHINX_FETCH_ASSETS = int(os.environ.get("SPHINX_FETCH_ASSETS", False))
+SPHINX_PIN_RELEASE_VERSIONS = int(os.getenv("SPHINX_PIN_RELEASE_VERSIONS", False))
html_favicon = "_static/images/icon.svg"
@@ -86,6 +87,25 @@ def _set_root_image_path(page_path: str) -> None:
for page in all_pages:
_set_root_image_path(page)
+
+if SPHINX_PIN_RELEASE_VERSIONS:
+    adjust_linked_external_docs(
+        "https://numpy.org/doc/stable/", "https://numpy.org/doc/{numpy.__version__}/", _PATH_ROOT
+    )
+    adjust_linked_external_docs(
+        "https://pytorch.org/docs/stable/", "https://pytorch.org/docs/{torch.__version__}/", _PATH_ROOT
+    )
+    adjust_linked_external_docs(
+        "https://matplotlib.org/stable/",
+        "https://matplotlib.org/{matplotlib.__version__}/",
+        _PATH_ROOT,
+        version_digits=3,
+    )
+    adjust_linked_external_docs(
+        "https://scikit-learn.org/stable/", "https://scikit-learn.org/{sklearn.__version__}/", _PATH_ROOT
+    )
+
+
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
@@ -263,7 +283,7 @@ def _set_root_image_path(page_path: str) -> None:
"python": ("https://docs.python.org/3", None),
"torch": ("https://pytorch.org/docs/stable/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
- "matplotlib": ("http://matplotlib.org/stable", None),
+ "matplotlib": ("https://matplotlib.org/stable/", None),
}
nitpicky = True
diff --git a/docs/source/index.rst b/docs/source/index.rst
index af7b6ff798b..a51a1184a78 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -19,7 +19,7 @@ TorchMetrics is a collection of 100+ PyTorch metrics implementations and an easy
* Automatic accumulation over batches
* Automatic synchronization between multiple devices
-You can use TorchMetrics in any PyTorch model, or within `PyTorch Lightning <https://pytorch-lightning.readthedocs.io/en/stable/>`_ to enjoy the following additional benefits:
+You can use TorchMetrics in any PyTorch model, or within `PyTorch Lightning <https://lightning.ai/docs/pytorch/stable/>`_ to enjoy the following additional benefits:
* Your data will always be placed on the same device as your metrics
* You can log :class:`~torchmetrics.Metric` objects directly in Lightning to reduce even more boilerplate
diff --git a/docs/source/pages/lightning.rst b/docs/source/pages/lightning.rst
index 655c72103b0..7f1bc70363b 100644
--- a/docs/source/pages/lightning.rst
+++ b/docs/source/pages/lightning.rst
@@ -22,11 +22,11 @@ While TorchMetrics was built to be used with native PyTorch, using TorchMetrics
* Modular metrics are automatically placed on the correct device when properly defined inside a LightningModule.
This means that your data will always be placed on the same device as your metrics. No need to call ``.to(device)`` anymore!
* Native support for logging metrics in Lightning using
- `self.log <https://pytorch-lightning.readthedocs.io/en/stable/extensions/logging.html#logging-from-a-lightningmodule>`_ inside
+ `self.log <https://lightning.ai/docs/pytorch/stable/extensions/logging.html#logging-from-a-lightningmodule>`_ inside
your LightningModule.
* The ``.reset()`` method of the metric will automatically be called at the end of an epoch.
-The example below shows how to use a metric in your `LightningModule <https://pytorch-lightning.readthedocs.io/en/stable/common/lightning_module.html>`_:
+The example below shows how to use a metric in your `LightningModule <https://lightning.ai/docs/pytorch/stable/common/lightning_module.html>`_:
.. testcode:: python
@@ -64,7 +64,7 @@ Logging TorchMetrics
Logging metrics can be done in two ways: either logging the metric object directly or the computed metric values.
When :class:`~torchmetrics.Metric` objects, which return a scalar tensor are logged directly in Lightning using the
-LightningModule `self.log <https://pytorch-lightning.readthedocs.io/en/stable/extensions/logging.html#logging-from-a-lightningmodule>`_
+LightningModule `self.log <https://lightning.ai/docs/pytorch/stable/extensions/logging.html#logging-from-a-lightningmodule>`_
method, Lightning will log the metric based on ``on_step`` and ``on_epoch`` flags present in ``self.log(...)``. If
``on_epoch`` is True, the logger automatically logs the end of epoch metric value by calling ``.compute()``.
diff --git a/docs/source/pages/overview.rst b/docs/source/pages/overview.rst
index bbe33c69cca..83f11ec4ef0 100644
--- a/docs/source/pages/overview.rst
+++ b/docs/source/pages/overview.rst
@@ -96,7 +96,7 @@ be moved to the same device as the input of the metric:
print(out.device) # cuda:0
However, when **properly defined** inside a :class:`~torch.nn.Module` or
-`LightningModule <https://pytorch-lightning.readthedocs.io/en/stable/common/lightning_module.html>`_ the metric will
+`LightningModule <https://lightning.ai/docs/pytorch/stable/common/lightning_module.html>`_ the metric will
be automatically moved to the same device as the module when using ``.to(device)``. Being
**properly defined** means that the metric is correctly identified as a child module of the
model (check ``.children()`` attribute of the model). Therefore, metrics cannot be placed
diff --git a/docs/source/pages/quickstart.rst b/docs/source/pages/quickstart.rst
index f05ac869160..fd9ade6b8d4 100644
--- a/docs/source/pages/quickstart.rst
+++ b/docs/source/pages/quickstart.rst
@@ -11,7 +11,7 @@ TorchMetrics is a collection of 100+ PyTorch metrics implementations and an easy
* Automatic accumulation over batches
* Automatic synchronization between multiple devices
-You can use TorchMetrics in any PyTorch model, or within `PyTorch Lightning <https://pytorch-lightning.readthedocs.io/en/stable/>`_ to enjoy additional features:
+You can use TorchMetrics in any PyTorch model, or within `PyTorch Lightning <https://lightning.ai/docs/pytorch/stable/>`_ to enjoy additional features:
* This means that your data will always be placed on the same device as your metrics.
* Native support for logging metrics in Lightning to reduce even more boilerplate.
diff --git a/requirements/_docs.txt b/requirements/_docs.txt
index 4fbb0d08291..9bab0989fd2 100644
--- a/requirements/_docs.txt
+++ b/requirements/_docs.txt
@@ -12,7 +12,7 @@ sphinx-togglebutton ==0.3.2
sphinx-copybutton ==0.5.2
lightning >=1.8.0, <2.3.0
-lightning-utilities >=0.9.0, <0.11.0
+lightning-utilities >=0.11.0, <0.12.0
pydantic > 1.0.0, < 3.0.0
# integrations
diff --git a/requirements/base.txt b/requirements/base.txt
index d0286421e42..94ee1a90ebe 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -5,4 +5,4 @@ numpy >1.20.0
packaging >17.1
torch >=1.10.0, <=2.2.1
typing-extensions; python_version < '3.9'
-lightning-utilities >=0.8.0, <0.11.0
+lightning-utilities >=0.8.0, <0.12.0
diff --git a/src/torchmetrics/__about__.py b/src/torchmetrics/__about__.py
index 377ebc62055..2414b1d017f 100644
--- a/src/torchmetrics/__about__.py
+++ b/src/torchmetrics/__about__.py
@@ -8,7 +8,7 @@
__docs_url__ = "https://lightning.ai/docs/torchmetrics/stable/"
__long_doc__ = """
Torchmetrics is a metrics API created for easy metric development and usage in both PyTorch and
-[PyTorch Lightning](https://pytorch-lightning.readthedocs.io/en/stable/). It was originally a part of
+[PyTorch Lightning](https://lightning.ai/docs/pytorch/stable/). It was originally a part of
Pytorch Lightning, but got split off so users could take advantage of the large collection of metrics
implemented without having to install Pytorch Lightning (even though we would love for you to try it out).
We currently have around 100+ metrics implemented and we continuously are adding more metrics, both within