diff --git a/docs/conf.py b/docs/conf.py index c759b4137d..abe40c72da 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,6 +50,7 @@ *[p.stem for p in (HERE / "extensions").glob("*.py")], "sphinx_copybutton", "sphinx_gallery.load_style", + "sphinx_tabs.tabs", ] # nbsphinx specific settings diff --git a/docs/user_guide/developer.rst b/docs/user_guide/developer.rst index f0a502d9b0..d98b77f1bc 100644 --- a/docs/user_guide/developer.rst +++ b/docs/user_guide/developer.rst @@ -19,3 +19,43 @@ Model development tutorials notebooks/module_user_guide notebooks/model_user_guide + +Glossary +-------- + +.. tabs:: + + .. tab:: Model + + A Model class inherits :class:`~scvi.model.base.BaseModelClass` and is the user-facing object for interacting with a module. + The model has a `train` method that learns the parameters of the module, and also contains methods + for users to retrieve information from the module, like the latent representation of cells in a VAE. + Conventionally, the post-inference model methods should not store data into the AnnData object, but + instead return "standard" Python objects, like numpy arrays or pandas dataframes. + + .. tab:: Module + + A module is the lower-level object that defines a generative model and inference scheme. A module will + either inherit :class:`~scvi.module.base.BaseModuleClass` or :class:`~scvi.module.base.PyroBaseModuleClass`. + Consequently, a module can either be implemented with PyTorch alone, or Pyro. In the PyTorch-only case, the + generative process and inference scheme are implemented respectively in the `generative` and `inference` methods, + while the `loss` method computes the loss, e.g., ELBO in the case of variational inference. + +.. tabs:: + + .. tab:: TrainingPlan + + + The training plan is a PyTorch Lightning Module that is initialized with a scvi-tools module object. + It configures the optimizers, defines the training step and validation step, and computes metrics to be + recorded during training. 
The training step and validation step are functions that take data, run it through + the model and return the loss, which will then be used to optimize the model parameters in the Trainer. + Overall, custom training plans can be used to develop complex inference schemes on top of modules. + + + .. tab:: Trainer + + The :class:`~scvi.train.Trainer` is a lightweight wrapper of the PyTorch Lightning Trainer. It takes as input + the training plan, a training data loader, and a validation data loader. It performs the actual training loop, in + which parameters are optimized, as well as the validation loop to monitor metrics. It automatically handles moving + data to the correct device (CPU/GPU). diff --git a/pyproject.toml b/pyproject.toml index 1ac6b6a8c7..576b2808da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,7 @@ scikit-misc = {version = ">=0.1.3", optional = true} sphinx = {version = ">=3.4", optional = true} sphinx-autodoc-typehints = {version = "*", optional = true} sphinx-gallery = {version = ">0.6", optional = true} +sphinx-tabs = {version = "*", optional = true} sphinx_copybutton = {version = "*", optional = true} torch = ">=1.7.1" tqdm = ">=4.56.0" @@ -82,6 +83,7 @@ docs = [ "sphinx-autodoc-typehints", "sphinx_copybutton", "sphinx_gallery", + "sphinx-tabs", ] tutorials = ["scanpy", "leidenalg", "python-igraph", "loompy", "scikit-misc"]