diff --git a/.circleci/config.yml b/.circleci/config.yml
index 3eed619d56..5c889193ec 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -394,6 +394,35 @@ jobs:
             ssh-add ~/.ssh/id_ed25519
             /home/circleci/nipype/tools/feedstock.sh
 
+  build_docs:
+    docker:
+      - image: python:3.7.4
+    working_directory: /tmp/src/nipype
+    environment:
+      - FSLOUTPUTTYPE: 'NIFTI'
+    steps:
+      - checkout
+      - run:
+          name: Check Python version and upgrade pip
+          command: |
+            python --version
+            python -m pip install -U pip
+      - run:
+          name: Install graphviz
+          command: |
+            apt-get update
+            apt-get install -y graphviz
+      - run:
+          name: Install Requirements (may contain pinned versions)
+          command: python -m pip install -r docs/requirements.txt
+      - run:
+          name: Install NiPype
+          command: python -m pip install ".[doc]"
+      - run:
+          name: Build documentation
+          command: make -C doc html
+      - store_artifacts:
+          path: /tmp/src/nipype/doc/_build/html
+
 workflows:
   version: 2
   build_test_deploy:
@@ -406,21 +435,39 @@
               only: /.*/
       - compare_base_dockerfiles:
           filters:
+            branches:
+              ignore:
+                - /docs?\/.*/
             tags:
               only: /.*/
       - test_pytest:
           filters:
+            branches:
+              ignore:
+                - /docs?\/.*/
             tags:
               only: /.*/
           requires:
             - compare_base_dockerfiles
       - test_fmri_fsl_spm:
+          filters:
+            branches:
+              ignore:
+                - /docs?\/.*/
          requires:
            - compare_base_dockerfiles
       - test_fmri_spm_dartel_multiproc:
+          filters:
+            branches:
+              ignore:
+                - /docs?\/.*/
          requires:
            - compare_base_dockerfiles
       - test_fmri_spm_nested_fsl_feeds:
+          filters:
+            branches:
+              ignore:
+                - /docs?\/.*/
          requires:
            - compare_base_dockerfiles
       - deploy_dockerhub:
diff --git a/doc/Makefile b/doc/Makefile
index 2c96edd38b..85d491a70f 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -11,12 +11,11 @@ PAPEROPT_a4     = -D latex_paper_size=a4
 PAPEROPT_letter = -D latex_paper_size=letter
 ALLSPHINXOPTS   = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 
-.PHONY: help clean html nipypeapi htmlonly latex changes linkcheck doctest
+.PHONY: help clean html htmlonly latex changes linkcheck doctest
 
 help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html      make the HTML documentation"
-	@echo "  nipypeapi make interface API documents only"
	@echo "  latex     make the LaTeX, you can set PAPER=a4 or PAPER=letter"
	@echo "  pdf       make <latex> and run the PDF generation"
	@echo "  changes   make an overview of all changed/added/deprecated" \
@@ -33,20 +32,15 @@ htmlonly:
	@echo
	@echo "Build finished. The HTML pages are in _build/html."
 
-nipypeapi:
-	rm -rf interfaces/generated
-	python -u ../tools/build_interface_docs.py
-	@echo "Build API docs finished."
-
-html: clean examples2rst nipypeapi htmlonly
+html: clean examples2rst htmlonly
	@echo "Build HTML and API finished."
 
-examples2rst:
+examples2rst: clean
	mkdir -p users/examples
-	../tools/make_examples.py --no-exec
+	../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec
	@echo "examples2rst finished."
 
-latex: nipypeapi
+latex: clean
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex
	@echo
	@echo "Build finished; the LaTeX files are in _build/latex."
@@ -80,18 +74,3 @@ gitwash-update:
		--project-url=http://nipy.org/nipype \
		--project-ml-url=http://mail.scipy.org/mailman/listinfo/nipy-devel
	@echo "gitwash updated"
-
-# Sourceforge doesn't appear to have a way of copying the files
-# without specifying a username. So we'll probably have one target
-# for each project admin
-sf_satra_nightly: html
-	@echo "Copying html files to sourceforge..."
-	scp -r _build/html/* satra,nipy@web.sourceforge.net:htdocs/nipype-nightly/
-
-sf_satra: html
-	@echo "Copying html files to sourceforge..."
-	rsync -auv _build/html/. satra,nipy@web.sourceforge.net:htdocs/nipype/.
-
-sf_filo: html
-	@echo "Copying html files to sourceforge..."
-	rsync -auv _build/html/. gorgolewski,nipy@web.sourceforge.net:htdocs/nipype/.
diff --git a/doc/README.txt b/doc/README.txt
deleted file mode 100644
index e55d4936cf..0000000000
--- a/doc/README.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-======================
- Nipype Documentation
-======================
-
-This directory contains the documentation for the Nipype_ project.
-The documentation is written in reST_ (reStructuredText) and uses
-Sphinx_ to render html documentation from the rst source files.
-
-A relatively recent version of Sphinx_ is required to build the
-documentation, at least 0.6.x.
-
-Use the ``Makefile`` to build the documentation.  Common commands:
-
-Discover available make targets::
-
-    make help
-
-Clean up previous build::
-
-    make clean
-
-Build html documentation::
-
-    make html
-
-
-
-
diff --git a/doc/_templates/navbar.html b/doc/_templates/navbar.html
index 883bedc56b..fd0216b53f 100644
--- a/doc/_templates/navbar.html
+++ b/doc/_templates/navbar.html
@@ -9,7 +9,9 @@
 Home ·
 Quickstart ·
-Documentation ·
+User Guide and Examples ·
+Interfaces Index ·
+Developers ·
 About ·
 Nipy
diff --git a/doc/about.rst b/doc/about.rst
index f9d2831f70..45a6e30229 100644
--- a/doc/about.rst
+++ b/doc/about.rst
@@ -1,3 +1,5 @@
+:orphan:
+
 .. _about:
 
 =====
diff --git a/doc/api/index.rst b/doc/api/index.rst
deleted file mode 100644
index 0cc9d87e32..0000000000
--- a/doc/api/index.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-.. _api-index:
-
-###########################################
-Library API (application program interface)
-###########################################
-
-Information on specific functions, classes, and methods.
-
-:Release: |version|
-:Date: |today|
-
-.. toctree::
-   :glob:
-
-   generated/*
diff --git a/doc/changes.rst b/doc/changes.rst
index 858a907691..3fc9469ef8 100644
--- a/doc/changes.rst
+++ b/doc/changes.rst
@@ -1,3 +1,4 @@
+:orphan:
 :tocdepth: 2
 
 .. _changes:
diff --git a/doc/conf.py b/doc/conf.py
index 45bd46b97b..a93cfe7480 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -16,14 +16,6 @@ from packaging.version import Version
 
 import nipype
 
-# if not os.path.exists('users/examples'):
-#     os.mkdir('users/examples')
-# os.system('python ../tools/make_examples.py --no-exec')
-
-# if os.path.exists('interfaces/generated'):
-#     rmtree('interfaces/generated')
-# os.system('python ../tools/build_interface_docs.py')
-
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -41,9 +33,11 @@
     'sphinx.ext.inheritance_diagram',
     'sphinx.ext.todo',
     'sphinxcontrib.apidoc',
-    'sphinxcontrib.napoleon',
     'matplotlib.sphinxext.plot_directive',
+    'nbsphinx',
     'nipype.sphinxext.plot_workflow',
+    'nipype.sphinxext.apidoc',
+    'nipype.sphinxext.documenter',
 ]
 
 autodoc_mock_imports = [
@@ -57,6 +51,8 @@
     'skimage',
     'svgutils',
     'transforms3d',
+    'tvtk',
+    'vtk'
 ]
 
 # Accept custom section names to be parsed for numpy-style docstrings
@@ -68,6 +64,8 @@
     ('Inputs', 'Parameters'),
     ('Outputs', 'Parameters'),
     ('Attributes', 'Parameters'),
+    ('Mandatory Inputs', 'Parameters'),
+    ('Optional Inputs', 'Parameters'),
 ]
 
 
@@ -122,8 +120,6 @@
 # This pattern also affects html_static_path and html_extra_path.
 exclude_patterns = [
     '_build', 'Thumbs.db', '.DS_Store',
-    'api/generated/gen.rst',
-    'interfaces/generated/gen.rst'
 ]
 
 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -278,10 +274,8 @@
 apidoc_output_dir = 'api/generated'
 apidoc_excluded_paths = [
     '*/tests/*', 'tests/*',
-    'algorithms/*',
     'external/*', 'fixes/*',
-    'interfaces/*',
     'scripts/*', 'sphinxext/*',
     'testing/*',
diff --git a/doc/developers.rst b/doc/developers.rst
new file mode 100644
index 0000000000..d77fbcc946
--- /dev/null
+++ b/doc/developers.rst
@@ -0,0 +1,17 @@
+:orphan:
+
+.. _developers:
+
+==================
+Developer's Corner
+==================
+
+.. toctree::
+   :maxdepth: 2
+
+   devel/index
+
+.. toctree::
+   :maxdepth: 3
+
+   api/generated/nipype
diff --git a/doc/documentation.rst b/doc/documentation.rst
deleted file mode 100644
index 1cf275d630..0000000000
--- a/doc/documentation.rst
+++ /dev/null
@@ -1,73 +0,0 @@
-.. _documentation:
-
-=============
-Documentation
-=============
-
-:Release: |version|
-:Date: |today|
-
-Previous versions: `1.3.0 `_ `1.2.3 `_
-
-
-.. container:: doc2
-
-  .. admonition:: Michael Notter's Nipype guide
-
-    Be sure to read `Michael's excellent tutorials `__.
-
-  .. admonition:: Nipype Workflows
-
-    The workflows that used to live as a module under
-    ``nipype.workflows`` have been migrated to the
-    new project `NiFlows `__.
-
-  .. admonition:: Interfaces and Examples
-
-    .. hlist::
-       :columns: 2
-
-       * *In-house* interfaces
-
-         .. toctree::
-            :maxdepth: 1
-            :glob:
-
-            interfaces/generated/*algorithms*
-
-       * Interfaces to third-party tools
-
-         .. toctree::
-            :maxdepth: 1
-            :glob:
-
-            interfaces/generated/*interfaces*
-
-       * Examples
-
-         .. toctree::
-            :maxdepth: 1
-            :glob:
-
-            users/examples/*
-
-  .. admonition:: Developer Guides
-
-    .. hlist::
-       :columns: 2
-
-       * API
-
-         .. toctree::
-            :maxdepth: 2
-
-            api/index
-
-       * Developer Guide
-
-         .. toctree::
-            :maxdepth: 2
-
-            devel/index
-
-.. include:: links_names.txt
diff --git a/doc/examples.rst b/doc/examples.rst
new file mode 100644
index 0000000000..5b645fcace
--- /dev/null
+++ b/doc/examples.rst
@@ -0,0 +1,19 @@
+:orphan:
+
+.. _examples:
+
+=======================
+User Guide and Examples
+=======================
+
+.. admonition:: Michael Notter's User Guide
+
+  Be sure to read `Michael's excellent tutorials `__.
+
+Examples
+~~~~~~~~
+  .. toctree::
+    :maxdepth: 1
+    :glob:
+
+    users/examples/*
diff --git a/doc/interfaces.rst b/doc/interfaces.rst
new file mode 100644
index 0000000000..177f08fca4
--- /dev/null
+++ b/doc/interfaces.rst
@@ -0,0 +1,130 @@
+:orphan:
+
+.. _interfaces:
+
+========================
+Interfaces and Workflows
+========================
+Workflows
+---------
+.. important::
+
+  The workflows that used to live as a module under
+  ``nipype.workflows`` have been migrated to the
+  new project `NiFlows `__.
+
+Interfaces
+----------
+An index of all nipype interfaces is found below.
+Nipype provides some *in-house* interfaces to help with workflow
+management tasks, basic image manipulations, and filesystem/storage
+interfaces:
+
+  * `"Algorithms" `__
+  * `Image manipulation `__
+  * `I/O Operations `__
+  * `Self-reporting interfaces `__
+  * `Utilities `__
+
+Nipype provides interfaces for the following **third-party** tools:
+
+  * `AFNI `__
+    (Analysis of Functional NeuroImages) is a leading software suite of C, Python,
+    R programs and shell scripts primarily developed for the analysis and display of
+    anatomical and functional MRI (fMRI) data.
+  * `ANTs `__
+    (Advanced Normalization ToolS) computes high-dimensional mappings to capture
+    the statistics of brain structure and function.
+  * `BrainSuite `__
+    is a collection of open source software tools that enable largely
+    automated processing of magnetic resonance images (MRI) of the human brain.
+  * `BRU2NII `__
+    is a simple tool for converting Bruker ParaVision MRI data to NIfTI.
+  * `Convert3D `__
+    is a command-line tool for converting 3D images between common file formats.
+  * `Camino `__
+    is an open-source software toolkit for diffusion MRI processing.
+  * `Camino-TrackVis `__
+    allows interoperability between Camino and TrackVis.
+  * `Connectome Mapper (CMP) `__
+    implements a full processing pipeline for creating multi-variate and
+    multi-resolution connectomes with dMRI data.
+  * `dcm2nii `__
+    converts images from the proprietary scanner DICOM format to NIfTI
+  * `DCMStack `__
+    allows series of DICOM images to be stacked into multi-dimensional arrays.
+  * `Diffusion Toolkit `__
+    is a set of command-line tools with a GUI frontend that performs data reconstruction
+    and fiber tracking on diffusion MR images.
+  * `DIPY `__
+    is a free and open source software project for computational neuroanatomy,
+    focusing mainly on diffusion magnetic resonance imaging (dMRI) analysis.
+  * `DTITK `__
+    is a spatial normalization and atlas construction toolkit optimized for examining
+    white matter morphometry using DTI data.
+  * `Elastix `__
+    is a toolbox for rigid and nonrigid registration of images.
+  * `FreeSurfer `__
+    is an open source software suite for processing and analyzing (human) brain MRI images.
+  * `FSL `__
+    is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data.
+  * Matlab `script wrapper `__
+    provides interfaces to integrate matlab scripts within workflows.
+  * `MeshFix `__
+    converts a raw digitized polygon mesh to a clean mesh where all the occurrences
+    of a specific set of "defects" are corrected.
+  * `MINC Toolkit `__
+    contains the most commonly used tools developed at the McConnell Brain Imaging Centre,
+    Montreal Neurological Institute.
+  * `MIPAV (Medical Image Processing, Analysis, and Visualization) `__
+    enables quantitative analysis and visualization of medical images of numerous
+    modalities such as PET, MRI, CT, or microscopy.
+  * `MNE `__
+    is software for exploring, visualizing, and analyzing human neurophysiological
+    data: MEG, EEG, sEEG, ECoG, and more.
+  * MRTrix is a set of tools to perform various types of diffusion MRI analyses, from various
+    forms of tractography through to next-generation group-level analyses
+    (`MRTrix3 `__, and the deprecated
+    `MRTrix version 2 `__).
+  * Nifty Tools:
+    `NiftyFit `__
+    is a software package for multi-parametric model-fitting of 4D MRI;
+    `NiftyReg `__
+    is an open-source software for efficient medical image registration; and
+    `NiftySeg `__
+    contains programs to perform EM based segmentation of images in NIfTI or Analyze format.
+  * `NiLearn `__
+    is a Python module for fast and easy statistical learning on NeuroImaging data.
+  * `NiPy `__
+    is a Python project for analysis of structural and functional neuroimaging data.
+  * `Nitime `__
+    is a library for time-series analysis of data from neuroscience experiments.
+  * `PETPVC `__
+    is a toolbox for :abbr:`PVC (partial volume correction)` of
+    :abbr:`PET (positron emission tomography)` imaging.
+  * `QuickShear `__
+    uses a skull-stripped version of an anatomical image as a reference to deface the
+    unaltered anatomical image.
+  * `SEM Tools `__
+    are useful tools for Structural Equation Modeling.
+  * `SPM `__
+    (Statistical Parametric Mapping) is a software package for the analysis of brain
+    imaging data sequences.
+  * `VistaSoft `__
+    contains Matlab code to perform a variety of analysis on MRI data, including
+    functional MRI and diffusion MRI.
+  * `Connectome Workbench `__
+    is an open source, freely available visualization and discovery tool used to map neuroimaging data,
+    especially data generated by the Human Connectome Project.
+  * `3D Slicer `__
+    is an open source software platform for medical image informatics,
+    image processing, and three-dimensional visualization.
+
+Index of Interfaces
+~~~~~~~~~~~~~~~~~~~
+
+.. toctree::
+   :maxdepth: 3
+
+   api/generated/nipype.algorithms
+   api/generated/nipype.interfaces
\ No newline at end of file
diff --git a/doc/interfaces/.gitignore b/doc/interfaces/.gitignore
deleted file mode 100644
index e324eac91f..0000000000
--- a/doc/interfaces/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/generated
diff --git a/doc/interfaces/index.rst b/doc/interfaces/index.rst
deleted file mode 100644
index 14deeec063..0000000000
--- a/doc/interfaces/index.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-.. _interface-index:
-
-#########################
-Interfaces and Algorithms
-#########################
-
-:Release: |version|
-:Date: |today|
-
diff --git a/doc/links_names.txt b/doc/links_names.txt
index 1a51a6dea3..5f75721f32 100644
--- a/doc/links_names.txt
+++ b/doc/links_names.txt
@@ -98,6 +98,7 @@
 .. _MNE: https://martinos.org/mne/index.html
 .. _ANTS: http://stnava.github.io/ANTs/
 .. _DIPY: http://dipy.org
+.. _BrainSuite: http://brainsuite.org/
 
 .. General software
 .. _gcc: http://gcc.gnu.org
diff --git a/doc/make.bat b/doc/make.bat
deleted file mode 100644
index aa5985eece..0000000000
--- a/doc/make.bat
+++ /dev/null
@@ -1,112 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-set SPHINXBUILD=sphinx-build
-set ALLSPHINXOPTS=-d _build/doctrees %SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
-	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
-	:help
-	echo.Please use `make ^<target^>` where ^<target^> is one of
-	echo.  html      to make standalone HTML files
-	echo.  dirhtml   to make HTML files named index.html in directories
-	echo.  pickle    to make pickle files
-	echo.  json      to make JSON files
-	echo.  htmlhelp  to make HTML files and a HTML help project
-	echo.  qthelp    to make HTML files and a qthelp project
-	echo.  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter
-	echo.  changes   to make an overview over all changed/added/deprecated items
-	echo.  linkcheck to check all external links for integrity
-	echo.  doctest   to run all doctests embedded in the documentation if enabled
-	goto end
-)
-
-if "%1" == "clean" (
-	for /d %%i in (_build\*) do rmdir /q /s %%i
-	del /q /s _build\*
-	goto end
-)
-
-if "%1" == "html" (
-	%SPHINXBUILD% -b html %ALLSPHINXOPTS% _build/html
-	echo.
-	echo.Build finished. The HTML pages are in _build/html.
-	goto end
-)
-
-if "%1" == "dirhtml" (
-	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% _build/dirhtml
-	echo.
-	echo.Build finished. The HTML pages are in _build/dirhtml.
-	goto end
-)
-
-if "%1" == "pickle" (
-	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% _build/pickle
-	echo.
-	echo.Build finished; now you can process the pickle files.
-	goto end
-)
-
-if "%1" == "json" (
-	%SPHINXBUILD% -b json %ALLSPHINXOPTS% _build/json
-	echo.
-	echo.Build finished; now you can process the JSON files.
-	goto end
-)
-
-if "%1" == "htmlhelp" (
-	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% _build/htmlhelp
-	echo.
-	echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in _build/htmlhelp.
-	goto end
-)
-
-if "%1" == "qthelp" (
-	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% _build/qthelp
-	echo.
-	echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in _build/qthelp, like this:
-	echo.^> qcollectiongenerator _build\qthelp\nipype.qhcp
-	echo.To view the help file:
-	echo.^> assistant -collectionFile _build\qthelp\nipype.ghc
-	goto end
-)
-
-if "%1" == "latex" (
-	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% _build/latex
-	echo.
-	echo.Build finished; the LaTeX files are in _build/latex.
-	goto end
-)
-
-if "%1" == "changes" (
-	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% _build/changes
-	echo.
-	echo.The overview file is in _build/changes.
-	goto end
-)
-
-if "%1" == "linkcheck" (
-	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% _build/linkcheck
-	echo.
-	echo.Link check complete; look for any errors in the above output ^
-or in _build/linkcheck/output.txt.
-	goto end
-)
-
-if "%1" == "doctest" (
-	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% _build/doctest
-	echo.
-	echo.Testing of doctests in the sources finished, look at the ^
-results in _build/doctest/output.txt.
-	goto end
-)
-
-:end
diff --git a/doc/quickstart.rst b/doc/quickstart.rst
index ee856d5fb2..60baa443c9 100644
--- a/doc/quickstart.rst
+++ b/doc/quickstart.rst
@@ -1,3 +1,5 @@
+:orphan:
+
 .. _quickstart:
 
 ==========
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 0000000000..057147c5b5
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,8 @@
+dipy
+ipython
+matplotlib
+nbsphinx
+sphinx-argparse
+sphinx>=2.1.2
+sphinxcontrib-apidoc
+sphinxcontrib-napoleon
\ No newline at end of file
diff --git a/doc/searchresults.rst b/doc/searchresults.rst
index d79eaebfbc..06db60bc00 100644
--- a/doc/searchresults.rst
+++ b/doc/searchresults.rst
@@ -1,3 +1,5 @@
+:orphan:
+
 .. This displays the search results from the
    Google Custom Search engine. Don't link to
    it directly.
diff --git a/doc/version.rst b/doc/version.rst
index 35e3e0a60f..cbbed6c7d8 100644
--- a/doc/version.rst
+++ b/doc/version.rst
@@ -1,3 +1,5 @@
+:orphan:
+
 .. _version:
 
 :Release: |version|
diff --git a/examples/dmri_connectivity.py b/examples/dmri_connectivity.py
index fc5b51c362..06d212ebb7 100755
--- a/examples/dmri_connectivity.py
+++ b/examples/dmri_connectivity.py
@@ -26,9 +26,8 @@
     * http://db.tt/1vx4vLeP
 
-Along with `Camino `_,
-`Camino-Trackvis `_, `FSL `_,
-and `Freesurfer `_, you must also have the Connectome File Format
+Along with Camino_, Camino2Trackvis_, FSL_, and FreeSurfer_,
+you must also have the Connectome File Format
 library installed as well as the Connectome Mapper. These are written
 by Stephan Gerhard and can be obtained from:
diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py
index 21d594d3b7..1537d2897f 100644
--- a/examples/dmri_preprocessing.py
+++ b/examples/dmri_preprocessing.py
@@ -130,6 +130,7 @@
 """
 bias = remove_bias()
+
 """
 Connect nodes in workflow
 =========================
@@ -148,6 +149,7 @@
     (prep, bias, [('outputnode.out_file', 'inputnode.in_file'),
                   ('outputnode.out_mask', 'inputnode.in_mask')]),
     (datasource, bias, [('bvals', 'inputnode.in_bval')])])
+
 """
 Run the workflow as command line executable
 """
@@ -155,3 +157,13 @@
 if __name__ == '__main__':
     wf.run()
     wf.write_graph()
+
+"""
+References
+----------
+
+.. [Jeurissen2014] Jeurissen et al., Multi-tissue constrained spherical deconvolution
+   for improved analysis of multi-shell diffusion MRI data.
+   NeuroImage 103:411--426. 2014.
+   doi:`10.1016/j.neuroimage.2014.07.061
+   `__.
+"""
diff --git a/examples/fmri_openfmri.py b/examples/fmri_openfmri.py
deleted file mode 100755
index e69de29bb2..0000000000
diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py
index e4c690421a..178deb42b4 100755
--- a/examples/fmri_spm_auditory.py
+++ b/examples/fmri_spm_auditory.py
@@ -8,7 +8,6 @@
 Introduction
 ============
-
 The fmri_spm_auditory.py recreates the classical workflow described in the
 `SPM8 manual `_ using auditory dataset
 that can be downloaded from http://www.fil.ion.ucl.ac.uk/spm/data/auditory/::
@@ -36,18 +35,17 @@
 # Set the way matlab should be called
 mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
+
 """
+
 Setting up workflows
 --------------------
-
 In this tutorial we will be setting up a hierarchical workflow for spm analysis.
 This will demonstrate how pre-defined workflows can be setup and shared across
 users, projects and labs.
-
 Setup preprocessing workflow
 ----------------------------
-
 This is a generic preprocessing workflow that can be used by different analyses
 """
@@ -56,10 +54,10 @@
 """We strongly encourage to use 4D files insteead of series of 3D for fMRI analyses
 for many reasons (cleanness and saving and filesystem inodes are among them). However,
 the the workflow presented in the SPM8 manual which this tutorial is based on
-uses 3D files. Therefore we leave converting to 4D as an option. We are using `merge_to_4d`
+uses 3D files. Therefore we leave converting to 4D as an option. We are using ``merge_to_4d``
 variable, because switching between 3d and 4d requires some additional steps (explauned later
 on).
-Use :class:`nipype.interfaces.fsl.Merge` to merge a series of 3D files along the time
-dimension creating a 4d file.
+Use :ref:`nipype.interfaces.fsl.utils.Merge` to merge a series
+of 3D files along the time dimension creating a 4D file.
""" merge_to_4d = True @@ -67,26 +65,28 @@ if merge_to_4d: merge = pe.Node(interface=fsl.Merge(), name="merge") merge.inputs.dimension = "t" -"""Use :class:`nipype.interfaces.spm.Realign` for motion correction -and register all images to the mean image. +"""Use :ref:`nipype.interfaces.spm.preprocess.Realign` +for motion correction and register all images to the mean image. """ realign = pe.Node(interface=spm.Realign(), name="realign") -"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid -body registration of the functional data to the structural data. +"""Use :ref:`nipype.interfaces.spm.preprocess.Coregister` +to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(interface=spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' segment = pe.Node(interface=spm.Segment(), name="segment") + """Uncomment the following line for faster execution """ # segment.inputs.gaussians_per_class = [1, 1, 1, 4] + """Warp functional and structural data to SPM's T1 template using -:class:`nipype.interfaces.spm.Normalize`. The tutorial data set -includes the template image, T1.nii. +:ref:`nipype.interfaces.spm.preprocess.Normalize`. +The tutorial data set includes the template image, T1.nii. """ normalize_func = pe.Node(interface=spm.Normalize(), name="normalize_func") @@ -95,16 +95,17 @@ normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" """Smooth the functional data using -:class:`nipype.interfaces.spm.Smooth`. +:ref:`nipype.interfaces.spm.preprocess.Smooth`. """ smooth = pe.Node(interface=spm.Smooth(), name="smooth") -"""`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to -the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data + +"""``write_voxel_sizes`` is the input of the normalize interface that is recommended +to be set to the voxel sizes of the target volume. +There is no need to set it manually since we can infer it from data using the following function: """ - def get_vox_dims(volume): import nibabel as nb from nipype.utils import NUMPY_MMAP @@ -116,8 +117,9 @@ def get_vox_dims(volume): return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] -"""Here we are connecting all the nodes together. Notice that we add the merge node only if you choose -to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal +"""Here we are connecting all the nodes together. +Notice that we add the merge node only if you choose to use 4D. +Also ``get_vox_dims`` function is passed along the input volume of normalise to set the optimal voxel sizes. """ @@ -137,34 +139,38 @@ def get_vox_dims(volume): 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using -:class:`nipype.interfaces.spm.SpecifyModel`. +:ref:`nipype.algorithms.modelgen.SpecifyModel`. """ modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") + """Generate a first level SPM.mat file for analysis -:class:`nipype.interfaces.spm.Level1Design`. +:ref:`nipype.interfaces.spm.model.Level1Design`. 
""" level1design = pe.Node(interface=spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} -"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the -parameters of the model. + +"""Use :ref:`nipype.interfaces.spm.model.EstimateModel` +to determine the parameters of the model. """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") -"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the -first level contrasts specified in a few steps above. + +"""Use :ref:`nipype.interfaces.spm.model.EstimateContrast` +to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node( @@ -182,16 +188,18 @@ def get_vox_dims(volume): """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) -"""Pluging in `functional_runs` is a bit more complicated, because model spec expects a list of `runs`. -Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and -to make one we need a helper function. + +""" +Pluging in ``functional_runs`` is a bit more complicated, +because model spec expects a list of ``runs``. +Every run can be a 4D file or a list of 3D files. +Therefore for 3D analysis we need a list of lists and to make one we need a helper function. """ if merge_to_4d: @@ -209,8 +217,7 @@ def makelist(item): """ Data specific components ------------------------ - -In this tutorial there is only one subject `M00223`. +In this tutorial there is only one subject ``M00223``. Below we set some variables to inform the ``datasource`` about the layout of our data. We specify the location of the data, the subject @@ -231,7 +238,9 @@ def makelist(item): infosource = pe.Node( interface=util.IdentityInterface(fields=['subject_id']), name="infosource") -"""Here we set up iteration over all the subjects. The following line + +""" +Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that it should repeat the analysis on each of the items in the @@ -241,9 +250,10 @@ def makelist(item): """ infosource.iterables = ('subject_id', subject_list) + """ -Now we create a :class:`nipype.interfaces.io.DataGrabber` object and -fill in the information from above about the layout of our data. The +Now we create a :ref:`nipype.interfaces.io.DataGrabber` +object and fill in the information from above about the layout of our data. The :class:`nipype.pipeline.NodeWrapper` module wraps the interface object and provides additional housekeeping and pipeline specific functionality. @@ -257,14 +267,14 @@ def makelist(item): datasource.inputs.template = '%s%s/%s%s_%03d.img' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a structure that provides information about the experimental paradigm. This is used by the -:class:`nipype.interfaces.spm.SpecifyModel` to create the information -necessary to generate an SPM design matrix. 
+:ref:`nipype.algorithms.modelgen.SpecifyModel` +to create the information necessary to generate an SPM design matrix. """ from nipype.interfaces.base import Bunch @@ -272,11 +282,13 @@ def makelist(item): Bunch( conditions=['Task'], onsets=[list(range(6, 84, 12))], durations=[[6]]) ] -"""Setup the contrast structure that needs to be evaluated. This is a + +""" +Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the -following format - [Name,Stat,[list of condition names],[weights on -those conditions]. The condition names must match the `names` listed -in the `subjectinfo` function described above. +following format - ``[Name,Stat,[list of condition names],[weights on +those conditions]``. The condition names must match the ``names`` listed +in the ``subjectinfo`` function described above. """ cont1 = ('active > rest', 'T', ['Task'], [1]) @@ -297,10 +309,10 @@ def makelist(item): l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo l1pipeline.inputs.analysis.contrastestimate.contrasts = contrasts l1pipeline.inputs.analysis.threshold.contrast_index = 1 + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -315,7 +327,7 @@ def makelist(item): pipeline. Thus for this pipeline there will be subject specific sub-directories. -The ``nipype.pipeline.engine.Pipeline.connect`` function creates the +The :func:`nipype.pipeline.engine.Pipeline.connect` function creates the links between the processes, i.e., how data should flow in and out of the processing nodes. """ @@ -332,24 +344,24 @@ def makelist(item): else: level1.connect([(datasource, l1pipeline, [('func', 'preproc.realign.in_files')])]) -""" +""" Setup storage results --------------------- - -Use :class:`nipype.interfaces.io.DataSink` to store selected outputs +Use :ref:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep them. The first step is to create a datasink node and then to connect outputs from the modules above to storage locations. These take the -following form directory_name[.[@]subdir] where parts between [] are +following form ``directory_name[.[@]subdir]`` where parts between ``[]`` are optional. For example 'realign.@mean' below creates a directory called realign in 'l1output/subject_id/' and stores the mean image output from the Realign process in the realign directory. If the @ is left out, then a sub-directory with the name 'mean' would be created and the mean image would be copied to that directory. + """ datasink = pe.Node(interface=nio.DataSink(), name="datasink") @@ -372,14 +384,14 @@ def getstripdir(subject_id): [('analysis.contrastestimate.con_images', 'contrasts.@con'), ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the -analysis on the data the ``nipype.pipeline.engine.Pipeline.Run`` +analysis on the data the :func:`nipype.pipeline.engine.Workflow.run` function needs to be called. 
""" diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py index 587ff9b291..9c66ea7aac 100755 --- a/examples/fmri_spm_dartel.py +++ b/examples/fmri_spm_dartel.py @@ -28,11 +28,10 @@ import nipype.algorithms.rapidart as ra # artifact detection import nipype.algorithms.modelgen as model # model specification import os # system functions -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -45,10 +44,10 @@ # Set the way matlab should be called # mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # mlab.MatlabCommand.set_default_paths('/software/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. @@ -56,18 +55,19 @@ Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses """ preproc = pe.Workflow(name='preproc') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ realign = pe.Node(spm.Realign(), name="realign") realign.inputs.register_to_mean = True + """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. @@ -80,18 +80,21 @@ art.inputs.zintensity_threshold = 3 art.inputs.mask_type = 'file' art.inputs.parameter_source = 'SPM' + """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' + """Normalize and smooth functional data using DARTEL template """ @@ -99,6 +102,7 @@ spm.DARTELNorm2MNI(modulate=True), name='normalize_and_smooth_func') fwhmlist = [4] normalize_and_smooth_func.iterables = ('fwhm', fwhmlist) + """Normalize structural data using DARTEL template """ @@ -117,41 +121,47 @@ 'realigned_files')]), (skullstrip, art, [('mask_file', 'mask_file')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec") modelspec.inputs.concatenate_runs = True + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate") + """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. 
""" selectcontrast = pe.Node(niu.Select(), name="selectcontrast") + """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of the contrast estimate and a background image into one volume. """ @@ -160,6 +170,7 @@ overlaystats.inputs.stat_thresh = (3, 10) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True + """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. """ @@ -181,10 +192,10 @@ (selectcontrast, overlaystats, [('out', 'stat_image')]), (overlaystats, slicestats, [('out_file', 'in_file')])]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') @@ -198,10 +209,10 @@ 'level1design.mask_image'), ('normalize_struct.normalized_files', 'overlaystats.background_image')]), ]) + """ Data specific components ------------------------ - The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And @@ -230,6 +241,7 @@ infosource = pe.Node( niu.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -240,6 +252,7 @@ """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -256,6 +269,7 @@ datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """We need to create a separate workflow to make the DARTEL template """ @@ -268,6 +282,7 @@ struct=[['subject_id', 'struct']]) datasource_dartel.inputs.sort_filelist = True datasource_dartel.inputs.subject_id = subject_list + """Here we make sure that struct files have names corresponding to the subject ids. This way we will be able to pick the right field flows later. """ @@ -281,10 +296,10 @@ dartel_workflow = spm_wf.create_DARTEL_template(name='dartel_workflow') dartel_workflow.inputs.inputspec.template_prefix = "template" + """This function will allow to pick the right field flow for each subject """ - def pickFieldFlow(dartel_flow_fields, subject_id): from nipype.utils.filemanip import split_filename for f in dartel_flow_fields: @@ -294,17 +309,16 @@ def pickFieldFlow(dartel_flow_fields, subject_id): raise Exception - pick_flow = pe.Node( niu.Function( input_names=['dartel_flow_fields', 'subject_id'], output_names=['dartel_flow_field'], function=pickFieldFlow), name="pick_flow") + """ Experimental paradigm specific components ----------------------------------------- - Here we create a function that returns subject-specific information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -312,7 +326,6 @@ def pickFieldFlow(dartel_flow_fields, subject_id): paradigm was used for every participant. """ - def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy @@ -333,7 +346,6 @@ def subjectinfo(subject_id): regressors=None)) return output - """Setup the contrast structure that needs to be evaluated. This is a list of lists. 
The inner list specifies the contrasts and has the following format - [Name,Stat,[list of condition names],[weights on @@ -360,10 +372,10 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -411,11 +423,10 @@ def subjectinfo(subject_id): (infosource, l1pipeline, [(('subject_id', subjectinfo), 'analysis.modelspec.subject_info')]), ]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -457,10 +468,10 @@ def getstripdir(subject_id): (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the @@ -471,10 +482,10 @@ def getstripdir(subject_id): if __name__ == '__main__': level1.run(plugin_args={'n_procs': 4}) level1.write_graph() + """ Setup level 2 pipeline ---------------------- - Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast images across a group of first level subjects. Unlike the previous pipeline that iterated over subjects, this pipeline will iterate over @@ -490,6 +501,7 @@ def getstripdir(subject_id): # iterate over all contrast images l2source.iterables = [('fwhm', fwhmlist), ('con', contrast_ids)] l2source.inputs.sort_filelist = True + """Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a simple statistical analysis of the contrasts from the group of subjects (n=2 in this example). @@ -503,6 +515,7 @@ def getstripdir(subject_id): cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True + """As before, we setup a pipeline to connect these two nodes (l2source -> onesamplettest). """ @@ -516,10 +529,10 @@ def getstripdir(subject_id): [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'), ('residual_image', 'residual_image')]), ]) + """ Execute the second level pipeline --------------------------------- - """ if __name__ == '__main__': diff --git a/examples/fmri_spm_face.py b/examples/fmri_spm_face.py index 5644398d54..bff892bbd8 100755 --- a/examples/fmri_spm_face.py +++ b/examples/fmri_spm_face.py @@ -27,11 +27,10 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.modelgen as model # model specification -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. 
The uncompressed format is required @@ -42,22 +41,20 @@ mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # If SPM is not in your MATLAB path you should add it here # mlab.MatlabCommand.set_default_paths('/path/to/your/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. It one is slightly different then the one used in spm_tutorial2. - Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses - """ preproc = pe.Workflow(name='preproc') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ @@ -65,6 +62,7 @@ realign = pe.Node(interface=spm.Realign(), name="realign") slice_timing = pe.Node(interface=spm.SliceTiming(), name="slice_timing") + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ @@ -74,10 +72,12 @@ segment = pe.Node(interface=spm.Segment(), name="segment") segment.inputs.save_bias_corrected = True + """Uncomment the following line for faster execution """ # segment.inputs.gaussians_per_class = [1, 1, 1, 4] + """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. @@ -88,11 +88,13 @@ normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" + """Smooth the functional data using :class:`nipype.interfaces.spm.Smooth`. """ smooth = pe.Node(interface=spm.Smooth(), name="smooth") + """`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data using the following function: @@ -129,23 +131,26 @@ def get_vox_dims(volume): 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(interface=spm.Level1Design(), name="level1design") + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ @@ -154,6 +159,7 @@ def get_vox_dims(volume): level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ @@ -176,16 +182,17 @@ def pickfirst(l): (('spmT_images', pickfirst), 'stat_image')]), ]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) + """Pluging in `functional_runs` is a bit more complicated, because model spec expects a list of `runs`. Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and to make one we need a helper function. 
@@ -199,10 +206,10 @@ def makelist(item): l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) + """ Data specific components ------------------------ - In this tutorial there is only one subject `M03953`. Below we set some variables to inform the ``datasource`` about the @@ -223,6 +230,7 @@ def makelist(item): infosource = pe.Node( interface=util.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -233,6 +241,7 @@ def makelist(item): """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -249,10 +258,10 @@ def makelist(item): datasource.inputs.template = '%s/s%s_%04d%s.img' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a structure that provides information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -260,6 +269,7 @@ def makelist(item): """ from nipype.interfaces.base import Bunch + """We're importing the onset times from a mat file (found on http://www.fil.ion.ucl.ac.uk/spm/data/face_rep/) """ @@ -280,6 +290,7 @@ def makelist(item): regressor_names=None, regressors=None) ] + """Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the following format - [Name,Stat,[list of condition names],[weights on @@ -322,6 +333,7 @@ def makelist(item): cond1, cond2, cond3, fam1, fam2, fam3, rep1, rep2, rep3, int1, int2, int3, contf1, contf2, contf3, contf4 ] + """Setting up nodes inputs """ @@ -350,6 +362,7 @@ def makelist(item): l1designref.microtime_resolution = slice_timingref.num_slices l1designref.microtime_onset = slice_timingref.ref_slice l1designref.bases = {'hrf': {'derivs': [1, 1]}} + """ The following lines automatically inform SPM to create a default set of contrats for a factorial design. @@ -361,11 +374,13 @@ def makelist(item): l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo l1pipeline.inputs.analysis.contrastestimate.contrasts = contrasts l1pipeline.inputs.analysis.threshold.contrast_index = 1 + """ Use derivative estimates in the non-parametric model """ l1pipeline.inputs.analysis.contrastestimate.use_derivs = True + """ Setting up parametricvariation of the model """ @@ -402,10 +417,10 @@ def makelist(item): [(preproc, paramanalysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters'), (('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -432,11 +447,10 @@ def makelist(item): (datasource, l1pipeline, [('struct', 'preproc.coregister.source'), ('func', 'preproc.realign.in_files')])]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. 
This allows the user to selectively choose important output bits from the analysis and keep @@ -475,10 +489,10 @@ def getstripdir(subject_id): 'paramcontrasts.@con'), ('paramanalysis.contrastestimate.spmT_images', 'paramcontrasts.@T')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the diff --git a/examples/fmri_spm_nested.py b/examples/fmri_spm_nested.py index 534b8c960d..e63b3a2cde 100755 --- a/examples/fmri_spm_nested.py +++ b/examples/fmri_spm_nested.py @@ -28,11 +28,10 @@ from nipype.pipeline import engine as pe # pypeline engine from nipype.algorithms import rapidart as ra # artifact detection from nipype.algorithms import modelgen as model # model specification -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -45,18 +44,16 @@ # Set the way matlab should be called # mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # mlab.MatlabCommand.set_default_paths('/software/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. - Example of how to inline functions in connect() ----------------------------------------------- - """ @@ -66,15 +63,13 @@ def _template_path(in_data): """ - Set-up preprocessing workflow ----------------------------- - This is a generic preprocessing workflow that can be used by different analyses - """ preproc = pe.Workflow(name='preproc') + """ A node called :code:`inputnode` is set to designate the path in which input data are located: @@ -82,12 +77,14 @@ def _template_path(in_data): inputnode = pe.Node( niu.IdentityInterface(fields=['in_data']), name='inputnode') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ realign = pe.Node(spm.Realign(), name="realign") realign.inputs.register_to_mean = True + """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. @@ -100,24 +97,28 @@ def _template_path(in_data): art.inputs.zintensity_threshold = 3 art.inputs.mask_type = 'file' art.inputs.parameter_source = 'SPM' + """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' + """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. """ normalize = pe.Node(spm.Normalize(), name="normalize") + """Smooth the functional data using :class:`nipype.interfaces.spm.Smooth`. 
""" @@ -137,41 +138,47 @@ def _template_path(in_data): (normalize, art, [('normalized_files', 'realigned_files')]), (skullstrip, art, [('mask_file', 'mask_file')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec") modelspec.inputs.concatenate_runs = True + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate") + """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. """ selectcontrast = pe.Node(niu.Select(), name="selectcontrast") + """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of the contrast estimate and a background image into one volume. """ @@ -180,6 +187,7 @@ def _template_path(in_data): overlaystats.inputs.stat_thresh = (3, 10) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True + """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. """ @@ -201,10 +209,10 @@ def _template_path(in_data): (selectcontrast, overlaystats, [('out', 'stat_image')]), (overlaystats, slicestats, [('out_file', 'in_file')])]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') @@ -218,10 +226,10 @@ def _template_path(in_data): 'level1design.mask_image'), ('normalize.normalized_source', 'overlaystats.background_image')]), ]) + """ Data specific components ------------------------ - The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And @@ -236,7 +244,6 @@ def _template_path(in_data): In the example below, run 'f3' is of type 'func' and gets mapped to a nifti filename through a template '%s.nii'. So 'f3' would become 'f3.nii'. - """ # Specify the subject directories @@ -248,6 +255,7 @@ def _template_path(in_data): infosource = pe.Node( niu.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -258,6 +266,7 @@ def _template_path(in_data): """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. 
The @@ -272,10 +281,10 @@ def _template_path(in_data): datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a function that returns subject-specific information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -331,10 +340,10 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -366,11 +375,10 @@ def subjectinfo(subject_id): (infosource, l1pipeline, [(('subject_id', subjectinfo), 'analysis.modelspec.subject_info')]), ]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -410,10 +418,10 @@ def getstripdir(subject_id): (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the @@ -424,10 +432,10 @@ def getstripdir(subject_id): if __name__ == '__main__': level1.run('MultiProc') level1.write_graph() + """ Setup level 2 pipeline ---------------------- - Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast images across a group of first level subjects. Unlike the previous pipeline that iterated over subjects, this pipeline will iterate over @@ -443,6 +451,7 @@ def getstripdir(subject_id): # iterate over all contrast images l2source.iterables = [('fwhm', fwhmlist), ('con', contrast_ids)] l2source.inputs.sort_filelist = True + """Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a simple statistical analysis of the contrasts from the group of subjects (n=2 in this example). @@ -456,6 +465,7 @@ def getstripdir(subject_id): cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True + """As before, we setup a pipeline to connect these two nodes (l2source -> onesamplettest). """ @@ -469,10 +479,10 @@ def getstripdir(subject_id): [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'), ('residual_image', 'residual_image')]), ]) + """ Execute the second level pipeline --------------------------------- - """ if __name__ == '__main__': diff --git a/nipype/__init__.py b/nipype/__init__.py index 74c6a42dd7..18449c5f81 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -1,11 +1,20 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +""" +Information on specific functions, classes, and methods. 
+ +:Release: |version| +:Date: |today| + +Top-level module API +-------------------- + +""" import os from distutils.version import LooseVersion from .info import ( - LONG_DESCRIPTION as __doc__, URL as __url__, STATUS as __status__, __version__, diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 3bbf4632f4..251d196d3f 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -395,8 +395,8 @@ class CompCorInputSpec(BaseInterfaceInputSpec): desc=( "One or more mask files that determines " "ROI (3D). When more that one file is " - "provided `merge_method` or " - "`merge_index` must be provided" + "provided ``merge_method`` or " + "``merge_index`` must be provided" ), ) merge_method = traits.Enum( @@ -407,10 +407,10 @@ class CompCorInputSpec(BaseInterfaceInputSpec): requires=["mask_files"], desc=( "Merge method if multiple masks are " - "present - `union` uses voxels included in" - " at least one input mask, `intersect` " + "present - ``union`` uses voxels included in" + " at least one input mask, ``intersect`` " "uses only voxels present in all input " - "masks, `none` performs CompCor on " + "masks, ``none`` performs CompCor on " "each mask individually" ), ) @@ -418,7 +418,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec): low=0, xor=["merge_method"], requires=["mask_files"], - desc=("Position of mask in `mask_files` to use - " "first is the default."), + desc=("Position of mask in ``mask_files`` to use - " "first is the default."), ) mask_names = traits.List( traits.Str, @@ -436,12 +436,12 @@ class CompCorInputSpec(BaseInterfaceInputSpec): traits.Range(low=1), xor=["variance_threshold"], desc="Number of components to return from the decomposition. If " - "`num_components` is `all`, then all components will be " + "``num_components`` is ``all``, then all components will be " "retained.", ) # 6 for BOLD, 4 for ASL # automatically instantiated to 6 in CompCor below if neither - # `num_components` nor `variance_threshold` is defined (for + # ``num_components`` nor ``variance_threshold`` is defined (for # backward compatibility) variance_threshold = traits.Range( low=0.0, @@ -451,7 +451,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec): xor=["num_components"], desc="Select the number of components to be returned automatically " "based on their ability to explain variance in the dataset. " - "`variance_threshold` is a fractional value between 0 and 1; " + "``variance_threshold`` is a fractional value between 0 and 1; " "the number of components retained will be equal to the minimum " "number of components necessary to explain the provided " "fraction of variance in the masked time series.", @@ -521,13 +521,14 @@ class CompCorOutputSpec(TraitedSpec): class CompCor(SimpleInterface): """ - Interface with core CompCor computation, used in aCompCor and tCompCor + Interface with core CompCor computation, used in aCompCor and tCompCor. 
CompCor provides three pre-filter options, all of which include per-voxel mean removal: - - polynomial: Legendre polynomial basis - - cosine: Discrete cosine basis - - False: mean-removal only + + - ``'polynomial'``: Legendre polynomial basis + - ``'cosine'``: Discrete cosine basis + - ``False``: mean-removal only In the case of ``polynomial`` and ``cosine`` filters, a pre-filter file may be saved with a row for each volume/timepoint, and a column for each @@ -545,7 +546,6 @@ class CompCor(SimpleInterface): Example ------- - >>> ccinterface = CompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' @@ -654,10 +654,10 @@ def _run_interface(self, runtime): else: components_criterion = 6 IFLOGGER.warning( - "`num_components` and `variance_threshold` are " + "``num_components`` and ``variance_threshold`` are " "not defined. Setting number of components to 6 " "for backward compatibility. Please set either " - "`num_components` or `variance_threshold`, as " + "``num_components`` or ``variance_threshold``, as " "this feature may be deprecated in the future." ) @@ -812,7 +812,6 @@ class TCompCor(CompCor): Example ------- - >>> ccinterface = TCompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' @@ -906,7 +905,6 @@ class TSNR(BaseInterface): Example ------- - >>> tsnr = TSNR() >>> tsnr.inputs.in_file = 'functional.nii' >>> res = tsnr.run() # doctest: +SKIP @@ -1240,6 +1238,8 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): A helper function for CompCor + Parameters + ---------- mask_files: a list one or more binary mask files mask_method: enum ('union', 'intersect', 'none') @@ -1247,7 +1247,10 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): mask_index: an integer determines which file to return (mutually exclusive with mask_method) - returns: a list of nibabel images + Returns + ------- + masks: a list of nibabel images + """ if isdefined(mask_index) or not isdefined(mask_method): @@ -1310,49 +1313,48 @@ def compute_noise_components( failure_mode="error", mask_names=None, ): - """Compute the noise components from the imgseries for each mask + """ + Compute the noise components from the image series for each mask. Parameters ---------- imgseries: nibabel image Time series data to be decomposed. mask_images: list - List of nibabel images. Time series data from `img_series` is subset + List of nibabel images. Time series data from ``img_series`` is subset according to the spatial extent of each mask, and the subset data is then decomposed using principal component analysis. Masks should be coextensive with either anatomical or spatial noise ROIs. components_criterion: float Number of noise components to return. If this is a decimal value - between 0 and 1, then `create_noise_components` will instead return + between 0 and 1, then ``create_noise_components`` will instead return the smallest number of components necessary to explain the indicated - fraction of variance. If `components_criterion` is `all`, then all + fraction of variance. If ``components_criterion`` is ``all``, then all components will be returned. filter_type: str - Type of filter to apply to time series before computing - noise components. - 'polynomial' - Legendre polynomial basis - 'cosine' - Discrete cosine (DCT) basis - False - None (mean-removal only) + Type of filter to apply to time series before computing noise components. 
+ + - 'polynomial' - Legendre polynomial basis + - 'cosine' - Discrete cosine (DCT) basis + - False - None (mean-removal only) + failure_mode: str Action to be taken in the event that any decomposition fails to - identify any components. `error` indicates that the routine should + identify any components. ``error`` indicates that the routine should raise an exception and exit, while any other value indicates that the routine should return a matrix of NaN values equal in size to the requested decomposition matrix. mask_names: list or None - List of names for each image in `mask_images`. This should be equal in - length to `mask_images`, with the ith element of `mask_names` naming - the ith element of `mask_images`. - - Filter options: - + List of names for each image in ``mask_images``. This should be equal in + length to ``mask_images``, with the ith element of ``mask_names`` naming + the ith element of ``mask_images``. degree: int Order of polynomial used to remove trends from the timeseries period_cut: float Minimum period (in sec) for DCT high-pass filter repetition_time: float Time (in sec) between volume acquisitions. This must be defined if - the `filter_type` is `cosine`. + the ``filter_type`` is ``cosine``. Returns ------- @@ -1363,6 +1365,7 @@ def compute_noise_components( metadata: OrderedDict{str: numpy array} Dictionary of eigenvalues, fractional explained variances, and cumulative explained variances. + """ basis = np.array([]) if components_criterion == "all": diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index b472039075..7ba401a130 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1,9 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Miscellaneous algorithms -""" +"""Miscellaneous algorithms.""" import os import os.path as op @@ -331,7 +329,7 @@ def replaceext(in_list, ext): return out_list -def matlab2csv(in_array, name, reshape): +def _matlab2csv(in_array, name, reshape): output_array = np.asarray(in_array) if reshape: if len(np.shape(output_array)) > 1: @@ -364,19 +362,19 @@ class Matlab2CSVOutputSpec(TraitedSpec): class Matlab2CSV(BaseInterface): - """Simple interface to save the components of a MATLAB .mat file as a text - file with comma-separated values (CSVs). + """ + Save the components of a MATLAB .mat file as a text file with comma-separated values (CSVs). CSV files are easily loaded in R, for use in statistical processing. 
For further information, see cran.r-project.org/doc/manuals/R-data.pdf Example ------- - >>> from nipype.algorithms import misc >>> mat2csv = misc.Matlab2CSV() >>> mat2csv.inputs.in_file = 'cmatrix.mat' >>> mat2csv.run() # doctest: +SKIP + """ input_spec = Matlab2CSVInputSpec @@ -413,7 +411,7 @@ def _run_interface(self, runtime): variable, type(in_dict[variable]), ) - matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix) + _matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix) elif len(saved_variables) == 1: _, name, _ = split_filename(self.inputs.in_file) variable = saved_variables[0] @@ -423,7 +421,7 @@ def _run_interface(self, runtime): iflogger.info( "...Converting %s to CSV from %s", variable, self.inputs.in_file ) - matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix) + _matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix) else: iflogger.error("No values in the MATLAB file?!") return runtime @@ -587,22 +585,23 @@ class MergeCSVFilesOutputSpec(TraitedSpec): class MergeCSVFiles(BaseInterface): - """This interface is designed to facilitate data loading in the R environment. - It takes input CSV files and merges them into a single CSV file. + """ + Merge several CSV files into a single CSV file. + + This interface is designed to facilitate data loading in the R environment. If provided, it will also incorporate column heading names into the resulting CSV file. - CSV files are easily loaded in R, for use in statistical processing. For further information, see cran.r-project.org/doc/manuals/R-data.pdf Example ------- - >>> from nipype.algorithms import misc >>> mat2csv = misc.MergeCSVFiles() >>> mat2csv.inputs.in_files = ['degree.mat','clustering.mat'] >>> mat2csv.inputs.column_headings = ['degree','clustering'] >>> mat2csv.run() # doctest: +SKIP + """ input_spec = MergeCSVFilesInputSpec @@ -722,17 +721,18 @@ class AddCSVColumnOutputSpec(TraitedSpec): class AddCSVColumn(BaseInterface): - """Short interface to add an extra column and field to a text file + """ + Short interface to add an extra column and field to a text file. Example ------- - >>> from nipype.algorithms import misc >>> addcol = misc.AddCSVColumn() >>> addcol.inputs.in_file = 'degree.csv' >>> addcol.inputs.extra_column_heading = 'group' >>> addcol.inputs.extra_field = 'male' >>> addcol.run() # doctest: +SKIP + """ input_spec = AddCSVColumnInputSpec @@ -788,7 +788,8 @@ class AddCSVRowOutputSpec(TraitedSpec): class AddCSVRow(BaseInterface): - """Simple interface to add an extra row to a csv file + """ + Simple interface to add an extra row to a CSV file. .. note:: Requires `pandas `_ @@ -801,7 +802,6 @@ class AddCSVRow(BaseInterface): Example ------- - >>> from nipype.algorithms import misc >>> addrow = misc.AddCSVRow() >>> addrow.inputs.in_file = 'scores.csv' @@ -810,6 +810,7 @@ class AddCSVRow(BaseInterface): >>> addrow.inputs.subject_id = 'S400' >>> addrow.inputs.list_of_values = [ 0.4, 0.7, 0.3 ] >>> addrow.run() # doctest: +SKIP + """ input_spec = AddCSVRowInputSpec @@ -917,16 +918,17 @@ class CalculateNormalizedMomentsOutputSpec(TraitedSpec): class CalculateNormalizedMoments(BaseInterface): - """Calculates moments of timeseries. + """ + Calculates moments of timeseries. 
Example
     -------
-
    >>> from nipype.algorithms import misc
    >>> skew = misc.CalculateNormalizedMoments()
    >>> skew.inputs.moment = 3
    >>> skew.inputs.timeseries_file = 'timeseries.txt'
    >>> skew.run() # doctest: +SKIP
+
    """

    input_spec = CalculateNormalizedMomentsInputSpec
@@ -995,7 +997,7 @@ class AddNoiseOutputSpec(TraitedSpec):

class AddNoise(BaseInterface):
    """
-    Corrupts with noise the input image
+    Corrupts the input image with noise.


    Example
@@ -1105,21 +1107,23 @@ class NormalizeProbabilityMapSetOutputSpec(TraitedSpec):


class NormalizeProbabilityMapSet(BaseInterface):
-    """ Returns the input tissue probability maps (tpms, aka volume fractions)
-    normalized to sum up 1.0 at each voxel within the mask.
+    """
+    Returns the input tissue probability maps (tpms, aka volume fractions).
+
+    The tissue probability maps are normalized to sum to 1.0 at each voxel within the mask.

    .. note:: Please recall this is not a spatial normalization algorithm

    Example
    -------
-
    >>> from nipype.algorithms import misc
    >>> normalize = misc.NormalizeProbabilityMapSet()
    >>> normalize.inputs.in_files = [ 'tpm_00.nii.gz', 'tpm_01.nii.gz', \
'tpm_02.nii.gz' ]
    >>> normalize.inputs.in_mask = 'tpms_msk.nii.gz'
    >>> normalize.run() # doctest: +SKIP
+
    """

    input_spec = NormalizeProbabilityMapSetInputSpec
@@ -1159,11 +1163,11 @@ class SplitROIsOutputSpec(TraitedSpec):

class SplitROIs(BaseInterface):
    """
    Splits a 3D image in small chunks to enable parallel processing.
+
    ROIs keep time series structure in 4D images.

    Example
    -------
-
    >>> from nipype.algorithms import misc
    >>> rois = misc.SplitROIs()
    >>> rois.inputs.in_file = 'diffusion.nii'
@@ -1215,11 +1219,11 @@ class MergeROIsOutputSpec(TraitedSpec):

class MergeROIs(BaseInterface):
    """
    Recombines the ROI chunks produced by SplitROIs into a single image.
+
    ROIs keep time series structure in 4D images.

    Example
    -------
-
    >>> from nipype.algorithms import misc
    >>> rois = misc.MergeROIs()
    >>> rois.inputs.in_files = ['roi%02d.nii' % i for i in range(1, 6)]
@@ -1500,7 +1504,6 @@ class CalculateMedian(BaseInterface):

    Example
    -------
-
    >>> from nipype.algorithms.misc import CalculateMedian
    >>> mean = CalculateMedian()
    >>> mean.inputs.in_files = 'functional.nii'
diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py
index 2457fe8d2f..e0e9530ff4 100644
--- a/nipype/algorithms/modelgen.py
+++ b/nipype/algorithms/modelgen.py
@@ -6,10 +6,6 @@
 subject analysis of task-based fMRI experiments. In particular it also
 includes algorithms for generating regressors for sparse and sparse-clustered
 acquisition experiments.
-
-These functions include:
-
-  * SpecifyModel: allows specification of sparse and non-sparse models
 """
 from copy import deepcopy
 import csv, math, os
@@ -36,10 +32,11 @@


def gcd(a, b):
-    """Returns the greatest common divisor of two integers
-
-    uses Euclid's algorithm
+    """
+    Return the greatest common divisor of two integers (uses Euclid's algorithm).
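+
+    In sketch form, the recursion underlying Euclid's algorithm (illustrative
+    only; the body of this function is the authoritative implementation)::
+
+        def gcd(a, b):
+            # Euclid: gcd(a, b) == gcd(b, a % b); terminates when b == 0
+            return a if b == 0 else gcd(b, a % b)
+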
+ Examples + -------- >>> gcd(4, 5) 1 >>> gcd(4, 8) @@ -54,31 +51,33 @@ def gcd(a, b): def spm_hrf(RT, P=None, fMRI_T=16): - """ python implementation of spm_hrf - - see spm_hrf for implementation details - - % RT - scan repeat time - % p - parameters of the response function (two gamma - % functions) - % defaults (seconds) - % p(0) - delay of response (relative to onset) 6 - % p(1) - delay of undershoot (relative to onset) 16 - % p(2) - dispersion of response 1 - % p(3) - dispersion of undershoot 1 - % p(4) - ratio of response to undershoot 6 - % p(5) - onset (seconds) 0 - % p(6) - length of kernel (seconds) 32 - % - % hrf - hemodynamic response function - % p - parameters of the response function - - the following code using scipy.stats.distributions.gamma - doesn't return the same result as the spm_Gpdf function :: + """ + python implementation of spm_hrf + + See ``spm_hrf`` for implementation details:: + % RT - scan repeat time + % p - parameters of the response function (two gamma + % functions) + % defaults (seconds) + % p(0) - delay of response (relative to onset) 6 + % p(1) - delay of undershoot (relative to onset) 16 + % p(2) - dispersion of response 1 + % p(3) - dispersion of undershoot 1 + % p(4) - ratio of response to undershoot 6 + % p(5) - onset (seconds) 0 + % p(6) - length of kernel (seconds) 32 + % + % hrf - hemodynamic response function + % p - parameters of the response function + + The following code using ``scipy.stats.distributions.gamma`` + doesn't return the same result as the ``spm_Gpdf`` function:: hrf = gamma.pdf(u, p[0]/p[2], scale=dt/p[2]) - gamma.pdf(u, p[1]/p[3], scale=dt/p[3])/p[4] + Example + ------- >>> print(spm_hrf(2)) [ 0.00000000e+00 8.65660810e-02 3.74888236e-01 3.84923382e-01 2.16117316e-01 7.68695653e-02 1.62017720e-03 -3.06078117e-02 @@ -131,11 +130,11 @@ def orth(x_in, y_in): def scale_timings(timelist, input_units, output_units, time_repetition): - """Scales timings given input and output units (scans/secs) + """ + Scale timings given input and output units (scans/secs). Parameters ---------- - timelist: list of times to scale input_units: 'secs' or 'scans' output_units: Ibid. @@ -157,11 +156,11 @@ def scale_timings(timelist, input_units, output_units, time_repetition): def bids_gen_info( bids_event_files, condition_column="", amplitude_column=None, time_repetition=False, ): - """Generate subject_info structure from a list of BIDS .tsv event files. + """ + Generate a subject_info structure from a list of BIDS .tsv event files. Parameters ---------- - bids_event_files : list of str Filenames of BIDS .tsv event files containing columns including: 'onset', 'duration', and 'trial_type' or the `condition_column` value. @@ -175,8 +174,8 @@ def bids_gen_info( Returns ------- + subject_info: list of Bunch - list of Bunch """ info = [] for bids_event_file in bids_event_files: @@ -210,8 +209,7 @@ def bids_gen_info( def gen_info(run_event_files): - """Generate subject_info structure from a list of event files - """ + """Generate subject_info structure from a list of event files.""" info = [] for i, event_files in enumerate(run_event_files): runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) @@ -245,8 +243,7 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): xor=["subject_info", "event_files", "bids_event_file"], desc="Bunch or List(Bunch) subject-specific " "condition information. 
see " - ":ref:`SpecifyModel` or " - "SpecifyModel.__doc__ for details", + ":ref:`nipype.algorithms.modelgen.SpecifyModel` or for details", ) event_files = InputMultiPath( traits.List(File(exists=True)), @@ -266,17 +263,17 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): bids_condition_column = traits.Str( default_value="trial_type", usedefault=True, - desc="Column of the file passed to `bids_event_file` to the " + desc="Column of the file passed to ``bids_event_file`` to the " "unique values of which events will be assigned" "to regressors", ) bids_amplitude_column = traits.Str( - desc="Column of the file passed to `bids_event_file` " + desc="Column of the file passed to ``bids_event_file`` " "according to which to assign amplitudes to events" ) realignment_parameters = InputMultiPath( File(exists=True), - desc="Realignment parameters returned " "by motion correction algorithm", + desc="Realignment parameters returned by motion correction algorithm", copyfile=False, ) parameter_source = traits.Enum( @@ -290,7 +287,7 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): ) outlier_files = InputMultiPath( File(exists=True), - desc="Files containing scan outlier indices " "that should be tossed", + desc="Files containing scan outlier indices that should be tossed", copyfile=False, ) functional_runs = InputMultiPath( @@ -326,37 +323,38 @@ class SpecifyModelOutputSpec(TraitedSpec): class SpecifyModel(BaseInterface): - """Makes a model specification compatible with spm/fsl designers. + """ + Makes a model specification compatible with spm/fsl designers. The subject_info field should contain paradigm information in the form of a Bunch or a list of Bunch. The Bunch should contain the following information:: - [Mandatory] - - conditions : list of names - - onsets : lists of onsets corresponding to each condition - - durations : lists of durations corresponding to each condition. Should be - left to a single 0 if all events are being modelled as impulses. - - [Optional] - - regressor_names : list of str - list of names corresponding to each column. Should be None if - automatically assigned. - - regressors : list of lists - values for each regressor - must correspond to the number of - volumes in the functional run - - amplitudes : lists of amplitudes for each event. This will be ignored by - SPM's Level1Design. - - The following two (tmod, pmod) will be ignored by any Level1Design class - other than SPM: - - - tmod : lists of conditions that should be temporally modulated. Should - default to None if not being used. - - pmod : list of Bunch corresponding to conditions - - name : name of parametric modulator - - param : values of the modulator - - poly : degree of modulation + [Mandatory] + conditions : list of names + onsets : lists of onsets corresponding to each condition + durations : lists of durations corresponding to each condition. Should be + left to a single 0 if all events are being modelled as impulses. + + [Optional] + regressor_names : list of str + list of names corresponding to each column. Should be None if + automatically assigned. + regressors : list of lists + values for each regressor - must correspond to the number of + volumes in the functional run + amplitudes : lists of amplitudes for each event. This will be ignored by + SPM's Level1Design. + + The following two (tmod, pmod) will be ignored by any Level1Design class + other than SPM: + + tmod : lists of conditions that should be temporally modulated. Should + default to None if not being used. 
+ pmod : list of Bunch corresponding to conditions + - name : name of parametric modulator + - param : values of the modulator + - poly : degree of modulation Alternatively, you can provide information through event files. @@ -367,7 +365,6 @@ class SpecifyModel(BaseInterface): Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifyModel() @@ -379,8 +376,7 @@ class SpecifyModel(BaseInterface): >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) >>> s.inputs.subject_info = [evs_run2, evs_run3] - Using pmod: - + >>> # Using pmod >>> evs_run2 = Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 50], [100, 180]], \ durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \ None]) @@ -397,9 +393,7 @@ class SpecifyModel(BaseInterface): def _generate_standard_design( self, infolist, functional_runs=None, realignment_parameters=None, outliers=None ): - """ Generates a standard design matrix paradigm given information about - each run - """ + """Generate a standard design matrix paradigm given information about each run.""" sessinfo = [] output_units = "secs" if "output_units" in self.inputs.traits(): @@ -561,26 +555,26 @@ class SpecifySPMModelInputSpec(SpecifyModelInputSpec): concatenate_runs = traits.Bool( False, usedefault=True, - desc="Concatenate all runs to look like a " "single session.", + desc="Concatenate all runs to look like a single session.", ) output_units = traits.Enum( "secs", "scans", usedefault=True, - desc="Units of design event onsets and durations " "(secs or scans)", + desc="Units of design event onsets and durations (secs or scans)", ) class SpecifySPMModel(SpecifyModel): - """Adds SPM specific options to SpecifyModel + """Add SPM specific options to SpecifyModel + + Adds: - adds: - concatenate_runs - output_units Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySPMModel() @@ -734,7 +728,7 @@ def _generate_design(self, infolist=None): class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float( - 0, mandatory=True, desc="Time in seconds to acquire a single " "image volume" + 0, mandatory=True, desc="Time in seconds to acquire a single image volume" ) volumes_in_cluster = traits.Range( 1, usedefault=True, desc="Number of scan volumes in a cluster" @@ -745,7 +739,7 @@ class SpecifySparseModelInputSpec(SpecifyModelInputSpec): ) use_temporal_deriv = traits.Bool( requires=["model_hrf"], - desc="Create a temporal derivative in " "addition to regular regressor", + desc="Create a temporal derivative in addition to regular regressor", ) scale_regressors = traits.Bool( True, desc="Scale regressors by the peak", usedefault=True @@ -754,7 +748,7 @@ class SpecifySparseModelInputSpec(SpecifyModelInputSpec): 0.0, desc="Start of scanning relative to onset of run in secs", usedefault=True ) save_plot = traits.Bool( - desc=("Save plot of sparse design calculation " "(requires matplotlib)") + desc=("Save plot of sparse design calculation (requires matplotlib)") ) @@ -764,18 +758,10 @@ class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec): class SpecifySparseModel(SpecifyModel): - """ Specify a sparse model that is compatible with spm/fsl designers - - References - ---------- - - .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of - sparse-sampling fMRI experiments. Front. Neurosci. 
7:55 - http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract + """ Specify a sparse model that is compatible with SPM/FSL designers [1]_. Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySparseModel() @@ -785,11 +771,17 @@ class SpecifySparseModel(SpecifyModel): >>> s.inputs.time_acquisition = 2 >>> s.inputs.high_pass_filter_cutoff = 128. >>> s.inputs.model_hrf = True - >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], \ -durations=[[1]]) - >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \ -durations=[[1]]) - >>> s.inputs.subject_info = [evs_run2, evs_run3] + >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], + ... durations=[[1]]) + >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], + ... durations=[[1]]) + >>> s.inputs.subject_info = [evs_run2, evs_run3] # doctest: +SKIP + + References + ---------- + .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of + sparse-sampling fMRI experiments. Front. Neurosci. 7:55 + http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract """ diff --git a/nipype/info.py b/nipype/info.py index e3f93832b3..aa0d9ee2a2 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -159,7 +159,16 @@ def get_nipype_gitversion(): EXTRA_REQUIRES = { "data": ["datalad"], - "doc": ["Sphinx>=1.4", "numpydoc", "matplotlib", "pydotplus", "pydot>=1.2.3"], + "doc": [ + 'dipy', + 'ipython', + 'matplotlib', + 'nbsphinx', + 'sphinx-argparse', + 'sphinx>=2.1.2', + 'sphinxcontrib-apidoc', + 'sphinxcontrib-napoleon', + ], "duecredit": ["duecredit"], "nipy": ["nitime", "nilearn<0.5.0", "dipy", "nipy", "matplotlib"], "profiler": ["psutil>=5.0"], diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 3e0c02eda7..172f8f3902 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -138,7 +138,8 @@ class AlignEpiAnatPyOutputSpec(TraitedSpec): class AlignEpiAnatPy(AFNIPythonCommand): - """Align EPI to anatomical datasets or vice versa + """Align EPI to anatomical datasets or vice versa. + This Python script computes the alignment between two datasets, typically an EPI and an anatomical structural dataset, and applies the resulting transformation to one or the other to bring them into alignment. @@ -148,11 +149,12 @@ class AlignEpiAnatPy(AFNIPythonCommand): script combines multiple transformations, thereby minimizing the amount of interpolation applied to the data. - Basic Usage: - align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5 + Basic Usage:: + + align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5 - The user must provide EPI and anatomical datasets and specify the EPI - sub-brick to use as a base in the alignment. + The user must provide :abbr:`EPI (echo-planar imaging)` and anatomical datasets + and specify the EPI sub-brick to use as a base in the alignment. Internally, the script always aligns the anatomical to the EPI dataset, and the resulting transformation is saved to a 1D file. @@ -166,11 +168,8 @@ class AlignEpiAnatPy(AFNIPythonCommand): and requested (with options to turn on and off each of the steps) in order to create the aligned datasets. - For complete details, see the `align_epi_anat.py' Documentation. 
- `_ - Examples - ======== + -------- >>> from nipype.interfaces import afni >>> al_ea = afni.AlignEpiAnatPy() >>> al_ea.inputs.anat = "structural.nii" @@ -183,6 +182,12 @@ class AlignEpiAnatPy(AFNIPythonCommand): >>> al_ea.cmdline # doctest: +ELLIPSIS 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' >>> res = allineate.run() # doctest: +SKIP + + See Also + -------- + For complete details, see the `align_epi_anat.py documentation. + `__. + """ _cmd = "align_epi_anat.py" @@ -547,8 +552,7 @@ class Allineate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> allineate = afni.Allineate() >>> allineate.inputs.in_file = 'functional.nii' @@ -651,8 +655,7 @@ class AutoTcorrelate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> corr = afni.AutoTcorrelate() >>> corr.inputs.in_file = 'functional.nii' @@ -718,8 +721,7 @@ class Automask(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> automask = afni.Automask() >>> automask.inputs.in_file = 'functional.nii' @@ -750,64 +752,70 @@ class AutoTLRCInputSpec(CommandLineInputSpec): copyfile=False, ) base = traits.Str( - desc=" Reference anatomical volume" - " Usually this volume is in some standard space like" - " TLRC or MNI space and with afni dataset view of" - " (+tlrc)." - " Preferably, this reference volume should have had" - " the skull removed but that is not mandatory." - " AFNI's distribution contains several templates." - ' For a longer list, use "whereami -show_templates"' - "TT_N27+tlrc --> Single subject, skull stripped volume." - " This volume is also known as " - " N27_SurfVol_NoSkull+tlrc elsewhere in " - " AFNI and SUMA land." - " (www.loni.ucla.edu, www.bic.mni.mcgill.ca)" - " This template has a full set of FreeSurfer" - " (surfer.nmr.mgh.harvard.edu)" - " surface models that can be used in SUMA. " - " For details, see Talairach-related link:" - " https://afni.nimh.nih.gov/afni/suma" - "TT_icbm452+tlrc --> Average volume of 452 normal brains." - " Skull Stripped. (www.loni.ucla.edu)" - "TT_avg152T1+tlrc --> Average volume of 152 normal brains." - " Skull Stripped.(www.bic.mni.mcgill.ca)" - "TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1" - " TT_avg152 and TT_EPI volume sources are from" - " SPM's distribution. (www.fil.ion.ucl.ac.uk/spm/)" - "If you do not specify a path for the template, the script" - "will attempt to locate the template AFNI's binaries directory." - "NOTE: These datasets have been slightly modified from" - " their original size to match the standard TLRC" - " dimensions (Jean Talairach and Pierre Tournoux" - " Co-Planar Stereotaxic Atlas of the Human Brain" - " Thieme Medical Publishers, New York, 1988). " - " That was done for internal consistency in AFNI." - " You may use the original form of these" - " volumes if you choose but your TLRC coordinates" - " will not be consistent with AFNI's TLRC database" - " (San Antonio Talairach Daemon database), for example.", + desc="""\ +Reference anatomical volume. +Usually this volume is in some standard space like +TLRC or MNI space and with afni dataset view of +(+tlrc). +Preferably, this reference volume should have had +the skull removed but that is not mandatory. +AFNI's distribution contains several templates. +For a longer list, use "whereami -show_templates" +TT_N27+tlrc --> Single subject, skull stripped volume. 
+This volume is also known as +N27_SurfVol_NoSkull+tlrc elsewhere in +AFNI and SUMA land. +(www.loni.ucla.edu, www.bic.mni.mcgill.ca) +This template has a full set of FreeSurfer +(surfer.nmr.mgh.harvard.edu) +surface models that can be used in SUMA. +For details, see Talairach-related link: +https://afni.nimh.nih.gov/afni/suma +TT_icbm452+tlrc --> Average volume of 452 normal brains. +Skull Stripped. (www.loni.ucla.edu) +TT_avg152T1+tlrc --> Average volume of 152 normal brains. +Skull Stripped.(www.bic.mni.mcgill.ca) +TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1 +TT_avg152 and TT_EPI volume sources are from +SPM's distribution. (www.fil.ion.ucl.ac.uk/spm/) +If you do not specify a path for the template, the script +will attempt to locate the template AFNI's binaries directory. +NOTE: These datasets have been slightly modified from +their original size to match the standard TLRC +dimensions (Jean Talairach and Pierre Tournoux +Co-Planar Stereotaxic Atlas of the Human Brain +Thieme Medical Publishers, New York, 1988). +That was done for internal consistency in AFNI. +You may use the original form of these +volumes if you choose but your TLRC coordinates +will not be consistent with AFNI's TLRC database +(San Antonio Talairach Daemon database), for example.""", mandatory=True, argstr="-base %s", ) no_ss = traits.Bool( - desc="Do not strip skull of input data set" - "(because skull has already been removed" - "or because template still has the skull)" - "NOTE: The -no_ss option is not all that optional." - " Here is a table of when you should and should not use -no_ss" - " Template Template" - " WITH skull WITHOUT skull" - " Dset." - " WITH skull -no_ss xxx " - " " - " WITHOUT skull No Cigar -no_ss" - " " - " Template means: Your template of choice" - " Dset. means: Your anatomical dataset" - " -no_ss means: Skull stripping should not be attempted on Dset" - " xxx means: Don't put anything, the script will strip Dset" - " No Cigar means: Don't try that combination, it makes no sense.", + desc="""\ +Do not strip skull of input data set +(because skull has already been removed +or because template still has the skull) +NOTE: The ``-no_ss`` option is not all that optional. +Here is a table of when you should and should not use ``-no_ss`` + + +------------------+------------+---------------+ + | Dataset | Template | + +==================+============+===============+ + | | w/ skull | wo/ skull | + +------------------+------------+---------------+ + | WITH skull | ``-no_ss`` | xxx | + +------------------+------------+---------------+ + | WITHOUT skull | No Cigar | ``-no_ss`` | + +------------------+------------+---------------+ + +Template means: Your template of choice +Dset. 
means: Your anatomical dataset +``-no_ss`` means: Skull stripping should not be attempted on Dset +xxx means: Don't put anything, the script will strip Dset +No Cigar means: Don't try that combination, it makes no sense.""", argstr="-no_ss", ) @@ -819,7 +827,7 @@ class AutoTLRC(AFNICommand): `_ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> autoTLRC = afni.AutoTLRC() >>> autoTLRC.inputs.in_file = 'structural.nii' @@ -931,8 +939,7 @@ class Bandpass(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> from nipype.testing import example_data >>> bandpass = afni.Bandpass() @@ -1000,8 +1007,7 @@ class BlurInMask(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> bim = afni.BlurInMask() >>> bim.inputs.in_file = 'functional.nii' @@ -1056,8 +1062,7 @@ class BlurToFWHM(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> blur = afni.preprocess.BlurToFWHM() >>> blur.inputs.in_file = 'epi.nii' @@ -1113,8 +1118,7 @@ class ClipLevel(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces.afni import preprocess >>> cliplevel = preprocess.ClipLevel() >>> cliplevel.inputs.in_file = 'anatomical.nii' @@ -1196,8 +1200,7 @@ class DegreeCentrality(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> degree = afni.DegreeCentrality() >>> degree.inputs.in_file = 'functional.nii' @@ -1251,8 +1254,7 @@ class Despike(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> despike = afni.Despike() >>> despike.inputs.in_file = 'functional.nii' @@ -1292,8 +1294,7 @@ class Detrend(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> detrend = afni.Detrend() >>> detrend.inputs.in_file = 'functional.nii' @@ -1348,7 +1349,8 @@ class ECMInputSpec(CentralityInputSpec): ) eps = traits.Float( desc="sets the stopping criterion for the power iteration; " - "l2|v_old - v_new| < eps*|v_old|; default = 0.001", + ":math:`l2\\|v_\\text{old} - v_\\text{new}\\| < eps\\|v_\\text{old}\\|`; " + "default = 0.001", argstr="-eps %f", ) max_iter = traits.Int( @@ -1371,8 +1373,7 @@ class ECM(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> ecm = afni.ECM() >>> ecm.inputs.in_file = 'functional.nii' @@ -1428,8 +1429,7 @@ class Fim(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> fim = afni.Fim() >>> fim.inputs.in_file = 'functional.nii' @@ -1480,8 +1480,7 @@ class Fourier(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> fourier = afni.Fourier() >>> fourier.inputs.in_file = 'functional.nii' @@ -1546,8 +1545,7 @@ class Hist(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> hist = afni.Hist() >>> hist.inputs.in_file = 'functional.nii' @@ -1608,8 +1606,7 @@ class LFCD(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> lfcd = afni.LFCD() >>> lfcd.inputs.in_file = 'functional.nii' @@ -1657,8 +1654,7 @@ class Maskave(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> maskave = afni.Maskave() >>> maskave.inputs.in_file = 'functional.nii' @@ -1712,8 +1708,7 @@ class Means(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> means = afni.Means() >>> means.inputs.in_file_a 
= 'im1.nii' @@ -1782,7 +1777,7 @@ class OutlierCountInputSpec(CommandLineInputSpec): False, usedefault=True, argstr="-range", - desc="write out the median + 3.5 MAD of outlier count with each " "timepoint", + desc="write out the median + 3.5 MAD of outlier count with each timepoint", ) save_outliers = traits.Bool(False, usedefault=True, desc="enables out_file option") outliers_file = File( @@ -1820,8 +1815,7 @@ class OutlierCount(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> toutcount = afni.OutlierCount() >>> toutcount.inputs.in_file = 'functional.nii' @@ -1909,7 +1903,7 @@ class QualityIndexInputSpec(CommandLineInputSpec): False, usedefault=True, argstr="-range", - desc="write out the median + 3.5 MAD of outlier count with each " "timepoint", + desc="write out the median + 3.5 MAD of outlier count with each timepoint", ) out_file = File( name_template="%s_tqual", @@ -1926,16 +1920,12 @@ class QualityIndexOutputSpec(TraitedSpec): class QualityIndex(CommandLine): - """Computes a `quality index' for each sub-brick in a 3D+time dataset. + """Computes a quality index for each sub-brick in a 3D+time dataset. The output is a 1D time series with the index for each sub-brick. The results are written to stdout. - For complete details, see the `3dTqual Documentation - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tqual = afni.QualityIndex() >>> tqual.inputs.in_file = 'functional.nii' @@ -1943,6 +1933,11 @@ class QualityIndex(CommandLine): '3dTqual functional.nii > functional_tqual' >>> res = tqual.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dTqual Documentation + `_ + """ _cmd = "3dTqual" @@ -2032,28 +2027,33 @@ class ROIStatsInputSpec(CommandLineInputSpec): ] stat = InputMultiObject( traits.Enum(_stat_names), - desc="statistics to compute. Options include: " - " * mean = Compute the mean using only non_zero voxels." - " Implies the opposite for the mean computed " - " by default.\n" - " * median = Compute the median of nonzero voxels\n" - " * mode = Compute the mode of nonzero voxels." - " (integral valued sets only)\n" - " * minmax = Compute the min/max of nonzero voxels\n" - " * sum = Compute the sum using only nonzero voxels.\n" - " * voxels = Compute the number of nonzero voxels\n" - " * sigma = Compute the standard deviation of nonzero" - " voxels\n" - "Statistics that include zero-valued voxels:\n" - " * zerominmax = Compute the min/max of all voxels.\n" - " * zerosigma = Compute the standard deviation of all" - " voxels.\n" - " * zeromedian = Compute the median of all voxels.\n" - " * zeromode = Compute the mode of all voxels.\n" - " * summary = Only output a summary line with the grand " - " mean across all briks in the input dataset." - " This option cannot be used with nomeanout.\n" - "More that one option can be specified.", + desc="""\ +Statistics to compute. Options include: + + * mean = Compute the mean using only non_zero voxels. + Implies the opposite for the mean computed + by default. + * median = Compute the median of nonzero voxels + * mode = Compute the mode of nonzero voxels. + (integral valued sets only) + * minmax = Compute the min/max of nonzero voxels + * sum = Compute the sum using only nonzero voxels. + * voxels = Compute the number of nonzero voxels + * sigma = Compute the standard deviation of nonzero + voxels + +Statistics that include zero-valued voxels: + + * zerominmax = Compute the min/max of all voxels. 
+  * zerosigma = Compute the standard deviation of all
+    voxels.
+  * zeromedian = Compute the median of all voxels.
+  * zeromode = Compute the mode of all voxels.
+  * summary = Only output a summary line with the grand
+    mean across all briks in the input dataset.
+    This option cannot be used with nomeanout.
+
+More than one option can be specified.""",
        argstr="%s...",
    )
    out_file = File(
@@ -2077,8 +2077,7 @@ class ROIStats(AFNICommandBase):
       `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> roistats = afni.ROIStats()
    >>> roistats.inputs.in_file = 'functional.nii'
@@ -2192,7 +2191,7 @@ class Retroicor(AFNICommand):
       `_

    Examples
-    ========
+    --------
    >>> from nipype.interfaces import afni
    >>> ret = afni.Retroicor()
    >>> ret.inputs.in_file = 'functional.nii'
@@ -2285,8 +2284,7 @@ class Seg(AFNICommandBase):
       `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces.afni import preprocess
    >>> seg = preprocess.Seg()
    >>> seg.inputs.in_file = 'structural.nii'
@@ -2343,8 +2341,7 @@ class SkullStrip(AFNICommand):
       `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> skullstrip = afni.SkullStrip()
    >>> skullstrip.inputs.in_file = 'functional.nii'
@@ -2531,8 +2528,7 @@ class TCorrMap(AFNICommand):
       `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> tcm = afni.TCorrMap()
    >>> tcm.inputs.in_file = 'functional.nii'
@@ -2598,8 +2594,7 @@ class TCorrelate(AFNICommand):
       `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> tcorrelate = afni.TCorrelate()
    >>> tcorrelate.inputs.xset= 'u_rc1s1_Template.nii'
@@ -2637,7 +2632,7 @@ class TNormInputSpec(AFNICommandInputSpec):
        desc="L2 normalize (sum of squares = 1) [DEFAULT]", argstr="-norm2"
    )
    normR = traits.Bool(
-        desc="normalize so sum of squares = number of time points * e.g., so RMS = 1.",
+        desc="normalize so sum of squares = number of time points \\* e.g., so RMS = 1.",
        argstr="-normR",
    )
    norm1 = traits.Bool(
@@ -2647,28 +2642,28 @@ class TNormInputSpec(AFNICommandInputSpec):
        desc="Scale so max absolute value = 1 (L_infinity norm)", argstr="-normx"
    )
    polort = traits.Int(
-        desc="""Detrend with polynomials of order p before normalizing
-        [DEFAULT = don't do this]
-        * Use '-polort 0' to remove the mean, for example""",
+        desc="""\
+Detrend with polynomials of order p before normalizing [DEFAULT = don't do this].
+Use '-polort 0' to remove the mean, for example""",
        argstr="-polort %s",
    )
    L1fit = traits.Bool(
-        desc="""Detrend with L1 regression (L2 is the default)
-        * This option is here just for the hell of it""",
+        desc="""\
+Detrend with L1 regression (L2 is the default)
+This option is here just for the hell of it""",
        argstr="-L1fit",
    )


class TNorm(AFNICommand):
-    """Shifts voxel time series from input so that seperate slices are aligned
+    """Shifts voxel time series from input so that separate slices are aligned
    to the same temporal origin.

    For complete details, see the `3dTnorm Documentation.
    `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> tnorm = afni.TNorm()
    >>> tnorm.inputs.in_file = 'functional.nii'
@@ -2702,104 +2697,128 @@ class TProjectInputSpec(AFNICommandInputSpec):
        name_source="in_file",
    )
    censor = File(
-        desc="""filename of censor .1D time series
-        * This is a file of 1s and 0s, indicating which
-        time points are to be included (1) and which are
-        to be excluded (0).""",
+        desc="""\
+Filename of censor .1D time series.
+This is a file of 1s and 0s, indicating which
+time points are to be included (1) and which are
+to be excluded (0).""",
        argstr="-censor %s",
        exists=True,
    )
    censortr = traits.List(
        traits.Str(),
-        desc="""list of strings that specify time indexes
-        to be removed from the analysis. Each string is
-        of one of the following forms:
-        37 => remove global time index #37
-        2:37 => remove time index #37 in run #2
-        37..47 => remove global time indexes #37-47
-        37-47 => same as above
-        2:37..47 => remove time indexes #37-47 in run #2
-        *:0-2 => remove time indexes #0-2 in all runs
-        +Time indexes within each run start at 0.
-        +Run indexes start at 1 (just be to confusing).
-        +N.B.: 2:37,47 means index #37 in run #2 and
-        global time index 47; it does NOT mean
-        index #37 in run #2 AND index #47 in run #2.""",
+        desc="""\
+List of strings that specify time indexes
+to be removed from the analysis. Each string is
+of one of the following forms:
+
+* ``37`` => remove global time index #37
+* ``2:37`` => remove time index #37 in run #2
+* ``37..47`` => remove global time indexes #37-47
+* ``37-47`` => same as above
+* ``2:37..47`` => remove time indexes #37-47 in run #2
+* ``*:0-2`` => remove time indexes #0-2 in all runs
+
+  * Time indexes within each run start at 0.
+  * Run indexes start at 1 (just to be confusing).
+  * N.B.: 2:37,47 means index #37 in run #2 and
+    global time index 47; it does NOT mean
+    index #37 in run #2 AND index #47 in run #2.
+
+""",
        argstr="-CENSORTR %s",
    )
    cenmode = traits.Enum(
        "KILL",
        "ZERO",
        "NTRP",
-        desc="""specifies how censored time points are treated in
-        the output dataset:
-        + mode = ZERO ==> put zero values in their place
-        ==> output datset is same length as input
-        + mode = KILL ==> remove those time points
-        ==> output dataset is shorter than input
-        + mode = NTRP ==> censored values are replaced by interpolated
-        neighboring (in time) non-censored values,
-        BEFORE any projections, and then the
-        analysis proceeds without actual removal
-        of any time points -- this feature is to
-        keep the Spanish Inquisition happy.
-        * The default mode is KILL !!!""",
+        desc="""\
+Specifies how censored time points are treated in
+the output dataset:
+
+* mode = ZERO -- put zero values in their place;
+  output dataset is same length as input
+* mode = KILL -- remove those time points;
+  output dataset is shorter than input
+* mode = NTRP -- censored values are replaced by interpolated
+  neighboring (in time) non-censored values,
+  BEFORE any projections, and then the
+  analysis proceeds without actual removal
+  of any time points -- this feature is to
+  keep the Spanish Inquisition happy.
+* The default mode is KILL !!!
+
+""",
        argstr="-cenmode %s",
    )
    concat = File(
-        desc="""The catenation file, as in 3dDeconvolve, containing the
-        TR indexes of the start points for each contiguous run
-        within the input dataset (the first entry should be 0).
-        ++ Also as in 3dDeconvolve, if the input dataset is
-        automatically catenated from a collection of datasets,
-        then the run start indexes are determined directly,
-        and '-concat' is not needed (and will be ignored).
-        ++ Each run must have at least 9 time points AFTER
-        censoring, or the program will not work!
-        ++ The only use made of this input is in setting up
-        the bandpass/stopband regressors.
-        ++ '-ort' and '-dsort' regressors run through all time
-        points, as read in. If you want separate projections
-        in each run, then you must either break these ort files
-        into appropriate components, OR you must run 3dTproject
-        for each run separately, using the appropriate pieces
-        from the ort files via the '{...}' selector for the
-        1D files and the '[...]' selector for the datasets.""",
+        desc="""\
+The catenation file, as in 3dDeconvolve, containing the
+TR indexes of the start points for each contiguous run
+within the input dataset (the first entry should be 0).
+
+* Also as in 3dDeconvolve, if the input dataset is
+  automatically catenated from a collection of datasets,
+  then the run start indexes are determined directly,
+  and '-concat' is not needed (and will be ignored).
+* Each run must have at least 9 time points AFTER
+  censoring, or the program will not work!
+* The only use made of this input is in setting up
+  the bandpass/stopband regressors.
+* '-ort' and '-dsort' regressors run through all time
+  points, as read in. If you want separate projections
+  in each run, then you must either break these ort files
+  into appropriate components, OR you must run 3dTproject
+  for each run separately, using the appropriate pieces
+  from the ort files via the ``{...}`` selector for the
+  1D files and the ``[...]`` selector for the datasets.
+
+""",
        exists=True,
        argstr="-concat %s",
    )
    noblock = traits.Bool(
-        desc="""Also as in 3dDeconvolve, if you want the program to treat
-        an auto-catenated dataset as one long run, use this option.
-        ++ However, '-noblock' will not affect catenation if you use
-        the '-concat' option.""",
+        desc="""\
+Also as in 3dDeconvolve, if you want the program to treat
+an auto-catenated dataset as one long run, use this option.
+However, '-noblock' will not affect catenation if you use
+the '-concat' option.""",
        argstr="-noblock",
    )
    ort = File(
-        desc="""Remove each column in file
-        ++ Each column will have its mean removed.""",
+        desc="""\
+Remove each column in file.
+Each column will have its mean removed.""",
        exists=True,
        argstr="-ort %s",
    )
    polort = traits.Int(
-        desc="""Remove polynomials up to and including degree pp.
-        ++ Default value is 2.
-        ++ It makes no sense to use a value of pp greater than
-        2, if you are bandpassing out the lower frequencies!
-        ++ For catenated datasets, each run gets a separate set
-        set of pp+1 Legendre polynomial regressors.
-        ++ Use of -polort -1 is not advised (if data mean != 0),
-        even if -ort contains constant terms, as all means are
-        removed.""",
+        desc="""\
+Remove polynomials up to and including degree pp.
+
+* Default value is 2.
+* It makes no sense to use a value of pp greater than
+  2, if you are bandpassing out the lower frequencies!
+* For catenated datasets, each run gets a separate
+  set of pp+1 Legendre polynomial regressors.
+* Use of -polort -1 is not advised (if data mean != 0),
+  even if -ort contains constant terms, as all means are
+  removed.
+
+""",
        argstr="-polort %d",
    )
    dsort = InputMultiObject(
        File(exists=True, copyfile=False),
        argstr="-dsort %s...",
-        desc="""Remove the 3D+time time series in dataset fset.
-        ++ That is, 'fset' contains a different nuisance time
-        series for each voxel (e.g., from AnatICOR).
-        ++ Multiple -dsort options are allowed.""",
+        desc="""\
+Remove the 3D+time time series in dataset fset.
+
+* That is, 'fset' contains a different nuisance time
+  series for each voxel (e.g., from AnatICOR).
+* Multiple -dsort options are allowed.
+ +""", ) bandpass = traits.Tuple( traits.Float, @@ -2814,31 +2833,38 @@ class TProjectInputSpec(AFNICommandInputSpec): argstr="-stopband %g %g", ) TR = traits.Float( - desc="""Use time step dd for the frequency calculations, - rather than the value stored in the dataset header.""", + desc="""\ +Use time step dd for the frequency calculations, +rather than the value stored in the dataset header.""", argstr="-TR %g", ) mask = File( exists=True, - desc="""Only operate on voxels nonzero in the mset dataset. - ++ Voxels outside the mask will be filled with zeros. - ++ If no masking option is given, then all voxels - will be processed.""", + desc="""\ +Only operate on voxels nonzero in the mset dataset. + +* Voxels outside the mask will be filled with zeros. +* If no masking option is given, then all voxels + will be processed. + +""", argstr="-mask %s", ) automask = traits.Bool( desc="""Generate a mask automatically""", xor=["mask"], argstr="-automask" ) blur = traits.Float( - desc="""Blur (inside the mask only) with a filter that has - width (FWHM) of fff millimeters. - ++ Spatial blurring (if done) is after the time - series filtering.""", + desc="""\ +Blur (inside the mask only) with a filter that has +width (FWHM) of fff millimeters. +Spatial blurring (if done) is after the time +series filtering.""", argstr="-blur %g", ) norm = traits.Bool( - desc="""Normalize each output time series to have sum of - squares = 1. This is the LAST operation.""", + desc=""" +Normalize each output time series to have sum of +squares = 1. This is the LAST operation.""", argstr="-norm", ) @@ -2848,18 +2874,14 @@ class TProject(AFNICommand): This program projects (detrends) out various 'nuisance' time series from each voxel in the input dataset. Note that all the projections are done via linear regression, including the frequency-based options such - as '-passband'. In this way, you can bandpass time-censored data, and at + as ``-passband``. In this way, you can bandpass time-censored data, and at the same time, remove other time series of no interest (e.g., physiological estimates, motion parameters). Shifts voxel time series from input so that seperate slices are aligned to the same temporal origin. - For complete details, see the `3dTproject Documentation. - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tproject = afni.TProject() >>> tproject.inputs.in_file = 'functional.nii' @@ -2871,6 +2893,11 @@ class TProject(AFNICommand): '3dTproject -input functional.nii -automask -bandpass 0.00667 99999 -polort 3 -prefix projected.nii.gz' >>> res = tproject.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dTproject Documentation. 
+    `__
+
     """
 
     _cmd = "3dTproject"
@@ -2972,8 +2999,7 @@ class TShift(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     Slice timing details may be specified explicitly via the ``slice_timing``
     input:
 
@@ -3108,28 +3134,28 @@ class TSmoothInputSpec(AFNICommandInputSpec):
         desc="Sets the data type of the output dataset", argstr="-datum %s"
     )
     lin = traits.Bool(
-        desc="3 point linear filter: 0.15*a + 0.70*b + 0.15*c"
+        desc=r"3 point linear filter: :math:`0.15\,a + 0.70\,b + 0.15\,c` "
         "[This is the default smoother]",
         argstr="-lin",
     )
     med = traits.Bool(desc="3 point median filter: median(a,b,c)", argstr="-med")
     osf = traits.Bool(
         desc="3 point order statistics filter:"
-        "0.15*min(a,b,c) + 0.70*median(a,b,c) + 0.15*max(a,b,c)",
+        r" :math:`0.15\,min(a,b,c) + 0.70\,median(a,b,c) + 0.15\,max(a,b,c)`",
         argstr="-osf",
     )
     lin3 = traits.Int(
-        desc="3 point linear filter: 0.5*(1-m)*a + m*b + 0.5*(1-m)*c"
+        desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c`. "
        "Here, 'm' is a number strictly between 0 and 1.",
        argstr="-3lin %d",
    )
    hamming = traits.Int(
        argstr="-hamming %d",
-        desc="Use N point Hamming windows." "(N must be odd and bigger than 1.)",
+        desc="Use N point Hamming windows. (N must be odd and bigger than 1.)",
    )
    blackman = traits.Int(
        argstr="-blackman %d",
-        desc="Use N point Blackman windows." "(N must be odd and bigger than 1.)",
+        desc="Use N point Blackman windows. (N must be odd and bigger than 1.)",
    )
    custom = File(
        argstr="-custom %s",
@@ -3150,8 +3176,7 @@ class TSmooth(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> from nipype.testing import example_data
     >>> smooth = afni.TSmooth()
@@ -3252,8 +3277,7 @@ class Volreg(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> volreg = afni.Volreg()
     >>> volreg.inputs.in_file = 'functional.nii'
@@ -3350,14 +3374,10 @@ class WarpOutputSpec(TraitedSpec):
 
 
 class Warp(AFNICommand):
-    """Use 3dWarp for spatially transforming a dataset
-
-    For complete details, see the `3dWarp Documentation.
-    `_
+    """Use 3dWarp for spatially transforming a dataset.
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> warp = afni.Warp()
     >>> warp.inputs.in_file = 'structural.nii'
@@ -3375,6 +3395,11 @@ class Warp(AFNICommand):
     '3dWarp -newgrid 1.000000 -prefix trans.nii.gz structural.nii'
     >>> res = warp_2.run()  # doctest: +SKIP
 
+    See Also
+    --------
+    For complete details, see the `3dWarp Documentation.
+    `__.
+
     """
 
     _cmd = "3dWarp"
@@ -3422,51 +3447,59 @@ class QwarpInputSpec(AFNICommandInputSpec):
         name_source=["in_file"],
         desc="""\
 Sets the prefix/suffix for the output datasets.
+
 * The source dataset is warped to match the base
-and gets prefix 'ppp'. (Except if '-plusminus' is used
+  and gets prefix 'ppp'. (Except if '-plusminus' is used.)
 * The final interpolation to this output dataset is
-done using the 'wsinc5' method. See the output of
-  3dAllineate -HELP
-(in the "Modifying '-final wsinc5'" section) for
-the lengthy technical details.
+  done using the 'wsinc5' method. See the output of
+  3dAllineate -HELP
+  (in the "Modifying '-final wsinc5'" section) for
+  the lengthy technical details.
 * The 3D warp used is saved in a dataset with
-prefix 'ppp_WARP' -- this dataset can be used
-with 3dNwarpApply and 3dNwarpCat, for example.
+  prefix 'ppp_WARP' -- this dataset can be used
+  with 3dNwarpApply and 3dNwarpCat, for example.
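+  A sketch of applying that warp later (hypothetical filenames)::
+
+    3dNwarpApply -nwarp ppp_WARP+tlrc -source other.nii -prefix other_in_base.nii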
 * To be clear, this is the warp from source dataset
-   coordinates to base dataset coordinates, where the
-   values at each base grid point are the xyz displacments
-   needed to move that grid point's xyz values to the
-   corresponding xyz values in the source dataset:
-   base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)
-   Another way to think of this warp is that it 'pulls'
-   values back from source space to base space.
+  coordinates to base dataset coordinates, where the
+  values at each base grid point are the xyz displacements
+  needed to move that grid point's xyz values to the
+  corresponding xyz values in the source dataset:
+  base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)
+  Another way to think of this warp is that it 'pulls'
+  values back from source space to base space.
 * 3dNwarpApply would use 'ppp_WARP' to transform datasets
-aligned with the source dataset to be aligned with the
-base dataset.
-** If you do NOT want this warp saved, use the option '-nowarp'.
--->> (However, this warp is usually the most valuable possible output!)
+  aligned with the source dataset to be aligned with the
+  base dataset.
+
+**If you do NOT want this warp saved, use the option '-nowarp'**.
+(However, this warp is usually the most valuable possible output!)
+
 * If you want to calculate and save the inverse 3D warp,
-use the option '-iwarp'. This inverse warp will then be
-saved in a dataset with prefix 'ppp_WARPINV'.
+  use the option '-iwarp'. This inverse warp will then be
+  saved in a dataset with prefix 'ppp_WARPINV'.
 * This inverse warp could be used to transform data from base
-space to source space, if you need to do such an operation.
+  space to source space, if you need to do such an operation.
 * You can easily compute the inverse later, say by a command like
-  3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)'
-or the inverse can be computed as needed in 3dNwarpApply, like
-  3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...""",
-    )
+  3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)'
+  or the inverse can be computed as needed in 3dNwarpApply, like
+  3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...
+
+""")
     resample = traits.Bool(
-        desc="This option simply resamples the source dataset to match the"
-        "base dataset grid. You can use this if the two datasets"
-        "overlap well (as seen in the AFNI GUI), but are not on the"
-        "same 3D grid."
-        "* If they don't overlap well, allineate them first"
-        "* The reampling here is done with the"
-        "'wsinc5' method, which has very little blurring artifact."
-        "* If the base and source datasets ARE on the same 3D grid,"
-        "then the -resample option will be ignored."
-        "* You CAN use -resample with these 3dQwarp options:"
-        "-plusminus -inilev -iniwarp -duplo",
+        desc="""\
+This option simply resamples the source dataset to match the
+base dataset grid. You can use this if the two datasets
+overlap well (as seen in the AFNI GUI), but are not on the
+same 3D grid.
+
+* If they don't overlap well, allineate them first.
+* The resampling here is done with the
+  'wsinc5' method, which has very little blurring artifact.
+* If the base and source datasets ARE on the same 3D grid,
+  then the -resample option will be ignored.
+* You CAN use -resample with these 3dQwarp options:
+  -plusminus -inilev -iniwarp -duplo
+
+""",
         argstr="-resample",
     )
     allineate = traits.Bool(
@@ -3477,7 +3510,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
         argstr="-allineate",
     )
     allineate_opts = traits.Str(
-        desc="add extra options to the 3dAllineate command to be run by " "3dQwarp.",
+        desc="add extra options to the 3dAllineate command to be run by 3dQwarp.",
         argstr="-allineate_opts %s",
         requires=["allineate"],
     )
@@ -3489,53 +3522,64 @@ class QwarpInputSpec(AFNICommandInputSpec):
     )
     pear = traits.Bool(
         desc="Use strict Pearson correlation for matching."
-        "* Not usually recommended, since the 'clipped Pearson' method"
+        " Not usually recommended, since the 'clipped Pearson' method "
         "used by default will reduce the impact of outlier values.",
         argstr="-pear",
     )
     noneg = traits.Bool(
-        desc="Replace negative values in either input volume with 0."
-        "* If there ARE negative input values, and you do NOT use -noneg,"
-        "then strict Pearson correlation will be used, since the 'clipped'"
-        "method only is implemented for non-negative volumes."
-        "* '-noneg' is not the default, since there might be situations where"
-        "you want to align datasets with positive and negative values mixed."
-        "* But, in many cases, the negative values in a dataset are just the"
-        "result of interpolation artifacts (or other peculiarities), and so"
-        "they should be ignored. That is what '-noneg' is for.",
+        desc="""\
+Replace negative values in either input volume with 0.
+
+* If there ARE negative input values, and you do NOT use -noneg,
+  then strict Pearson correlation will be used, since the 'clipped'
+  method only is implemented for non-negative volumes.
+* '-noneg' is not the default, since there might be situations where
+  you want to align datasets with positive and negative values mixed.
+* But, in many cases, the negative values in a dataset are just the
+  result of interpolation artifacts (or other peculiarities), and so
+  they should be ignored. That is what '-noneg' is for.
+
+""",
         argstr="-noneg",
     )
     nopenalty = traits.Bool(
-        desc="Replace negative values in either input volume with 0."
-        "* If there ARE negative input values, and you do NOT use -noneg,"
-        "then strict Pearson correlation will be used, since the 'clipped'"
-        "method only is implemented for non-negative volumes."
-        "* '-noneg' is not the default, since there might be situations where"
-        "you want to align datasets with positive and negative values mixed."
-        "* But, in many cases, the negative values in a dataset are just the"
-        "result of interpolation artifacts (or other peculiarities), and so"
-        "they should be ignored. That is what '-noneg' is for.",
+        desc="""\
+Don't use a penalty on the cost function; that is, do not
+discourage grid distortions ('-nopenalty' is the same as
+'-penfac 0').""",
         argstr="-nopenalty",
     )
     penfac = traits.Float(
-        desc="Use this value to weight the penalty."
-        "The default value is 1.Larger values mean the"
-        "penalty counts more, reducing grid distortions,"
-        "insha'Allah; '-nopenalty' is the same as '-penfac 0'."
-        " -->>* [23 Sep 2013] -- Zhark increased the default value of"
-        " the penalty by a factor of 5, and also made it get"
-        " progressively larger with each level of refinement."
-        " Thus, warping results will vary from earlier instances"
-        " of 3dQwarp."
-        " * The progressive increase in the penalty at higher levels"
-        " means that the 'cost function' can actually look like the"
-        " alignment is getting worse when the levels change."
-        " * IF you wish to turn off this progression, for whatever"
-        " reason (e.g., to keep compatibility with older results),"
-        " use the option '-penold'.To be completely compatible with"
-        " the older 3dQwarp, you'll also have to use '-penfac 0.2'.",
         argstr="-penfac %f",
-    )
+        desc="""\
+Use this value to weight the penalty.
+The default value is 1. Larger values mean the
+penalty counts more, reducing grid distortions,
+insha'Allah; '-nopenalty' is the same as '-penfac 0'.
+On 23 Sep 2013, Zhark increased the default value of
+the penalty by a factor of 5, and also made it get
+progressively larger with each level of refinement.
+Thus, warping results will vary from earlier instances
+of 3dQwarp.
+
+* The progressive increase in the penalty at higher levels
+  means that the 'cost function' can actually look like the
+  alignment is getting worse when the levels change.
+* IF you wish to turn off this progression, for whatever
+  reason (e.g., to keep compatibility with older results),
+  use the option '-penold'. To be completely compatible with
+  the older 3dQwarp, you'll also have to use '-penfac 0.2'.
+
+""")
     noweight = traits.Bool(
         desc="If you want a binary weight (the old default), use this option."
         "That is, each voxel in the base volume automask will be"
@@ -3545,106 +3589,120 @@ class QwarpInputSpec(AFNICommandInputSpec):
     weight = File(
         desc="Instead of computing the weight from the base dataset,"
         "directly input the weight volume from dataset 'www'."
-        "* Useful if you know what over parts of the base image you"
+        " Useful if you know over what parts of the base image you "
        "want to emphasize or de-emphasize the matching functional.",
        argstr="-weight %s",
        exists=True,
    )
    wball = traits.List(
        traits.Int(),
-        desc="-wball x y z r f"
-        "Enhance automatic weight from '-useweight' by a factor"
-        "of 1+f*Gaussian(FWHM=r) centered in the base image at"
-        "DICOM coordinates (x,y,z) and with radius 'r'. The"
-        "goal of this option is to try and make the alignment"
-        "better in a specific part of the brain."
-        "* Example: -wball 0 14 6 30 40"
-        "to emphasize the thalamic area (in MNI/Talairach space)."
-        "* The 'r' parameter must be positive!"
-        "* The 'f' parameter must be between 1 and 100 (inclusive)."
-        "* '-wball' does nothing if you input your own weight"
-        "with the '-weight' option."
-        "* '-wball' does change the binary weight created by"
-        "the '-noweight' option."
-        "* You can only use '-wball' once in a run of 3dQwarp."
-        "*** The effect of '-wball' is not dramatic. The example"
-        "above makes the average brain image across a collection"
-        "of subjects a little sharper in the thalamic area, which"
-        "might have some small value. If you care enough about"
-        "alignment to use '-wball', then you should examine the"
-        "results from 3dQwarp for each subject, to see if the"
-        "alignments are good enough for your purposes.",
+        desc="""\
+``-wball x y z r f``
+Enhance automatic weight from '-useweight' by a factor
+of 1+f\\*Gaussian(FWHM=r) centered in the base image at
+DICOM coordinates (x,y,z) and with radius 'r'. The
+goal of this option is to try and make the alignment
+better in a specific part of the brain.
+Example: -wball 0 14 6 30 40
+to emphasize the thalamic area (in MNI/Talairach space).
+
+* The 'r' parameter must be positive!
+* The 'f' parameter must be between 1 and 100 (inclusive).
+* '-wball' does nothing if you input your own weight
+  with the '-weight' option.
+* '-wball' does change the binary weight created by
+  the '-noweight' option.
+* You can only use '-wball' once in a run of 3dQwarp.
+
+**The effect of '-wball' is not dramatic.** The example
+above makes the average brain image across a collection
+of subjects a little sharper in the thalamic area, which
+might have some small value. If you care enough about
+alignment to use '-wball', then you should examine the
+results from 3dQwarp for each subject, to see if the
+alignments are good enough for your purposes.""",
        argstr="-wball %s",
        minlen=5,
        maxlen=5,
+        xor=['wmask'],
    )
    traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f")
    wmask = traits.Tuple(
        (File(exists=True), traits.Float()),
-        desc="-wmask ws f"
-        "Similar to '-wball', but here, you provide a dataset 'ws'"
-        "that indicates where to increase the weight."
-        "* The 'ws' dataset must be on the same 3D grid as the base dataset."
-        "* 'ws' is treated as a mask -- it only matters where it"
-        "is nonzero -- otherwise, the values inside are not used."
-        "* After 'ws' comes the factor 'f' by which to increase the"
-        "automatically computed weight. Where 'ws' is nonzero,"
-        "the weighting will be multiplied by (1+f)."
-        "* As with '-wball', the factor 'f' should be between 1 and 100."
-        "* You cannot use '-wball' and '-wmask' together!",
+        desc="""\
+Similar to '-wball', but here, you provide a dataset 'ws'
+that indicates where to increase the weight.
+
+* The 'ws' dataset must be on the same 3D grid as the base dataset.
+* 'ws' is treated as a mask -- it only matters where it
+  is nonzero -- otherwise, the values inside are not used.
+* After 'ws' comes the factor 'f' by which to increase the
+  automatically computed weight. Where 'ws' is nonzero,
+  the weighting will be multiplied by (1+f).
+* As with '-wball', the factor 'f' should be between 1 and 100.
+
+""",
        argstr="-wpass %s %f",
+        xor=['wball'],
    )
    out_weight_file = File(
        argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset"
    )
    blur = traits.List(
        traits.Float(),
-        desc="Gaussian blur the input images by 'bb' (FWHM) voxels before"
-        "doing the alignment (the output dataset will not be blurred)."
-        "The default is 2.345 (for no good reason)."
-        "* Optionally, you can provide 2 values for 'bb', and then"
-        "the first one is applied to the base volume, the second"
-        "to the source volume."
-        "-->>* e.g., '-blur 0 3' to skip blurring the base image"
-        "(if the base is a blurry template, for example)."
-        "* A negative blur radius means to use 3D median filtering,"
-        "rather than Gaussian blurring. This type of filtering will"
-        "better preserve edges, which can be important in alignment."
-        "* If the base is a template volume that is already blurry,"
-        "you probably don't want to blur it again, but blurring"
-        "the source volume a little is probably a good idea, to"
-        "help the program avoid trying to match tiny features."
-        "* Note that -duplo will blur the volumes some extra"
-        "amount for the initial small-scale warping, to make"
-        "that phase of the program converge more rapidly.",
+        desc="""\
+Gaussian blur the input images by 'bb' (FWHM) voxels before
+doing the alignment (the output dataset will not be blurred).
+The default is 2.345 (for no good reason).
+
+* Optionally, you can provide 2 values for 'bb', and then
+  the first one is applied to the base volume, the second
+  to the source volume.
+  e.g., '-blur 0 3' to skip blurring the base image
+  (if the base is a blurry template, for example).
+* A negative blur radius means to use 3D median filtering,
+  rather than Gaussian blurring. This type of filtering will
+  better preserve edges, which can be important in alignment.
+* If the base is a template volume that is already blurry,
+  you probably don't want to blur it again, but blurring
+  the source volume a little is probably a good idea, to
+  help the program avoid trying to match tiny features.
+* Note that -duplo will blur the volumes some extra
+  amount for the initial small-scale warping, to make
+  that phase of the program converge more rapidly.
+
+""",
        argstr="-blur %s",
        minlen=1,
        maxlen=2,
    )
    pblur = traits.List(
        traits.Float(),
-        desc="Use progressive blurring; that is, for larger patch sizes,"
-        "the amount of blurring is larger. The general idea is to"
-        "avoid trying to match finer details when the patch size"
-        "and incremental warps are coarse. When '-blur' is used"
-        "as well, it sets a minimum amount of blurring that will"
-        "be used. [06 Aug 2014 -- '-pblur' may become the default someday]."
-        "* You can optionally give the fraction of the patch size that"
-        "is used for the progressive blur by providing a value between"
-        "0 and 0.25 after '-pblur'. If you provide TWO values, the"
-        "the first fraction is used for progressively blurring the"
-        "base image and the second for the source image. The default"
-        "parameters when just '-pblur' is given is the same as giving"
-        "the options as '-pblur 0.09 0.09'."
-        "* '-pblur' is useful when trying to match 2 volumes with high"
-        "amounts of detail; e.g, warping one subject's brain image to"
-        "match another's, or trying to warp to match a detailed template."
-        "* Note that using negative values with '-blur' means that the"
-        "progressive blurring will be done with median filters, rather"
-        "than Gaussian linear blurring."
-        "-->>*** The combination of the -allineate and -pblur options will make"
-        "the results of using 3dQwarp to align to a template somewhat"
-        "less sensitive to initial head position and scaling.",
+        desc="""\
+Use progressive blurring; that is, for larger patch sizes,
+the amount of blurring is larger. The general idea is to
+avoid trying to match finer details when the patch size
+and incremental warps are coarse. When '-blur' is used
+as well, it sets a minimum amount of blurring that will
+be used. [06 Aug 2014 -- '-pblur' may become the default someday].
+
+* You can optionally give the fraction of the patch size that
+  is used for the progressive blur by providing a value between
+  0 and 0.25 after '-pblur'. If you provide TWO values,
+  the first fraction is used for progressively blurring the
+  base image and the second for the source image. The default
+  parameters when just '-pblur' is given are the same as giving
+  the options as '-pblur 0.09 0.09'.
+* '-pblur' is useful when trying to match 2 volumes with high
+  amounts of detail; e.g., warping one subject's brain image to
+  match another's, or trying to warp to match a detailed template.
+* Note that using negative values with '-blur' means that the
+  progressive blurring will be done with median filters, rather
+  than Gaussian linear blurring.
+
+Note: The combination of the -allineate and -pblur options will make
+the results of using 3dQwarp to align to a template somewhat
+less sensitive to initial head position and scaling.""",
         argstr="-pblur %s",
         minlen=1,
         maxlen=2,
@@ -3653,7 +3711,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
         desc="Here, 'ee' is a dataset to specify a mask of voxels"
         "to EXCLUDE from the analysis -- all voxels in 'ee'"
         "that are NONZERO will not be used in the alignment."
-        "* The base image always automasked -- the emask is"
+        " The base image is always automasked -- the emask is "
         "extra, to indicate voxels you definitely DON'T want"
         "included in the matching process, even if they are"
         "inside the brain.",
         argstr="-emask %s",
         exists=True,
         copyfile=False,
     )
     noXdis = traits.Bool(desc="Warp will not displace in x direction", argstr="-noXdis")
     noYdis = traits.Bool(desc="Warp will not displace in y direction", argstr="-noYdis")
     noZdis = traits.Bool(desc="Warp will not displace in z direction", argstr="-noZdis")
     iniwarp = traits.List(
         File(exists=True, copyfile=False),
-        desc="A dataset with an initial nonlinear warp to use."
-        "* If this option is not used, the initial warp is the identity."
-        "* You can specify a catenation of warps (in quotes) here, as in"
-        "program 3dNwarpApply."
-        "* As a special case, if you just input an affine matrix in a .1D"
-        "file, that will work also -- it is treated as giving the initial"
-        'warp via the string "IDENT(base_dataset) matrix_file.aff12.1D".'
-        "* You CANNOT use this option with -duplo !!"
-        "* -iniwarp is usually used with -inilev to re-start 3dQwarp from"
-        "a previous stopping point.",
+        desc="""\
+A dataset with an initial nonlinear warp to use.
+
+* If this option is not used, the initial warp is the identity.
+* You can specify a catenation of warps (in quotes) here, as in
+  program 3dNwarpApply.
+* As a special case, if you just input an affine matrix in a .1D
+  file, that will work also -- it is treated as giving the initial
+  warp via the string "IDENT(base_dataset) matrix_file.aff12.1D".
+* You CANNOT use this option with -duplo !!
+* -iniwarp is usually used with -inilev to re-start 3dQwarp from
+  a previous stopping point.
+
+""",
         argstr="-iniwarp %s",
         xor=["duplo"],
     )
     inilev = traits.Int(
-        desc="The initial refinement 'level' at which to start."
-        "* Usually used with -iniwarp; CANNOT be used with -duplo."
-        "* The combination of -inilev and -iniwarp lets you take the"
-        "results of a previous 3dQwarp run and refine them further:"
-        "Note that the source dataset in the second run is the SAME as"
-        "in the first run. If you don't see why this is necessary,"
-        "then you probably need to seek help from an AFNI guru.",
+        desc="""\
+The initial refinement 'level' at which to start.
+
+* Usually used with -iniwarp; CANNOT be used with -duplo.
+* The combination of -inilev and -iniwarp lets you take the
+  results of a previous 3dQwarp run and refine them further:
+  Note that the source dataset in the second run is the SAME as
+  in the first run. If you don't see why this is necessary,
+  then you probably need to seek help from an AFNI guru.
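+  A sketch of such a two-stage run (mirroring the ``Q25``/``Q11``
+  example in the class docstring below)::
+
+    3dQwarp -base mni.nii -source structural.nii -minpatch 25 -prefix Q25
+    3dQwarp -base mni.nii -source structural.nii -inilev 7 -iniwarp Q25_warp+tlrc.HEAD -prefix Q11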
+
+""",
         argstr="-inilev %d",
         xor=["duplo"],
     )
     minpatch = traits.Int(
-        desc="* The value of mm should be an odd integer."
-        "* The default value of mm is 25."
-        "* For more accurate results than mm=25, try 19 or 13."
-        "* The smallest allowed patch size is 5."
-        "* You may want stop at a larger patch size (say 7 or 9) and use"
-        "the -Qfinal option to run that final level with quintic warps,"
-        "which might run faster and provide the same degree of warp detail."
-        "* Trying to make two different brain volumes match in fine detail"
-        "is usually a waste of time, especially in humans. There is too"
-        "much variability in anatomy to match gyrus to gyrus accurately."
-        "For this reason, the default minimum patch size is 25 voxels."
-        "Using a smaller '-minpatch' might try to force the warp to"
-        "match features that do not match, and the result can be useless"
-        "image distortions -- another reason to LOOK AT THE RESULTS.",
+        desc="""\
+The value of mm should be an odd integer.
+
+* The default value of mm is 25.
+* For more accurate results than mm=25, try 19 or 13.
+* The smallest allowed patch size is 5.
+* You may want to stop at a larger patch size (say 7 or 9) and use
+  the -Qfinal option to run that final level with quintic warps,
+  which might run faster and provide the same degree of warp detail.
+* Trying to make two different brain volumes match in fine detail
+  is usually a waste of time, especially in humans. There is too
+  much variability in anatomy to match gyrus to gyrus accurately.
+  For this reason, the default minimum patch size is 25 voxels.
+  Using a smaller '-minpatch' might try to force the warp to
+  match features that do not match, and the result can be useless
+  image distortions -- another reason to LOOK AT THE RESULTS.
+
+""",
         argstr="-minpatch %d",
     )
     maxlev = traits.Int(
-        desc="The initial refinement 'level' at which to start."
-        "* Usually used with -iniwarp; CANNOT be used with -duplo."
-        "* The combination of -inilev and -iniwarp lets you take the"
-        "results of a previous 3dQwarp run and refine them further:"
-        "Note that the source dataset in the second run is the SAME as"
-        "in the first run. If you don't see why this is necessary,"
-        "then you probably need to seek help from an AFNI guru.",
+        desc="""\
+The maximum refinement 'level' to allow.
+
+* CANNOT be used with -duplo.
+* The combination of -inilev and -iniwarp lets you take the
+  results of a previous 3dQwarp run and refine them further:
+  Note that the source dataset in the second run is the SAME as
+  in the first run. If you don't see why this is necessary,
+  then you probably need to seek help from an AFNI guru.
+
+""",
         argstr="-maxlev %d",
         xor=["duplo"],
         position=-1,
     )
     gridlist = File(
-        desc="This option provides an alternate way to specify the patch"
-        "grid sizes used in the warp optimization process. 'gl' is"
-        "a 1D file with a list of patches to use -- in most cases,"
-        "you will want to use it in the following form:"
-        "-gridlist '1D: 0 151 101 75 51'"
-        "* Here, a 0 patch size means the global domain. Patch sizes"
-        "otherwise should be odd integers >= 5."
-        "* If you use the '0' patch size again after the first position,"
-        "you will actually get an iteration at the size of the"
-        "default patch level 1, where the patch sizes are 75% of"
-        "the volume dimension. There is no way to force the program"
-        "to literally repeat the sui generis step of lev=0."
-        "* You cannot use -gridlist with -duplo or -plusminus!",
+        desc="""\
+This option provides an alternate way to specify the patch
+grid sizes used in the warp optimization process. 'gl' is
+a 1D file with a list of patches to use -- in most cases,
+you will want to use it in the following form:
+``-gridlist '1D: 0 151 101 75 51'``
+
+* Here, a 0 patch size means the global domain. Patch sizes
+  otherwise should be odd integers >= 5.
+* If you use the '0' patch size again after the first position,
+  you will actually get an iteration at the size of the
+  default patch level 1, where the patch sizes are 75% of
+  the volume dimension. There is no way to force the program
+  to literally repeat the sui generis step of lev=0.
+
+""",
         argstr="-gridlist %s",
         exists=True,
         copyfile=False,
         xor=["duplo", "plusminus"],
     )
     allsave = traits.Bool(
-        desc="This option lets you save the output warps from each level"
-        "of the refinement process. Mostly used for experimenting."
-        "* Cannot be used with -nopadWARP, -duplo, or -plusminus."
-        "* Will only save all the outputs if the program terminates"
-        "normally -- if it crashes, or freezes, then all these"
-        "warps are lost.",
+        desc="""\
+This option lets you save the output warps from each level
+of the refinement process. Mostly used for experimenting.
+Will only save all the outputs if the program terminates
+normally -- if it crashes, or freezes, then all these
+warps are lost.""",
         argstr="-allsave",
         xor=["nopadWARP", "duplo", "plusminus"],
     )
     duplo = traits.Bool(
-        desc="Start off with 1/2 scale versions of the volumes,"
-        "for getting a speedy coarse first alignment."
-        "* Then scales back up to register the full volumes."
-        "The goal is greater speed, and it seems to help this"
-        "positively piggish program to be more expeditious."
-        "* However, accuracy is somewhat lower with '-duplo',"
-        "for reasons that currenly elude Zhark; for this reason,"
-        "the Emperor does not usually use '-duplo'.",
+        desc="""\
+Start off with 1/2 scale versions of the volumes,
+for getting a speedy coarse first alignment.
+
+* Then scales back up to register the full volumes.
+  The goal is greater speed, and it seems to help this
+  positively piggish program to be more expeditious.
+* However, accuracy is somewhat lower with '-duplo',
+  for reasons that currently elude Zhark; for this reason,
+  the Emperor does not usually use '-duplo'.
+
+""",
         argstr="-duplo",
         xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"],
     )
     workhard = traits.Bool(
-        desc="Iterate more times, which can help when the volumes are"
-        "hard to align at all, or when you hope to get a more precise"
-        "alignment."
-        "* Slows the program down (possibly a lot), of course."
-        "* When you combine '-workhard' with '-duplo', only the"
-        "full size volumes get the extra iterations."
-        "* For finer control over which refinement levels work hard,"
-        "you can use this option in the form (for example)"
-        "     -workhard:4:7"
-        "which implies the extra iterations will be done at levels"
-        "4, 5, 6, and 7, but not otherwise."
-        "* You can also use '-superhard' to iterate even more, but"
-        "this extra option will REALLY slow things down."
-        "-->>* Under most circumstances, you should not need to use either"
-        "-workhard or -superhard."
-        "-->>* The fastest way to register to a template image is via the"
-        "-duplo option, and without the -workhard or -superhard options."
- "-->>* If you use this option in the form '-Workhard' (first letter" - "in upper case), then the second iteration at each level is" - "done with quintic polynomial warps.", + desc="""\ +Iterate more times, which can help when the volumes are +hard to align at all, or when you hope to get a more precise +alignment. + +* Slows the program down (possibly a lot), of course. +* When you combine '-workhard' with '-duplo', only the + full size volumes get the extra iterations. +* For finer control over which refinement levels work hard, + you can use this option in the form (for example) ``-workhard:4:7`` + which implies the extra iterations will be done at levels + 4, 5, 6, and 7, but not otherwise. +* You can also use '-superhard' to iterate even more, but + this extra option will REALLY slow things down. + + * Under most circumstances, you should not need to use either + ``-workhard`` or ``-superhard``. + * The fastest way to register to a template image is via the + ``-duplo`` option, and without the ``-workhard`` or ``-superhard`` options. + * If you use this option in the form '-Workhard' (first letter + in upper case), then the second iteration at each level is + done with quintic polynomial warps. + +""", argstr="-workhard", xor=["boxopt", "ballopt"], ) Qfinal = traits.Bool( - desc="At the finest patch size (the final level), use Hermite" - "quintic polynomials for the warp instead of cubic polynomials." - "* In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis" - "function parameters over which to optimize (2 polynomials" - "dependent on each of the x,y,z directions, and 3 different" - "directions of displacement)." - "* There are 3x3x3x3=81 quintic polynomial parameters per patch." - "* With -Qfinal, the final level will have more detail in" - "the allowed warps, at the cost of yet more CPU time." - "* However, no patch below 7x7x7 in size will be done with quintic" - "polynomials." - "* This option is also not usually needed, and is experimental.", + desc="""\ +At the finest patch size (the final level), use Hermite +quintic polynomials for the warp instead of cubic polynomials. + +* In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis + function parameters over which to optimize (2 polynomials + dependent on each of the x,y,z directions, and 3 different + directions of displacement). +* There are 3x3x3x3=81 quintic polynomial parameters per patch. +* With -Qfinal, the final level will have more detail in + the allowed warps, at the cost of yet more CPU time. +* However, no patch below 7x7x7 in size will be done with quintic + polynomials. +* This option is also not usually needed, and is experimental. + +""", argstr="-Qfinal", ) Qonly = traits.Bool( - desc="Use Hermite quintic polynomials at all levels." - "* Very slow (about 4 times longer). Also experimental." - "* Will produce a (discrete representation of a) C2 warp.", + desc="""\ +Use Hermite quintic polynomials at all levels. + +* Very slow (about 4 times longer). Also experimental. +* Will produce a (discrete representation of a) C2 warp. + +""", argstr="-Qonly", ) plusminus = traits.Bool( - desc="Normally, the warp displacements dis(x) are defined to match" - "base(x) to source(x+dis(x)). With this option, the match" - "is between base(x-dis(x)) and source(x+dis(x)) -- the two" - "images 'meet in the middle'." 
-        "* One goal is to mimic the warping done to MRI EPI data by"
-        "field inhomogeneities, when registering between a 'blip up'"
-        "and a 'blip down' down volume, which will have opposite"
-        "distortions."
-        "* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since"
-        "base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))"
-        "wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));"
-        "that is, the warp V(x) that one would get from the 'usual' way"
-        "of running 3dQwarp is V(x) = Wp(INV(Wm(x)))."
-        "* Conversely, we can calculate Wp(x) in terms of V(x) as follows:"
-        "If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;"
-        "then Wp(x) = V(INV(Vh(x)))"
-        "* With the above formulas, it is possible to compute Wp(x) from"
-        "V(x) and vice-versa, using program 3dNwarpCalc. The requisite"
-        "commands are left as an exercise for the aspiring AFNI Jedi Master."
-        "* You can use the semi-secret '-pmBASE' option to get the V(x)"
-        "warp and the source dataset warped to base space, in addition to"
-        "the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps."
-        "-->>* Alas: -plusminus does not work with -duplo or -allineate :-("
-        "* However, you can use -iniwarp with -plusminus :-)"
-        "-->>* The outputs have _PLUS (from the source dataset) and _MINUS"
-        "(from the base dataset) in their filenames, in addition to"
-        "the prefix. The -iwarp option, if present, will be ignored.",
+        desc="""\
+Normally, the warp displacements dis(x) are defined to match
+base(x) to source(x+dis(x)). With this option, the match
+is between base(x-dis(x)) and source(x+dis(x)) -- the two
+images 'meet in the middle'.
+
+* One goal is to mimic the warping done to MRI EPI data by
+  field inhomogeneities, when registering between a 'blip up'
+  and a 'blip down' volume, which will have opposite
+  distortions.
+* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since
+  base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))
+  wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));
+  that is, the warp V(x) that one would get from the 'usual' way
+  of running 3dQwarp is V(x) = Wp(INV(Wm(x))).
+* Conversely, we can calculate Wp(x) in terms of V(x) as follows:
+  If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;
+  then Wp(x) = V(INV(Vh(x)))
+* With the above formulas, it is possible to compute Wp(x) from
+  V(x) and vice-versa, using program 3dNwarpCalc. The requisite
+  commands are left as an exercise for the aspiring AFNI Jedi Master.
+* You can use the semi-secret '-pmBASE' option to get the V(x)
+  warp and the source dataset warped to base space, in addition to
+  the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps.
+
+  * Alas: -plusminus does not work with -duplo or -allineate :-(
+  * However, you can use -iniwarp with -plusminus :-)
+  * The outputs have _PLUS (from the source dataset) and _MINUS
+    (from the base dataset) in their filenames, in addition to
+    the prefix. The -iwarp option, if present, will be ignored.
+
+""",
         argstr="-plusminus",
         xor=["duplo", "allsave", "iwarp"],
     )
     nopad = traits.Bool(
-        desc="Do NOT use zero-padding on the 3D base and source images."
-        "[Default == zero-pad, if needed]"
-        "* The underlying model for deformations goes to zero at the"
-        "edge of the volume being warped. However, if there is"
-        "significant data near an edge of the volume, then it won't"
-        "get displaced much, and so the results might not be good."
-        "* Zero padding is designed as a way to work around this potential"
-        "problem. You should NOT need the '-nopad' option for any"
-        "reason that Zhark can think of, but it is here to be symmetrical"
-        "with 3dAllineate."
-        "* Note that the output (warped from source) dataset will be on the"
-        "base dataset grid whether or not zero-padding is allowed. However,"
-        "unless you use the following option, allowing zero-padding (i.e.,"
-        "the default operation) will make the output WARP dataset(s) be"
-        "on a larger grid (also see '-expad' below).",
+        desc="""\
+Do NOT use zero-padding on the 3D base and source images.
+[Default == zero-pad, if needed]
+
+* The underlying model for deformations goes to zero at the
+  edge of the volume being warped. However, if there is
+  significant data near an edge of the volume, then it won't
+  get displaced much, and so the results might not be good.
+* Zero padding is designed as a way to work around this potential
+  problem. You should NOT need the '-nopad' option for any
+  reason that Zhark can think of, but it is here to be symmetrical
+  with 3dAllineate.
+* Note that the output (warped from source) dataset will be on the
+  base dataset grid whether or not zero-padding is allowed. However,
+  unless you use the following option, allowing zero-padding (i.e.,
+  the default operation) will make the output WARP dataset(s) be
+  on a larger grid (also see '-expad' below).
+
+""",
         argstr="-nopad",
     )
     nopadWARP = traits.Bool(
@@ -3864,13 +3966,13 @@ class QwarpInputSpec(AFNICommandInputSpec):
     expad = traits.Int(
         desc="This option instructs the program to pad the warp by an extra"
         "'EE' voxels (and then 3dQwarp starts optimizing it)."
-        "* This option is seldom needed, but can be useful if you"
+        " This option is seldom needed, but can be useful if you "
         "might later catenate the nonlinear warp -- via 3dNwarpCat --"
         "with an affine transformation that contains a large shift."
         "Under that circumstance, the nonlinear warp might be shifted"
         "partially outside its original grid, so expanding that grid"
         "can avoid this problem."
-        "* Note that this option perforce turns off '-nopadWARP'.",
+        " Note that this option perforce turns off '-nopadWARP'.",
         argstr="-expad %d",
         xor=["nopadWARP"],
     )
@@ -3888,7 +3990,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
     baxopt = traits.Bool(
         desc="Use the 'box' optimization limits instead of the 'ball'"
         "[this is the default at present]."
-        "* Note that if '-workhard' is used, then ball and box optimization"
+        " Note that if '-workhard' is used, then ball and box optimization "
         "are alternated in the different iterations at each level, so"
         "these two options have no effect in that case.",
         argstr="-boxopt",
@@ -3917,7 +4019,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
         position=-2,
     )
     lpa = traits.Bool(
-        desc="Local Pearson maximization" "This option has not be extensively tested",
+        desc="Local Pearson maximization. This option has not been extensively tested",
         argstr="-lpa",
         xor=["nmi", "mi", "lpc", "hel", "pear"],
     )
@@ -3937,7 +4039,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
     )
     nmi = traits.Bool(
         desc="Normalized Mutual Information: a matching function for the adventurous"
-        "This option has NOT be extensively tested for usefullness"
+        " This option has NOT been extensively tested for usefulness "
         "and should be considered experimental at this infundibulum.",
         argstr="-nmi",
         xor=["nmi", "hel", "lpc", "lpa", "pear"],
     )
@@ -3965,15 +4067,11 @@ class QwarpOutputSpec(TraitedSpec):
 
 
 class Qwarp(AFNICommand):
-    """A version of 3dQwarp
+    """
+    Allineate your images prior to passing them to this workflow.
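+
+    For instance, a minimal sketch using this interface's own ``allineate``
+    machinery (cf. the last example below)::
+
+        qwarp.inputs.allineate = True
+        qwarp.inputs.allineate_opts = '-cose lpa -verb'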
-
-    For complete details, see the `3dQwarp Documentation.
-    `_
-
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> qwarp = afni.Qwarp()
     >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz'
@@ -4007,6 +4105,7 @@ class Qwarp(AFNICommand):
     >>> qwarp.cmdline
     '3dQwarp -base epi.nii -blur 0.0 3.0 -source structural.nii -iwarp -prefix anatSSQ.nii.gz \
-resample -verb -lpc'
+
     >>> res = qwarp.run()  # doctest: +SKIP
 
     >>> from nipype.interfaces import afni
@@ -4017,6 +4116,7 @@ class Qwarp(AFNICommand):
     >>> qwarp.inputs.blur = [0,3]
     >>> qwarp.cmdline
     '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -prefix ppp_structural'
+
     >>> res = qwarp.run()  # doctest: +SKIP
 
     >>> from nipype.interfaces import afni
@@ -4029,6 +4129,7 @@ class Qwarp(AFNICommand):
     >>> qwarp.inputs.out_file = 'Q25'
     >>> qwarp.cmdline
     '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -minpatch 25 -prefix Q25'
+
     >>> res = qwarp.run()  # doctest: +SKIP
     >>> qwarp2 = afni.Qwarp()
     >>> qwarp2.inputs.in_file = 'structural.nii'
@@ -4040,6 +4141,7 @@ class Qwarp(AFNICommand):
     >>> qwarp2.cmdline
     '3dQwarp -base mni.nii -blur 0.0 2.0 -source structural.nii -inilev 7 -iniwarp Q25_\
warp+tlrc.HEAD -prefix Q11'
+
     >>> res2 = qwarp2.run()  # doctest: +SKIP
     >>> qwarp3 = afni.Qwarp()
     >>> qwarp3.inputs.in_file = 'structural.nii'
@@ -4050,7 +4152,15 @@ class Qwarp(AFNICommand):
     >>> qwarp3.cmdline
     "3dQwarp -allineate -allineate_opts '-cose lpa -verb' -base mni.nii -source structural.nii \
-prefix ppp_structural"
-    >>> res3 = qwarp3.run()  # doctest: +SKIP """
+
+    >>> res3 = qwarp3.run()  # doctest: +SKIP
+
+    See Also
+    --------
+    For complete details, see the `3dQwarp Documentation.
+    `__
+
+    """
 
     _cmd = "3dQwarp"
     input_spec = QwarpInputSpec
@@ -4176,12 +4286,8 @@ class QwarpPlusMinus(Qwarp):
     """A version of 3dQwarp for performing field susceptibility correction
     using two images with opposing phase encoding directions.
 
-    For complete details, see the `3dQwarp Documentation.
-    `_
-
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> qwarp = afni.QwarpPlusMinus()
     >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz'
@@ -4189,9 +4295,14 @@ class QwarpPlusMinus(Qwarp):
     >>> qwarp.inputs.base_file = 'sub-01_dir-RL_epi.nii.gz'
     >>> qwarp.cmdline
     '3dQwarp -prefix Qwarp.nii.gz -plusminus -base sub-01_dir-RL_epi.nii.gz \
-    -source sub-01_dir-LR_epi.nii.gz -nopadWARP'
+-source sub-01_dir-LR_epi.nii.gz -nopadWARP'
     >>> res = warp.run()  # doctest: +SKIP
 
+    See Also
+    --------
+    For complete details, see the `3dQwarp Documentation.
+    `__
+
     """
 
     input_spec = QwarpPlusMinusInputSpec
diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py
index 61287b934e..a6515987e0 100644
--- a/nipype/interfaces/afni/utils.py
+++ b/nipype/interfaces/afni/utils.py
@@ -1,14 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""
-AFNI utility interfaces.
-
-Examples
---------
-See the docstrings of the individual classes for examples.
-
-"""
+"""AFNI utility interfaces."""
 import os
 import os.path as op
 import re
@@ -76,8 +69,7 @@ class ABoverlap(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> aboverlap = afni.ABoverlap()
     >>> aboverlap.inputs.in_file_a = 'functional.nii'
@@ -149,8 +141,7 @@ class AFNItoNIFTI(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> a2n = afni.AFNItoNIFTI()
     >>> a2n.inputs.in_file = 'afni_output.3D'
@@ -217,8 +208,7 @@ class Autobox(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> abox = afni.Autobox()
     >>> abox.inputs.in_file = 'structural.nii'
@@ -277,7 +267,7 @@ class BrickStatInputSpec(CommandLineInputSpec):
         traits.Float,
         traits.Float,
         desc="p0 ps p1 write the percentile values starting "
-        "at p0% and ending at p1% at a step of ps%. "
+        "at p0\\% and ending at p1\\% at a step of ps\\%. "
         "only one sub-brick is accepted.",
         argstr="-percentile %.3f %.3f %.3f",
     )
@@ -295,8 +285,7 @@ class BrickStat(AFNICommandBase):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> brickstat = afni.BrickStat()
     >>> brickstat.inputs.in_file = 'functional.nii'
@@ -350,46 +339,46 @@ class BucketInputSpec(AFNICommandInputSpec):
         position=-1,
         mandatory=True,
         argstr="%s",
-        desc="List of tuples of input datasets and subbrick selection strings"
-        "as described in more detail in the following afni help string"
-        "Input dataset specified using one of these forms:"
-        " 'prefix+view', 'prefix+view.HEAD', or 'prefix+view.BRIK'."
-        "You can also add a sub-brick selection list after the end of the"
-        "dataset name. This allows only a subset of the sub-bricks to be"
-        "included into the output (by default, all of the input dataset"
-        "is copied into the output). A sub-brick selection list looks like"
-        "one of the following forms:"
-        " fred+orig[5] ==> use only sub-brick #5"
-        " fred+orig[5,9,17] ==> use #5, #9, and #17"
-        " fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8"
-        " fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13"
-        "Sub-brick indexes start at 0. You can use the character '$'"
-        "to indicate the last sub-brick in a dataset; for example, you"
-        "can select every third sub-brick by using the selection list"
-        " fred+orig[0..$(3)]"
-        "N.B.: The sub-bricks are output in the order specified, which may"
-        " not be the order in the original datasets. For example, using"
-        " fred+orig[0..$(2),1..$(2)]"
-        " will cause the sub-bricks in fred+orig to be output into the"
-        " new dataset in an interleaved fashion. Using"
-        " fred+orig[$..0]"
-        " will reverse the order of the sub-bricks in the output."
-        "N.B.: Bucket datasets have multiple sub-bricks, but do NOT have"
-        " a time dimension. You can input sub-bricks from a 3D+time dataset"
-        " into a bucket dataset. You can use the '3dinfo' program to see"
-        " how many sub-bricks a 3D+time or a bucket dataset contains."
-        "N.B.: In non-bucket functional datasets (like the 'fico' datasets"
-        " output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick"
-        " [0] is the 'intensity' and sub-brick [1] is the statistical parameter"
-        " used as a threshold. Thus, to create a bucket dataset using the"
-        " intensity from dataset A and the threshold from dataset B, and"
-        " calling the output dataset C, you would type"
-        " 3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1]'"
-        "WARNING: using this program, it is possible to create a dataset that"
-        " has different basic datum types for different sub-bricks"
-        " (e.g., shorts for brick 0, floats for brick 1)."
-        " Do NOT do this! Very few AFNI programs will work correctly"
-        " with such datasets!",
+        desc="""\
+List of tuples of input datasets and subbrick selection strings
+as described in more detail in the following afni help string
+Input dataset specified using one of these forms:
+``prefix+view``, ``prefix+view.HEAD``, or ``prefix+view.BRIK``.
+You can also add a sub-brick selection list after the end of the
+dataset name. This allows only a subset of the sub-bricks to be
+included into the output (by default, all of the input dataset
+is copied into the output). A sub-brick selection list looks like
+one of the following forms::
+
+    fred+orig[5]                     ==> use only sub-brick #5
+    fred+orig[5,9,17]                ==> use #5, #9, and #17
+    fred+orig[5..8] or [5-8]         ==> use #5, #6, #7, and #8
+    fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13
+
+Sub-brick indexes start at 0. You can use the character '$'
+to indicate the last sub-brick in a dataset; for example, you
+can select every third sub-brick by using the selection list
+``fred+orig[0..$(3)]``.
+N.B.: The sub-bricks are output in the order specified, which may
+not be the order in the original datasets. For example, using
+``fred+orig[0..$(2),1..$(2)]``
+will cause the sub-bricks in fred+orig to be output into the
+new dataset in an interleaved fashion. Using ``fred+orig[$..0]``
+will reverse the order of the sub-bricks in the output.
+N.B.: Bucket datasets have multiple sub-bricks, but do NOT have
+a time dimension. You can input sub-bricks from a 3D+time dataset
+into a bucket dataset. You can use the '3dinfo' program to see
+how many sub-bricks a 3D+time or a bucket dataset contains.
+N.B.: In non-bucket functional datasets (like the 'fico' datasets
+output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick
+``[0]`` is the 'intensity' and sub-brick ``[1]`` is the statistical parameter
+used as a threshold. Thus, to create a bucket dataset using the
+intensity from dataset A and the threshold from dataset B, and
+calling the output dataset C, you would type::
+
+    3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1]'
+
+""",
     )
     out_file = File(argstr="-prefix %s", name_template="buck")
 
@@ -398,12 +387,16 @@ class Bucket(AFNICommand):
     """Concatenate sub-bricks from input datasets into one big
     'bucket' dataset.
 
-    For complete details, see the `3dbucket Documentation.
-    `_
+    .. danger::
 
-    Examples
-    ========
+        Using this program, it is possible to create a dataset that
+        has different basic datum types for different sub-bricks
+        (e.g., shorts for brick 0, floats for brick 1).
+        Do NOT do this! Very few AFNI programs will work correctly
+        with such datasets!
 
+    Examples
+    --------
     >>> from nipype.interfaces import afni
     >>> bucket = afni.Bucket()
     >>> bucket.inputs.in_file = [('functional.nii',"{2..$}"), ('functional.nii',"{1}")]
@@ -412,6 +405,11 @@ class Bucket(AFNICommand):
     "3dbucket -prefix vr_base functional.nii'{2..$}' functional.nii'{1}'"
     >>> res = bucket.run()  # doctest: +SKIP
 
+    See Also
+    --------
+    For complete details, see the `3dbucket Documentation.
+    `__.
+ """ _cmd = "3dbucket" @@ -459,8 +457,7 @@ class Calc(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> calc = afni.Calc() >>> calc.inputs.in_file_a = 'functional.nii' @@ -531,8 +528,7 @@ class CatInputSpec(AFNICommandInputSpec): "fint", "cint", argstr="-form %s", - desc="specify data type for output. Valid types are 'int', " - "'nice', 'double', 'fint', and 'cint'.", + desc="specify data type for output.", xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], ) stack = traits.Bool( @@ -578,8 +574,7 @@ class Cat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cat1d = afni.Cat() >>> cat1d.inputs.sel = "'[0,2]'" @@ -642,8 +637,7 @@ class CatMatvec(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cmv = afni.CatMatvec() >>> cmv.inputs.in_file = [('structural.BRIK::WARP_DATA','I')] @@ -738,8 +732,7 @@ class CenterMass(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cm = afni.CenterMass() >>> cm.inputs.in_file = 'structural.nii' @@ -748,6 +741,7 @@ class CenterMass(AFNICommandBase): >>> cm.cmdline '3dCM -roi_vals 2 10 structural.nii > cm.txt' >>> res = 3dcm.run() # doctest: +SKIP + """ _cmd = "3dCM" @@ -806,8 +800,7 @@ class ConvertDset(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> convertdset = afni.ConvertDset() >>> convertdset.inputs.in_file = 'lh.pial_converted.gii' @@ -816,6 +809,7 @@ class ConvertDset(AFNICommandBase): >>> convertdset.cmdline 'ConvertDset -o_niml_asc -input lh.pial_converted.gii -prefix lh.pial_converted.niml.dset' >>> res = convertdset.run() # doctest: +SKIP + """ _cmd = "ConvertDset" @@ -852,11 +846,10 @@ class Copy(AFNICommand): or different type using 3dcopy command For complete details, see the `3dcopy Documentation. - `_ + `__ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> copy3d = afni.Copy() >>> copy3d.inputs.in_file = 'functional.nii' @@ -914,7 +907,7 @@ class DotInputSpec(AFNICommandInputSpec): ) dodot = traits.Bool(desc="Return the dot product (unscaled).", argstr="-dodot") docoef = traits.Bool( - desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b*dset1", + desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b\\*dset1", argstr="-docoef", ) dosums = traits.Bool( @@ -943,7 +936,11 @@ class Dot(AFNICommand): """Correlation coefficient between sub-brick pairs. All datasets in in_files list will be concatenated. You can use sub-brick selectors in the file specification. - Note: This program is not efficient when more than two subbricks are input. + + .. warning:: + + This program is not efficient when more than two subbricks are input. + For complete details, see the `3ddot Documentation. `_ @@ -1015,36 +1012,13 @@ class Edge3InputSpec(AFNICommandInputSpec): class Edge3(AFNICommand): """Does 3D Edge detection using the library 3DEdge - by Gregoire Malandain (gregoire.malandain@sophia.inria.fr). + by Gregoire Malandain. For complete details, see the `3dedge3 Documentation. `_ - references_ = [{'entry': BibTeX('@article{Deriche1987,' - 'author={R. 
Deriche},'
-                                     'title={Optimal edge detection using recursive filtering},'
-                                     'journal={International Journal of Computer Vision},'
-                                     'volume={2},',
-                                     'pages={167-187},'
-                                     'year={1987},'
-                                     '}'),
-                          'tags': ['method'],
-                          },
-                         {'entry': BibTeX('@article{MongaDericheMalandainCocquerez1991,'
-                                          'author={O. Monga, R. Deriche, G. Malandain, J.P. Cocquerez},'
-                                          'title={Recursive filtering and edge tracking: two primary tools for 3D edge detection},'
-                                          'journal={Image and vision computing},'
-                                          'volume={9},',
-                                          'pages={203-214},'
-                                          'year={1991},'
-                                          '}'),
-                          'tags': ['method'],
-                          },
-                         ]
-
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> edge3 = afni.Edge3()
     >>> edge3.inputs.in_file = 'functional.nii'
@@ -1059,6 +1033,25 @@ class Edge3(AFNICommand):
     _cmd = "3dedge3"
     input_spec = Edge3InputSpec
     output_spec = AFNICommandOutputSpec
+    references_ = [
+        {'entry': BibTeX("""\
+@article{Deriche1987,
+author={R. Deriche},
+title={Optimal edge detection using recursive filtering},
+journal={International Journal of Computer Vision},
+volume={2},
+pages={167-187},
+year={1987},
+}"""), 'tags': ['method']},
+        {'entry': BibTeX("""\
+@article{MongaDericheMalandainCocquerez1991,
+  author={O. Monga, R. Deriche, G. Malandain, J.P. Cocquerez},
+  title={Recursive filtering and edge tracking: two primary tools for 3D edge detection},
+  journal={Image and vision computing},
+  volume={9},
+  pages={203-214},
+  year={1991},
+}"""), 'tags': ['method']}]
 
 
 class EvalInputSpec(AFNICommandInputSpec):
@@ -1097,8 +1090,7 @@ class Eval(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> eval = afni.Eval()
     >>> eval.inputs.in_file_a = 'seed.1D'
@@ -1252,17 +1244,6 @@ class FWHMx(AFNICommandBase):
     For complete details, see the `3dFWHMx Documentation.
     `_
 
-    Examples
-    --------
-
-    >>> from nipype.interfaces import afni
-    >>> fwhm = afni.FWHMx()
-    >>> fwhm.inputs.in_file = 'functional.nii'
-    >>> fwhm.cmdline
-    '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out'
-    >>> res = fwhm.run()  # doctest: +SKIP
-
-
     (Classic) METHOD:
 
     * Calculate ratio of variance of first differences to data variance.
@@ -1296,14 +1277,12 @@ class FWHMx(AFNICommandBase):
     3dClustSim has also been modified to use the ACF model given above to generate
     noise random fields.
 
-
     .. note:: TL;DR or summary
 
       The take-awaymessage is that the 'classic' 3dFWHMx and
      3dClustSim analysis, using a pure Gaussian ACF, is not very correct for
      FMRI data -- I cannot speak for PET or MEG data.
 
-
     .. warning::
 
      Do NOT use 3dFWHMx on the statistical results (e.g., '-bucket') from
@@ -1311,7 +1290,6 @@ class FWHMx(AFNICommandBase):
      the smoothness of the time series NOISE, not of the statistics. This
      proscription is especially true if you plan to use 3dClustSim next!!
 
-
    .. note:: Recommendations

      * For FMRI statistical purposes, you DO NOT want the FWHM to reflect
@@ -1327,7 +1305,6 @@ class FWHMx(AFNICommandBase):
      * If you do not use '-detrend', the program attempts to find non-zero
        spatial structure in the input, and will print a warning message
        if it is detected.
 
-
    .. note:: Notes on -demend

      * I recommend this option, and it is not the default only for historical
@@ -1340,6 +1317,14 @@ class FWHMx(AFNICommandBase):
      structure in the image will bias the estimation of the FWHM of the
      image time series NOISE (which is usually the point of using 3dFWHMx).
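+
+    A minimal sketch following the recommendation above (the ``detrend``
+    input also accepts an integer polynomial order, and ``fwhm`` is an
+    ``FWHMx`` instance as in the examples below)::
+
+        fwhm.inputs.detrend = True
+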
+ Examples + -------- + >>> from nipype.interfaces import afni + >>> fwhm = afni.FWHMx() + >>> fwhm.inputs.in_file = 'functional.nii' + >>> fwhm.cmdline + '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out' + >>> res = fwhm.run() # doctest: +SKIP """ @@ -1371,21 +1356,19 @@ def _parse_inputs(self, skip=None): def _format_arg(self, name, trait_spec, value): if name == "detrend": - if isinstance(value, bool): - if value: - return trait_spec.argstr - else: - return None + if value is True: + return trait_spec.argstr + elif value is False: + return None elif isinstance(value, int): return trait_spec.argstr + " %d" % value if name == "acf": - if isinstance(value, bool): - if value: - return trait_spec.argstr - else: - self._acf = False - return None + if value is True: + return trait_spec.argstr + elif value is False: + self._acf = False + return None elif isinstance(value, tuple): return trait_spec.argstr + " %s %f" % value elif isinstance(value, (str, bytes)): @@ -1471,30 +1454,33 @@ class LocalBistatInputSpec(AFNICommandInputSpec): stat = InputMultiPath( traits.Enum(_stat_names), mandatory=True, - desc="statistics to compute. Possible names are :" - " * pearson = Pearson correlation coefficient" - " * spearman = Spearman correlation coefficient" - " * quadrant = Quadrant correlation coefficient" - " * mutinfo = Mutual Information" - " * normuti = Normalized Mutual Information" - " * jointent = Joint entropy" - " * hellinger= Hellinger metric" - " * crU = Correlation ratio (Unsymmetric)" - " * crM = Correlation ratio (symmetrized by Multiplication)" - " * crA = Correlation ratio (symmetrized by Addition)" - " * L2slope = slope of least-squares (L2) linear regression of " - " the data from dataset1 vs. the dataset2 " - " (i.e., d2 = a + b*d1 ==> this is 'b')" - " * L1slope = slope of least-absolute-sum (L1) linear " - " regression of the data from dataset1 vs. " - " the dataset2" - " * num = number of the values in the region: " - " with the use of -mask or -automask, " - " the size of the region around any given " - " voxel will vary; this option lets you " - " map that size." - " * ALL = all of the above, in that order" - "More than one option can be used.", + desc="""\ +Statistics to compute. Possible names are: + + * pearson = Pearson correlation coefficient + * spearman = Spearman correlation coefficient + * quadrant = Quadrant correlation coefficient + * mutinfo = Mutual Information + * normuti = Normalized Mutual Information + * jointent = Joint entropy + * hellinger= Hellinger metric + * crU = Correlation ratio (Unsymmetric) + * crM = Correlation ratio (symmetrized by Multiplication) + * crA = Correlation ratio (symmetrized by Addition) + * L2slope = slope of least-squares (L2) linear regression of + the data from dataset1 vs. the dataset2 + (i.e., d2 = a + b*d1 ==> this is 'b') + * L1slope = slope of least-absolute-sum (L1) linear + regression of the data from dataset1 vs. + the dataset2 + * num = number of the values in the region: + with the use of -mask or -automask, + the size of the region around any given + voxel will vary; this option lets you + map that size. 
+ * ALL = all of the above, in that order + +More than one option can be used.""", argstr="-stat %s...", ) mask_file = File( @@ -1534,8 +1520,7 @@ class LocalBistat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> bistat = afni.LocalBistat() >>> bistat.inputs.in_file1 = 'functional.nii' @@ -1608,48 +1593,51 @@ class LocalstatInputSpec(AFNICommandInputSpec): ), ), mandatory=True, - desc="statistics to compute. Possible names are :\n" - " * mean = average of the values\n" - " * stdev = standard deviation\n" - " * var = variance (stdev*stdev)\n" - " * cvar = coefficient of variation = stdev/fabs(mean)\n" - " * median = median of the values\n" - " * MAD = median absolute deviation\n" - " * min = minimum\n" - " * max = maximum\n" - " * absmax = maximum of the absolute values\n" - " * num = number of the values in the region:\n" - " with the use of -mask or -automask," - " the size of the region around any given" - " voxel will vary; this option lets you" - " map that size. It may be useful if you" - " plan to compute a t-statistic (say) from" - " the mean and stdev outputs.\n" - " * sum = sum of the values in the region\n" - " * FWHM = compute (like 3dFWHM) image smoothness" - " inside each voxel's neighborhood. Results" - " are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz." - " Places where an output is -1 are locations" - " where the FWHM value could not be computed" - " (e.g., outside the mask).\n" - " * FWHMbar= Compute just the average of the 3 FWHM values" - " (normally would NOT do this with FWHM also).\n" - " * perc:P0:P1:Pstep = \n" - " Compute percentiles between P0 and P1 with a " - " step of Pstep.\n" - " Default P1 is equal to P0 and default P2 = 1\n" - " * rank = rank of the voxel's intensity\n" - " * frank = rank / number of voxels in neighborhood\n" - " * P2skew = Pearson's second skewness coefficient" - " 3 * (mean - median) / stdev\n" - " * ALL = all of the above, in that order " - " (except for FWHMbar and perc).\n" - " * mMP2s = Exactly the same output as:" - " median, MAD, P2skew," - " but a little faster\n" - " * mmMP2s = Exactly the same output as:" - " mean, median, MAD, P2skew\n" - "More than one option can be used.", + desc="""\ +statistics to compute. Possible names are: + + * mean = average of the values + * stdev = standard deviation + * var = variance (stdev\\*stdev) + * cvar = coefficient of variation = stdev/fabs(mean) + * median = median of the values + * MAD = median absolute deviation + * min = minimum + * max = maximum + * absmax = maximum of the absolute values + * num = number of the values in the region: + with the use of -mask or -automask, + the size of the region around any given + voxel will vary; this option lets you + map that size. It may be useful if you + plan to compute a t-statistic (say) from + the mean and stdev outputs. + * sum = sum of the values in the region + * FWHM = compute (like 3dFWHM) image smoothness + inside each voxel's neighborhood. Results + are in 3 sub-bricks: FWHMx, FWHMy, and FWHMz. + Places where an output is -1 are locations + where the FWHM value could not be computed + (e.g., outside the mask). + * FWHMbar= Compute just the average of the 3 FWHM values + (normally would NOT do this with FWHM also). + * perc:P0:P1:Pstep = + Compute percentiles between P0 and P1 with a + step of Pstep.
+ Default P1 is equal to P0 and default Pstep = 1 + * rank = rank of the voxel's intensity + * frank = rank / number of voxels in neighborhood + * P2skew = Pearson's second skewness coefficient + 3 \\* (mean - median) / stdev + * ALL = all of the above, in that order + (except for FWHMbar and perc). + * mMP2s = Exactly the same output as: + median, MAD, P2skew, + but a little faster + * mmMP2s = Exactly the same output as: + mean, median, MAD, P2skew + +More than one option can be used.""", argstr="-stat %s...", ) mask_file = File( @@ -1664,12 +1652,12 @@ class LocalstatInputSpec(AFNICommandInputSpec): desc="Compute the mask as in program 3dAutomask.", argstr="-automask" ) nonmask = traits.Bool( - desc="Voxels not in the mask WILL have their local statistics " - "computed from all voxels in their neighborhood that ARE in " - "the mask.\n" - " * For instance, this option can be used to compute the " - " average local white matter time series, even at non-WM " - " voxels.", + desc="""\ +Voxels not in the mask WILL have their local statistics +computed from all voxels in their neighborhood that ARE in +the mask. For instance, this option can be used to compute the +average local white matter time series, even at non-WM +voxels.""", argstr="-use_nonmask", ) reduce_grid = traits.Either( @@ -1737,8 +1725,7 @@ class Localstat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> localstat = afni.Localstat() >>> localstat.inputs.in_file = 'functional.nii' @@ -1796,8 +1783,7 @@ class MaskToolInputSpec(AFNICommandInputSpec): "short", "float", argstr="-datum %s", - desc="specify data type for output. Valid types are 'byte', " - "'short' and 'float'.", + desc="specify data type for output.", ) dilate_inputs = Str( desc="Use this option to dilate and/or erode datasets as they are " @@ -1843,8 +1829,7 @@ class MaskTool(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> masktool = afni.MaskTool() >>> masktool.inputs.in_file = 'functional.nii' @@ -1889,8 +1874,7 @@ class Merge(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> merge = afni.Merge() >>> merge.inputs.in_files = ['functional.nii', 'functional2.nii'] @@ -1938,8 +1922,7 @@ class Notes(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> notes = afni.Notes() >>> notes.inputs.in_file = 'functional.HEAD' @@ -1948,6 +1931,7 @@ >>> notes.cmdline '3dNotes -a "This note is added." -h "This note is added to history."
functional.HEAD' >>> res = notes.run() # doctest: +SKIP + """ _cmd = "3dNotes" @@ -2000,8 +1984,7 @@ class NwarpAdjust(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> adjust = afni.NwarpAdjust() >>> adjust.inputs.warps = ['func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz'] @@ -2117,8 +2100,7 @@ class NwarpApply(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> nwarp = afni.NwarpApply() >>> nwarp.inputs.in_file = 'Fred+orig' @@ -2215,8 +2197,7 @@ class NwarpCat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> nwarpcat = afni.NwarpCat() >>> nwarpcat.inputs.in_files = ['Q25_warp+tlrc.HEAD', ('IDENT', 'structural.nii')] @@ -2450,8 +2431,7 @@ class Refit(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> refit = afni.Refit() >>> refit.inputs.in_file = 'structural.nii' @@ -2466,6 +2446,7 @@ class Refit(AFNICommandBase): >>> refit_2.cmdline "3drefit -atrfloat IJK_TO_DICOM_REAL '1 0.2 0 0 -0.2 1 0 0 0 0 1 0' structural.nii" >>> res = refit_2.run() # doctest: +SKIP + """ _cmd = "3drefit" @@ -2510,29 +2491,32 @@ class ReHoInputSpec(CommandLineInputSpec): "vertices", xor=["sphere", "ellipsoid"], argstr="-nneigh %s", - desc="voxels in neighborhood. can be: " - "* faces (for voxel and 6 facewise neighbors, only),\n" - "* edges (for voxel and 18 face- and edge-wise neighbors),\n" - "* vertices (for voxel and 26 face-, edge-, and node-wise " - "neighbors).\n", + desc=""" +voxels in neighborhood. can be: +``faces`` (for voxel and 6 facewise neighbors, only), +``edges`` (for voxel and 18 face- and edge-wise neighbors), +``vertices`` (for voxel and 26 face-, edge-, and node-wise neighbors).""", ) sphere = traits.Float( argstr="-neigh_RAD %s", xor=["neighborhood", "ellipsoid"], - desc="for additional voxelwise neighborhood control, the " - "radius R of a desired neighborhood can be put in; R is " - "a floating point number, and must be >1. Examples of " - "the numbers of voxels in a given radius are as follows " - "(you can roughly approximate with the ol' 4*PI*(R^3)/3 " - "thing):\n" - " R=2.0 -> V=33,\n" - " R=2.3 -> V=57, \n" - " R=2.9 -> V=93, \n" - " R=3.1 -> V=123, \n" - " R=3.9 -> V=251, \n" - " R=4.5 -> V=389, \n" - " R=6.1 -> V=949, \n" - "but you can choose most any value.", + desc=r"""\ +For additional voxelwise neighborhood control, the +radius R of a desired neighborhood can be put in; R is +a floating point number, and must be >1. Examples of +the numbers of voxels in a given radius are as follows +(you can roughly approximate with the ol' :math:`4\pi\,R^3/3` +thing): + + * R=2.0 -> V=33 + * R=2.3 -> V=57, + * R=2.9 -> V=93, + * R=3.1 -> V=123, + * R=3.9 -> V=251, + * R=4.5 -> V=389, + * R=6.1 -> V=949, + +but you can choose most any value.""", ) ellipsoid = traits.Tuple( traits.Float, @@ -2540,13 +2524,14 @@ class ReHoInputSpec(CommandLineInputSpec): traits.Float, xor=["sphere", "neighborhood"], argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", - desc="Tuple indicating the x, y, and z radius of an ellipsoid " - "defining the neighbourhood of each voxel.\n" - "The 'hood is then made according to the following relation:" - "(i/A)^2 + (j/B)^2 + (k/C)^2 <=1.\n" - "which will have approx. V=4*PI*A*B*C/3. 
The impetus for " - "this freedom was for use with data having anisotropic " - "voxel edge lengths.", + desc=r"""\ +Tuple indicating the x, y, and z radius of an ellipsoid +defining the neighbourhood of each voxel. +The 'hood is then made according to the following relation: +:math:`(i/A)^2 + (j/B)^2 + (k/C)^2 \le 1.` +which will have approx. :math:`V=4 \pi \, A B C/3`. The impetus for +this freedom was for use with data having anisotropic +voxel edge lengths.""", ) label_set = File( exists=True, @@ -2572,8 +2557,7 @@ class ReHo(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> reho = afni.ReHo() >>> reho.inputs.in_file = 'functional.nii' @@ -2648,8 +2632,7 @@ class Resample(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> resample = afni.Resample() >>> resample.inputs.in_file = 'functional.nii' @@ -2708,8 +2691,7 @@ class TCat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcat = afni.TCat() >>> tcat.inputs.in_files = ['functional.nii', 'functional2.nii'] @@ -2760,8 +2742,7 @@ class TCatSubBrick(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcsb = afni.TCatSubBrick() >>> tcsb.inputs.in_files = [('functional.nii', "'{2..$}'"), ('functional2.nii', "'{2..$}'")] @@ -2808,8 +2789,7 @@ class TStat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tstat = afni.TStat() >>> tstat.inputs.in_file = 'functional.nii' @@ -2890,8 +2870,7 @@ class To3D(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> to3d = afni.To3D() >>> to3d.inputs.datatype = 'float' @@ -3005,8 +2984,7 @@ class Undump(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> unndump = afni.Undump() >>> unndump.inputs.in_file = 'structural.nii' @@ -3068,7 +3046,7 @@ class UnifizeInputSpec(AFNICommandInputSpec): argstr="-noduplo", ) epi = traits.Bool( - desc="Assume the input dataset is a T2 (or T2*) weighted EPI time " + desc="Assume the input dataset is a T2 (or T2\\*) weighted EPI time " "series. After computing the scaling, apply it to ALL volumes " "(TRs) in the input dataset. That is, a given voxel will be " "scaled by the same factor at each TR. " @@ -3144,8 +3122,7 @@ class Unifize(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> unifize = afni.Unifize() >>> unifize.inputs.in_file = 'structural.nii' @@ -3186,8 +3163,7 @@ class ZCutUp(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zcutup = afni.ZCutUp() >>> zcutup.inputs.in_file = 'functional.nii' @@ -3243,8 +3219,7 @@ class GCOR(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> gcor = afni.GCOR() >>> gcor.inputs.in_file = 'structural.nii' @@ -3318,10 +3293,10 @@ class Axialize(AFNICommand): with the data brick oriented as axial slices. For complete details, see the `3dcopy Documentation. 
- `_ + `__ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> axial3d = afni.Axialize() >>> axial3d.inputs.in_file = 'functional.nii' @@ -3389,8 +3364,7 @@ class Zcat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zcat = afni.Zcat() >>> zcat.inputs.in_files = ['functional2.nii', 'functional3.nii'] @@ -3398,6 +3372,7 @@ class Zcat(AFNICommand): >>> zcat.cmdline '3dZcat -prefix cat_functional.nii functional2.nii functional3.nii' >>> res = zcat.run() # doctest: +SKIP + """ _cmd = "3dZcat" @@ -3498,11 +3473,10 @@ class Zeropad(AFNICommand): """Adds planes of zeros to a dataset (i.e., pads it out). For complete details, see the `3dZeropad Documentation. - `_ + `__ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zeropad = afni.Zeropad() >>> zeropad.inputs.in_files = 'functional.nii' @@ -3516,6 +3490,7 @@ class Zeropad(AFNICommand): >>> zeropad.cmdline '3dZeropad -A 10 -I 10 -L 10 -P 10 -R 10 -S 10 -prefix pad_functional.nii functional.nii' >>> res = zeropad.run() # doctest: +SKIP + """ _cmd = "3dZeropad" diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 6aee26655e..3e74b59924 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1653,15 +1653,18 @@ class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): "b", "br", argstr="-t %s", - desc=""" - transform type - t: translation - r: rigid - a: rigid + affine - s: rigid + affine + deformable syn (default) - sr: rigid + deformable syn - b: rigid + affine + deformable b-spline syn - br: rigid + deformable b-spline syn""", + desc="""\ +Transform type + + * t: translation + * r: rigid + * a: rigid + affine + * s: rigid + affine + deformable syn (default) + * sr: rigid + deformable syn + * b: rigid + affine + deformable b-spline syn + * br: rigid + deformable b-spline syn + +""", usedefault=True, ) diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index e3fe579844..ec83982191 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -331,7 +331,7 @@ class N4BiasFieldCorrectionInputSpec(ANTSCommandInputSpec): False, mandatory=True, usedefault=True, - desc="copy headers of the original image into the " "output (corrected) file", + desc="copy headers of the original image into the output (corrected) file", ) rescale_intensities = traits.Bool( False, @@ -545,11 +545,11 @@ class CorticalThicknessInputSpec(ANTSCommandInputSpec): "antsCT_", argstr="-o %s", usedefault=True, - desc=("Prefix that is prepended to all output" " files (default = antsCT_)"), + desc=("Prefix that is prepended to all output files"), ) image_suffix = traits.Str( "nii.gz", - desc=("any of standard ITK formats," " nii.gz is default"), + desc=("any of standard ITK formats, nii.gz is default"), argstr="-s %s", usedefault=True, ) @@ -583,11 +583,11 @@ class CorticalThicknessInputSpec(ANTSCommandInputSpec): ) prior_segmentation_weight = traits.Float( argstr="-w %f", - desc=("Atropos spatial prior *probability* weight for" " the segmentation"), + desc=("Atropos spatial prior *probability* weight for the segmentation"), ) segmentation_iterations = traits.Int( argstr="-n %d", - desc=("N4 -> Atropos -> N4 iterations during segmentation" " (default = 3)"), + desc=("N4 -> Atropos -> N4 iterations during segmentation (default = 3)"), ) posterior_formulation = traits.Str( argstr="-b %s", @@ -611,7 +611,7 @@ class 
CorticalThicknessInputSpec(ANTSCommandInputSpec): 1, argstr="-u %d", desc=( - "Use random number generated from system clock in Atropos" " (default = 1)" + "Use random number generated from system clock in Atropos (default = 1)" ), ) b_spline_smoothing = traits.Bool( @@ -856,7 +856,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): argstr="-o %s", usedefault=True, desc=( - "Prefix that is prepended to all output" " files (default = highress001_)" + "Prefix that is prepended to all output files" ), ) @@ -871,7 +871,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): ) image_suffix = traits.Str( "nii.gz", - desc=("any of standard ITK formats," " nii.gz is default"), + desc=("any of standard ITK formats, nii.gz is default"), argstr="-s %s", usedefault=True, ) @@ -880,7 +880,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): 1, argstr="-u %d", desc=( - "Use random number generated from system clock in Atropos" " (default = 1)" + "Use random number generated from system clock in Atropos (default = 1)" ), ) keep_temporary_files = traits.Int( @@ -932,6 +932,8 @@ class BrainExtractionOutputSpec(TraitedSpec): class BrainExtraction(ANTSCommand): """ + Atlas-based brain extraction. + Examples -------- >>> from nipype.interfaces.ants.segmentation import BrainExtraction >>> brainextraction = BrainExtraction() >>> brainextraction.inputs.dimension = 3 >>> brainextraction.inputs.anatomical_image ='T1.nii.gz' >>> brainextraction.inputs.brain_template = 'study_template.nii.gz' >>> brainextraction.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz' >>> brainextraction.cmdline - 'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \ --s nii.gz -o highres001_' + 'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 -s nii.gz -o highres001_' + """ input_spec = BrainExtractionInputSpec @@ -1121,23 +1123,23 @@ class JointFusionInputSpec(ANTSCommandInputSpec): desc="Warped atlas segmentations", ) method = traits.Str( - default="Joint", + "Joint", argstr="-m %s", usedefault=True, desc=( "Select voting method. Options: Joint (Joint" - " Label Fusion). May be followed by optional" - " parameters in brackets, e.g., -m Joint[0.1,2]" + " Label Fusion). May be followed by optional" + " parameters in brackets, e.g., -m Joint[0.1,2]" ), ) alpha = traits.Float( - default=0.1, + 0.1, usedefault=True, requires=["method"], desc=("Regularization term added to matrix Mx for inverse"), ) beta = traits.Int( - default=2, + 2, usedefault=True, requires=["method"], desc=("Exponent for mapping intensity difference to joint error"), @@ -1155,7 +1157,7 @@ class JointFusionInputSpec(ANTSCommandInputSpec): maxlen=3, argstr="-rp %s", desc=( - "Patch radius for similarity measures, " "scalar or vector. Default: 2x2x2" + "Patch radius for similarity measures, scalar or vector. Default: 2x2x2" ), ) search_radius = traits.ListInt( @@ -1181,9 +1183,10 @@ class JointFusionOutputSpec(TraitedSpec): class JointFusion(ANTSCommand): """ + Segmentation fusion tool. + Examples -------- - >>> from nipype.interfaces.ants import JointFusion >>> at = JointFusion() >>> at.inputs.dimension = 3 >>> at.inputs.modalities = 1 >>> at.inputs.method = 'Joint[0.1,2]' >>> at.inputs.output_label_image ='fusion_labelimage_output.nii' >>> at.inputs.warped_intensity_images = ['im1.nii', ... 'im2.nii', ... 'im3.nii'] >>> at.inputs.warped_label_images = ['segmentation0.nii.gz', ... @@ -1198,8 +1201,7 @@ class
'segmentation1.nii.gz'] >>> at.inputs.target_image = 'T1.nii' >>> at.cmdline - 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz \ --l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' >>> at.inputs.method = 'Joint' >>> at.inputs.alpha = 0.5 @@ -1207,8 +1209,8 @@ class JointFusion(ANTSCommand): >>> at.inputs.patch_radius = [3,2,1] >>> at.inputs.search_radius = [1,2,3] >>> at.cmdline - 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii \ --l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + """ input_spec = JointFusionInputSpec @@ -1334,6 +1336,7 @@ class DenoiseImage(ANTSCommand): >>> denoise_3.inputs.save_noise = True >>> denoise_3.cmdline 'DenoiseImage -i im1.nii -n Gaussian -o [ im1_noise_corrected.nii, im1_noise.nii ] -s 1' + """ input_spec = DenoiseImageInputSpec @@ -1433,7 +1436,7 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): minlen=3, maxlen=3, argstr="-p %s", - desc=("Patch radius for similarity measures." "Default: 2x2x2"), + desc=("Patch radius for similarity measures. Default: 2x2x2"), ) patch_metric = traits.Enum( "PC", @@ -1485,7 +1488,7 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): out_label_post_prob_name_format = traits.Str( "antsJointFusionPosterior_%d.nii.gz", requires=["out_label_fusion", "out_intensity_fusion_name_format"], - desc="Optional label posterior probability " "image file name format.", + desc="Optional label posterior probability image file name format.", ) out_atlas_voting_weight_name_format = traits.Str( "antsJointFusionVotingWeight_%d.nii.gz", @@ -1494,7 +1497,7 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): "out_intensity_fusion_name_format", "out_label_post_prob_name_format", ], - desc="Optional atlas voting weight image " "file name format.", + desc="Optional atlas voting weight image file name format.", ) verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) @@ -1510,7 +1513,6 @@ class AntsJointFusion(ANTSCommand): """ Examples -------- - >>> from nipype.interfaces.ants import AntsJointFusion >>> antsjointfusion = AntsJointFusion() >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' @@ -1518,22 +1520,18 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] >>> antsjointfusion.inputs.target_image = ['im1.nii'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ --b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" >>> antsjointfusion.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ --b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o 
ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'], ... ['rc2s1.nii','rc2s2.nii'] ] >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', ... 'segmentation1.nii.gz'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii \ --s 3x3x3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.dimension = 3 >>> antsjointfusion.inputs.alpha = 0.5 @@ -1541,29 +1539,21 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.patch_radius = [3,2,1] >>> antsjointfusion.inputs.search_radius = [3] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii \ --p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii -p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.search_radius = ['mask.nii'] >>> antsjointfusion.inputs.verbose = True >>> antsjointfusion.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] >>> antsjointfusion.inputs.exclusion_image_label = ['1','2'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ --o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' >>> antsjointfusion.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' >>> antsjointfusion.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' >>> antsjointfusion.inputs.out_atlas_voting_weight_name_format = 'ants_joint_fusion_voting_weight_%d.nii.gz' >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ --o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, \ -ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] \ --p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" """ @@ -1678,8 +1668,8 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): exists=True, argstr='--segmentation-image "%s"', 
mandatory=True, - desc="A segmentation image must be supplied labeling the gray and white matters." - " Default values = 2 and 3, respectively.", + desc="A segmentation image must be supplied labeling the gray and white matters. " + "Default values = 2 and 3, respectively.", ) gray_matter_label = traits.Int( @@ -1711,7 +1701,7 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): ) convergence = traits.Str( - default="[50,0.001,10]", + "[50,0.001,10]", argstr='--convergence "%s"', usedefault=True, desc="Convergence is determined by fitting a line to the normalized energy profile of" @@ -1816,12 +1806,7 @@ class KellyKapowski(ANTSCommand): >>> kk.inputs.convergence = "[45,0.0,10]" >>> kk.inputs.thickness_prior_estimate = 10 >>> kk.cmdline - 'KellyKapowski --convergence "[45,0.0,10]" \ ---output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" \ ---image-dimensionality 3 --gradient-step 0.025000 \ ---maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 \ ---segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 \ ---smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' + 'KellyKapowski --convergence "[45,0.0,10]" --output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" --image-dimensionality 3 --gradient-step 0.025000 --maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 --segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 --smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' """ @@ -1831,20 +1816,19 @@ class KellyKapowski(ANTSCommand): references_ = [ { - "entry": BibTeX( - "@book{Das2009867," - "author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee}," - "title={Registration based cortical thickness measurement.}," - "journal={NeuroImage}," - "volume={45}," - "number={37}," - "pages={867--879}," - "year={2009}," - "issn={1053-8119}," - "url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}," - "doi={https://doi.org/10.1016/j.neuroimage.2008.12.016}" - "}" - ), + "entry": BibTeX("""\ +@book{Das2009867, + author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. 
Gee}, + title={Registration based cortical thickness measurement.}, + journal={NeuroImage}, + volume={45}, + number={37}, + pages={867--879}, + year={2009}, + issn={1053-8119}, + url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}, + doi={https://doi.org/10.1016/j.neuroimage.2008.12.016} +}"""), "description": "The details on the implementation of DiReCT.", "tags": ["implementation"], } diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 3e3c75be50..a08cfb1764 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -31,14 +31,11 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): mask_image = File( "none", argstr="%s", exists=True, desc="mask image", position=3, usedefault=True ) - colormap = traits.Str( + colormap = traits.Enum( + "grey", "red", "green", "blue", "copper", "jet", "hsv", "spring", "summer", + "autumn", "winter", "hot", "cool", "overunder", "custom", argstr="%s", - usedefault=True, - desc=( - "Possible colormaps: grey, red, green, " - "blue, copper, jet, hsv, spring, summer, " - "autumn, winter, hot, cool, overunder, custom " - ), + desc="Select a colormap", mandatory=True, position=4, ) @@ -52,10 +49,10 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): argstr="%d", desc="maximum input", mandatory=True, position=7 ) minimum_RGB_output = traits.Int( - 0, usedefault=True, argstr="%d", desc="", position=8 + 0, usedefault=True, argstr="%d", position=8 ) maximum_RGB_output = traits.Int( - 255, usedefault=True, argstr="%d", desc="", position=9 + 255, usedefault=True, argstr="%d", position=9 ) @@ -65,6 +62,8 @@ class ConvertScalarImageToRGBOutputSpec(TraitedSpec): class ConvertScalarImageToRGB(ANTSCommand): """ + Convert scalar images to RGB. + Examples -------- >>> from nipype.interfaces.ants.visualization import ConvertScalarImageToRGB @@ -76,6 +75,7 @@ class ConvertScalarImageToRGB(ANTSCommand): >>> converter.inputs.maximum_input = 6 >>> converter.cmdline 'ConvertScalarImageToRGB 3 T1.nii.gz rgb.nii.gz none jet none 0 6 0 255' + """ _cmd = "ConvertScalarImageToRGB" diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index fd4c701fff..a0e7325580 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -128,10 +128,7 @@ def _get_filecopy_info(cls): class BaseInterface(Interface): - """Implements common interface functionality. - - Implements - ---------- + """Implement common interface functionality. * Initializes inputs/outputs from input_spec/output_spec * Provides help based on input_spec and output_spec @@ -144,18 +141,19 @@ class BaseInterface(Interface): This class cannot be instantiated. - - Relevant Interface attributes - ----------------------------- - - ``input_spec`` points to the traited class for the inputs - ``output_spec`` points to the traited class for the outputs - ``_redirect_x`` should be set to ``True`` when the interface requires - connecting to a ``$DISPLAY`` (default is ``False``). - ``resource_monitor`` if ``False`` prevents resource-monitoring this - interface, if ``True`` monitoring will be enabled IFF the general - Nipype config is set on (``resource_monitor = true``). 
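# A schematic sketch of how the interface attributes documented in this
# docstring are declared on a concrete subclass; the ``Sum`` interface is
# hypothetical and only illustrates the input_spec/output_spec wiring.
from nipype.interfaces.base import (
    BaseInterface, BaseInterfaceInputSpec, TraitedSpec, traits)

class _SumInputSpec(BaseInterfaceInputSpec):
    a = traits.Float(mandatory=True, desc="first addend")
    b = traits.Float(mandatory=True, desc="second addend")

class _SumOutputSpec(TraitedSpec):
    total = traits.Float(desc="sum of a and b")

class Sum(BaseInterface):
    input_spec = _SumInputSpec    # traited class for the inputs
    output_spec = _SumOutputSpec  # traited class for the outputs
    _redirect_x = False           # no $DISPLAY connection required
    resource_monitor = True       # honored only if the global config enables it

    def _run_interface(self, runtime):
        self._total = self.inputs.a + self.inputs.b
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["total"] = self._total
        return outputs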
- + Attributes + ---------- + input_spec: HasTraits + points to the traited class for the inputs + output_spec: HasTraits + points to the traited class for the outputs + _redirect_x: bool + should be set to ``True`` when the interface requires + connecting to a ``$DISPLAY`` (default is ``False``). + resource_monitor: bool + If ``False``, prevents resource-monitoring this interface + If ``True`` monitoring will be enabled IFF the general + Nipype config is set on (``resource_monitor = true``). """ @@ -323,14 +321,15 @@ def run(self, cwd=None, ignore_exception=None, **inputs): Parameters ---------- - cwd : specify a folder where the interface should be run inputs : allows the interface settings to be updated Returns ------- - results : an InterfaceResult object containing a copy of the instance - that was executed, provenance information and, if successful, results + results : :obj:`InterfaceResult` + A copy of the instance that was executed, provenance information and, + if successful, results + """ from ...utils.profiler import ResourceMonitor @@ -577,7 +576,6 @@ class SimpleInterface(BaseInterface): Examples -------- - >>> from nipype.interfaces.base import ( ... SimpleInterface, BaseInterfaceInputSpec, TraitedSpec) @@ -602,6 +600,7 @@ >>> dbl.inputs.x = 2 >>> dbl.run().outputs.doubled 4.0 + """ def __init__(self, from_file=None, resource_monitor=None, **inputs): @@ -620,14 +619,11 @@ class must be instantiated with a command argument Parameters ---------- - - command : string + command : str define base immutable `command` you wish to run - - args : string, optional + args : str, optional optional arguments passed to base `command` - Examples -------- >>> import pprint >>> from nipype.interfaces import base >>> cli = base.CommandLine(command='ls', args='-al') >>> cli.cmdline 'ls -al' - # Use get_traitsfree() to check all inputs set + >>> # Use get_traitsfree() to check all inputs set >>> pprint.pprint(cli.inputs.get_traitsfree()) # doctest: {'args': '-al', 'environ': {'DISPLAY': ':1'}} @@ -758,7 +754,8 @@ def _run_interface(self, runtime, correct_return_codes=(0,)): Returns ------- - runtime : updated runtime information + runtime : + updated runtime information adds stdout, stderr, merged, cmdline, dependencies, command_path """ @@ -997,6 +994,7 @@ class MpiCommandLine(CommandLine): >>> mpi_cli.inputs.n_procs = 8 >>> mpi_cli.cmdline 'mpiexec -n 8 my_mpi_prog -v' + """ input_spec = MpiCommandLineInputSpec diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index 579f97def8..b42a73f501 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -30,8 +30,7 @@ from ... import config, __version__ - -FLOAT_FORMAT = "{:.10f}".format +_float_fmt = "{:.10f}".format nipype_version = Version(__version__) @@ -325,7 +324,7 @@ def _get_sorteddict( else: out = hash elif isinstance(objekt, float): - out = FLOAT_FORMAT(objekt) + out = _float_fmt(objekt) else: out = objekt return out diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py index e3e1a229f6..4b01754be0 100644 --- a/nipype/interfaces/base/support.py +++ b/nipype/interfaces/base/support.py @@ -32,10 +32,11 @@ def __str__(self): class Bunch(object): - """Dictionary-like class that provides attribute-style access to it's items. + """ + Dictionary-like class that provides attribute-style access to its items. - A `Bunch` is a simple container that stores it's items as class - attributes.
Internally all items are stored in a dictionary and + A ``Bunch`` is a simple container that stores its items as class + attributes [1]_. Internally all items are stored in a dictionary and the class exposes several of the dictionary methods. Examples @@ -48,10 +49,8 @@ class Bunch(object): >>> inputs Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=False) - Notes - ----- - The Bunch pattern came from the Python Cookbook: - + References + ---------- .. [1] A. Martelli, D. Hudgeon, "Collecting a Bunch of Named Items", Python Cookbook, 2nd Ed, Chapter 4.18, 2005. diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py index 0d26017ea7..84177a16ad 100644 --- a/nipype/interfaces/brainsuite/brainsuite.py +++ b/nipype/interfaces/brainsuite/brainsuite.py @@ -1,4 +1,9 @@ # -*- coding: utf-8 -*- +"""This script provides interfaces for BrainSuite command line tools. +Please see brainsuite.org for more information. + +Author: Jason Wong +""" import os import re as regex @@ -12,12 +17,6 @@ isdefined, ) -"""This script provides interfaces for BrainSuite command line tools. -Please see brainsuite.org for more information. - -Author: Jason Wong -""" - class BseInputSpec(CommandLineInputSpec): @@ -124,7 +123,7 @@ class BfcInputSpec(CommandLineInputSpec): ) inputMaskFile = File(desc="mask file", argstr="-m %s", hash_files=False) outputMRIVolume = File( - desc="output bias-corrected MRI volume.If unspecified, output file name will be auto generated.", + desc="output bias-corrected MRI volume. If unspecified, output file name will be auto generated.", argstr="-o %s", hash_files=False, genfile=True, @@ -150,7 +149,13 @@ class BfcInputSpec(CommandLineInputSpec): histogramType = traits.Enum( "ellipse", "block", - desc="Options for type of histogram\nellipse: use ellipsoid for ROI histogram\nblock :use block for ROI histogram", + desc="""\ +Options for type of histogram: + + * ``ellipse``: use ellipsoid for ROI histogram + * ``block``: use block for ROI histogram + +""", argstr="%s", ) iterativeMode = traits.Bool( @@ -178,8 +183,14 @@ class BfcInputSpec(CommandLineInputSpec): "low", "medium", "high", - desc="Preset options for bias_model\n low: small bias model [0.95,1.05]\n" - "medium: medium bias model [0.90,1.10]\n high: high bias model [0.80,1.20]", + desc="""\ +Preset options for bias_model + + * low: small bias model [0.95,1.05] + * medium: medium bias model [0.90,1.10] + * high: high bias model [0.80,1.20] + +""", argstr="%s", ) intermediate_file_type = traits.Enum( @@ -1314,7 +1325,7 @@ class BDPInputSpec(CommandLineInputSpec): "saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, " "L3). This is the default behavior if no diffusion modeling flags are " "specified. The estimated diffusion tensors can be visualized by loading " - "the saved *.eig.nii.gz file in BrainSuite. BDP reports diffusivity (MD, " + "the saved ``*.eig.nii.gz`` file in BrainSuite. BDP reports diffusivity (MD, " "axial, radial, L2 and L3) in a unit which is reciprocal inverse of the " "unit of input b-value. ", ) diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index 4eadb98207..c91c02569c 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -"""The ants module provides basic functions for interfacing with ants - functions.
-""" +"""Convert3D is a command-line tool for converting 3D images between common file formats.""" import os from glob import glob diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index a16dbd9149..0c44b4abea 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -134,8 +134,8 @@ class SFPICOCalibData(StdOutCommandLine): which stores information about the datafile, is generated along with the datafile. - Example 1 - --------- + Examples + -------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam @@ -151,8 +151,6 @@ class SFPICOCalibData(StdOutCommandLine): data produced can be varied by specifying the ranges and steps of the parameters for both the one and two fibre datasets used. - Example 2 - --------- To create a custom calibration dataset >>> import nipype.interfaces.camino as cam @@ -172,6 +170,7 @@ class SFPICOCalibData(StdOutCommandLine): simulate the one fibre cases and 72,912 voxels simulate the various two fibre cases. However, care should be taken to ensure that enough data is generated for calculating the LUT. # doctest: +SKIP + """ _cmd = "sfpicocalibdata" @@ -218,14 +217,16 @@ class SFLUTGenInputSpec(StdOutCommandLineInputSpec): "bingham", "watson", argstr="-pdf %s", - desc=( - "Sets the distribution to use for the calibration. The default is the Bingham " - "distribution, which allows elliptical probability density contours. " - "Currently supported options are: " - " bingham - The Bingham distribution, which allows elliptical probability " - " density contours. " - " watson - The Watson distribution. This distribution is rotationally symmetric." - ), + desc="""\ +Sets the distribution to use for the calibration. The default is the Bingham +distribution, which allows elliptical probability density contours. +Currently supported options are: + + * bingham -- The Bingham distribution, which allows elliptical probability + density contours. + * watson -- The Watson distribution. This distribution is rotationally symmetric. + +""", usedefault=True, ) binincsize = traits.Int( @@ -286,9 +287,9 @@ class SFLUTGen(StdOutCommandLine): This utility uses calibration data generated from SFPICOCalibData and peak information created by SFPeaks. - The utility outputs two lut's, *_oneFibreSurfaceCoeffs.Bdouble and - *_twoFibreSurfaceCoeffs.Bdouble. Each of these files contains big- - endian doubles as standard. The format of the output is: :: + The utility outputs two lut's, ``*_oneFibreSurfaceCoeffs.Bdouble`` and + ``*_twoFibreSurfaceCoeffs.Bdouble``. Each of these files contains big-endian doubles + as standard. The format of the output is:: dimensions (1 for Watson, 2 for Bingham) order (the order of the polynomial) @@ -298,12 +299,12 @@ class SFLUTGen(StdOutCommandLine): coefficient_N In the case of the Watson, there is a single set of coefficients, - which are ordered: :: + which are ordered:: constant, x, x^2, ..., x^order. 
In the case of the Bingham, there are two sets of coefficients (one - for each surface), ordered so that: :: + for each surface), ordered so that:: for j = 1 to order for k = 1 to order @@ -311,7 +312,7 @@ class SFLUTGen(StdOutCommandLine): where j+k < order Example - --------- + ------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam @@ -319,6 +320,7 @@ class SFLUTGen(StdOutCommandLine): >>> lutgen.inputs.in_file = 'QSH_peaks.Bdouble' >>> lutgen.inputs.info_file = 'PICO_calib.info' >>> lutgen.run() # doctest: +SKIP + """ _cmd = "sflutgen" diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 5ec7fe8c63..2b7d0ff337 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -146,8 +146,8 @@ class Conmat(CommandLine): In all cases, distance to the seed point is defined along the streamline path. - Example 1 - --------- + Examples + -------- To create a standard connectivity matrix based on streamline counts. >>> import nipype.interfaces.camino as cam @@ -156,8 +156,6 @@ class Conmat(CommandLine): >>> conmat.inputs.target_file = 'atlas.nii.gz' >>> conmat.run() # doctest: +SKIP - Example 1 - --------- To create a standard connectivity matrix and mean tractwise FA statistics. >>> import nipype.interfaces.camino as cam @@ -167,6 +165,7 @@ class Conmat(CommandLine): >>> conmat.inputs.scalar_file = 'fa.nii.gz' >>> conmat.tract_stat = 'mean' >>> conmat.run() # doctest: +SKIP + """ _cmd = "conmat" diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 6a17271bcf..7e74fe8ad6 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -357,7 +357,7 @@ def _gen_model_options(): # @NoSelf argstr="-fixedmod %s", minlen=4, maxlen=4, - desc="Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with |q|=Q and diffusion time tau. The N measurements with |q|=Q have unique directions. The program reads in the directions from the files in directory PointSets.", + desc="Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with :math:`|q|=Q` and diffusion time tau. The N measurements with :math:`|q|=Q` have unique directions. The program reads in the directions from the files in directory PointSets.", ) fixedbvalue = traits.List( diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index f152f32762..0cd8b0c49c 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -73,8 +73,8 @@ class QBallMX(StdOutCommandLine): Generates a reconstruction matrix for Q-Ball. Used in LinRecon with the same scheme file to reconstruct data. - Example 1 - --------- + Examples + -------- To create a linear transform matrix using Spherical Harmonics (sh). >>> import nipype.interfaces.camino as cam @@ -84,8 +84,6 @@ class QBallMX(StdOutCommandLine): >>> qballmx.inputs.order = 6 >>> qballmx.run() # doctest: +SKIP - Example 2 - --------- To create a linear transform matrix using Radial Basis Functions (rbf). This command uses the default setting of rbf sigma = 0.2618 (15 degrees), data smoothing sigma = 0.1309 (7.5 degrees), rbf @@ -106,6 +104,7 @@ class QBallMX(StdOutCommandLine): >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.inputs.bgmask = 'brain_mask.nii' >>> qballcoeffs.run() # doctest: +SKIP + """ _cmd = "qballmx" @@ -187,7 +186,7 @@ class LinRecon(StdOutCommandLine): are stored row by row. 
Example - --------- + ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). @@ -206,6 +205,7 @@ class LinRecon(StdOutCommandLine): >>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble' >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.run() # doctest: +SKIP + """ _cmd = "linrecon" @@ -236,14 +236,19 @@ class MESDInputSpec(StdOutCommandLineInputSpec): argstr="-filter %s", position=2, mandatory=True, - desc=( - "The inversion index specifies the type of inversion to perform on the data." - "The currently available choices are:" - "Inverter name | Inverter parameters" - "---------------|------------------" - "SPIKE | bd (b-value x diffusivity along the fibre.)" - "PAS | r" - ), + desc=""" +The inversion index specifies the type of inversion to perform on the data. +The currently available choices are: + + +----------------+---------------------------------------------+ + | Inverter name | Inverter parameters | + +================+=============================================+ + | SPIKE | bd (b-value x diffusivity along the fibre.) | + +----------------+---------------------------------------------+ + | PAS | r | + +----------------+---------------------------------------------+ + +""", ) inverter_param = traits.Float( argstr="%f", @@ -365,7 +370,7 @@ class MESD(StdOutCommandLine): Sweet and Alexander "Reduced Encoding Persistent Angular Structure" 572 ISMRM 2010. Example - --------- + ------- Run MESD on every voxel of the data file SubjectA.Bfloat using the PASMRI kernel. >>> import nipype.interfaces.camino as cam @@ -375,6 +380,7 @@ class MESD(StdOutCommandLine): >>> mesd.inputs.inverter = 'PAS' >>> mesd.inputs.inverter_param = 1.4 >>> mesd.run() # doctest: +SKIP + """ _cmd = "mesd" @@ -565,7 +571,7 @@ class SFPeaks(StdOutCommandLine): Example - --------- + ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). 
@@ -577,6 +583,7 @@ class SFPeaks(StdOutCommandLine): >>> sf_peaks.inputs.density = 100 >>> sf_peaks.inputs.searchradius = 1.0 >>> sf_peaks.run() # doctest: +SKIP + """ _cmd = "sfpeaks" diff --git a/nipype/interfaces/camino2trackvis/__init__.py b/nipype/interfaces/camino2trackvis/__init__.py index 94d3e458a7..b132a20f0c 100644 --- a/nipype/interfaces/camino2trackvis/__init__.py +++ b/nipype/interfaces/camino2trackvis/__init__.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Camino2Trackvis top level namespace -""" - +"""Camino-Trackvis allows interoperability between Camino and TrackVis.""" from .convert import Camino2Trackvis, Trackvis2Camino diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index f4e7e7dfd1..63d7a385da 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -""" -Provides interfaces to various commands provided by Camino-Trackvis -""" +"""Provides interfaces to various commands provided by Camino-Trackvis.""" import os diff --git a/nipype/interfaces/cmtk/__init__.py b/nipype/interfaces/cmtk/__init__.py index 60c7d636d5..fc45bc986e 100644 --- a/nipype/interfaces/cmtk/__init__.py +++ b/nipype/interfaces/cmtk/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""CMP implements a full processing pipeline for creating connectomes with dMRI data.""" from .cmtk import ROIGen, CreateMatrix, CreateNodes from .nx import NetworkXMetrics, AverageNetworks from .parcellation import Parcellate diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 237b092f35..9c2e5815e0 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -133,18 +133,23 @@ def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): def create_endpoints_array(fib, voxelSize): - """ Create the endpoints arrays for each fiber + """ Create the endpoints arrays for each fiber. 
+ Parameters ---------- - fib: the fibers data - voxelSize: 3-tuple containing the voxel size of the ROI image + fib : + the fibers data + voxelSize: + 3-tuple containing the voxel size of the ROI image + Returns ------- - (endpoints: matrix of size [#fibers, 2, 3] containing for each fiber the - index of its first and last point in the voxelSize volume - endpointsmm) : endpoints in milimeter coordinates - """ + endpoints : matrix of size [#fibers, 2, 3] + containing for each fiber the index of its first and last point in the voxelSize volume + endpointsmm : matrix of size [#fibers, 2, 3] + endpoints in millimeter coordinates + """ + # Init n = len(fib) endpoints = np.zeros((n, 2, 3)) diff --git a/nipype/interfaces/cmtk/nbs.py index e224daa082..4e1db9ffb7 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -97,18 +97,19 @@ class NetworkBasedStatistic(LibraryBaseInterface): """ Calculates and outputs the average network given a set of input NetworkX gpickle files + See Also + -------- For documentation of Network-based statistic parameters: - - https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py + https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py Example ------- - >>> import nipype.interfaces.cmtk as cmtk >>> nbs = cmtk.NetworkBasedStatistic() >>> nbs.inputs.in_group1 = ['subj1.pck', 'subj2.pck'] # doctest: +SKIP >>> nbs.inputs.in_group2 = ['pat1.pck', 'pat2.pck'] # doctest: +SKIP >>> nbs.run() # doctest: +SKIP + """ input_spec = NetworkBasedStatisticInputSpec diff --git a/nipype/interfaces/cmtk/nx.py index c34d372a7e..3886fe8844 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -447,11 +447,11 @@ class NetworkXMetrics(BaseInterface): Example ------- - >>> import nipype.interfaces.cmtk as cmtk >>> nxmetrics = cmtk.NetworkXMetrics() >>> nxmetrics.inputs.in_file = 'subj1.pck' >>> nxmetrics.run() # doctest: +SKIP + """ input_spec = NetworkXMetricsInputSpec @@ -636,7 +636,6 @@ class AverageNetworks(BaseInterface): Example ------- - >>> import nipype.interfaces.cmtk as cmtk >>> avg = cmtk.AverageNetworks() >>> avg.inputs.in_files = ['subj1.pck', 'subj2.pck'] diff --git a/nipype/interfaces/cmtk/parcellation.py index c7397b2133..4868ff3df4 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -661,17 +661,25 @@ def crop_and_move_datasets( def extract(Z, shape, position, fill): - """ Extract voxel neighbourhood -Parameters ----------- -Z: the original data -shape: tuple containing neighbourhood dimensions -position: tuple containing central point indexes -fill: value for the padding of Z -Returns ------- -R: the neighbourhood of the specified point in Z -""" + """Extract voxel neighbourhood + + Parameters + ---------- + Z : + the original data + shape : + tuple containing neighbourhood dimensions + position : + tuple containing central point indexes + fill : + value for the padding of Z + + Returns + ------- + R : + the neighbourhood of the specified point in Z + + """ R = ( np.ones(shape, dtype=Z.dtype) * fill ) # initialize output block to the fill value diff --git a/nipype/interfaces/dcm2nii.py index 87d9b7b3df..c88f11ba6a 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -"""The dcm2nii module
provides basic functions for dicom conversion -""" +"""dcm2nii converts images from the proprietary scanner DICOM format to NIfTI.""" import os import re from copy import deepcopy diff --git a/nipype/interfaces/dcmstack.py index d7223468c8..f23461814c 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -"""Provides interfaces to various commands provided by dcmstack -""" +"""dcmstack allows series of DICOM images to be stacked into multi-dimensional arrays.""" import os from os import path as op diff --git a/nipype/interfaces/diffusion_toolkit/__init__.py index cef13227c4..c3927800a3 100644 --- a/nipype/interfaces/diffusion_toolkit/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Diffusion Toolkit performs data reconstruction and fiber tracking on diffusion MR images.""" from .base import Info from .postproc import SplineFilter, TrackMerge from .dti import DTIRecon, DTITracker diff --git a/nipype/interfaces/diffusion_toolkit/dti.py index 765ef6d8b9..bc30656b00 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -46,25 +46,28 @@ class DTIReconInputSpec(CommandLineInputSpec): traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", + desc="""\ +Specify image orientation vectors. If just one argument is given, +will treat it as filename and read the orientation vectors from +the file. If 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. +This information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when.""", argstr="-iop %f", ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", + desc="""\ +When oblique angle(s) are applied, some SIEMENS DTI protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", argstr="-oc", ) b0_threshold = traits.Float( - desc="""program will use b0 image with the given threshold to mask out high - background of fa/adc maps. by default it will calculate threshold - automatically. but if it failed, you need to set it manually.""", + desc="""\ +Program will use b0 image with the given threshold to mask out high +background of fa/adc maps. By default it will calculate the threshold +automatically, but if that fails, you need to set it manually.""", argstr="-b0_th", ) @@ -167,12 +170,16 @@ class DTITrackerInputSpec(CommandLineInputSpec): "analyze", "ni1", "nii.gz", - desc="""input and output file type.
@@ -167,12 +170,16 @@ class DTITrackerInputSpec(CommandLineInputSpec):
         "analyze",
         "ni1",
         "nii.gz",
-        desc="""input and output file type. accepted values are:
-            analyze -> analyze format 7.5
-            ni1 -> nifti format saved in seperate .hdr and .img file
-            nii -> nifti format with one .nii file
-            nii.gz -> nifti format with compression
-            default type is 'nii'""",
+        desc="""\
+Input and output file type. Accepted values are:
+
+* analyze -> analyze format 7.5
+* ni1 -> nifti format saved in separate .hdr and .img files
+* nii -> nifti format with one .nii file
+* nii.gz -> nifti format with compression
+
+Default type is 'nii'.
+""",
         argstr="-it %s",
     )
     tracking_method = traits.Enum(
         "fact",
         "rk2",
         "tl",
         "sl",
-        desc="""fact -> use FACT method for tracking. this is the default method.
-            rk2 -> use 2nd order runge-kutta method for tracking.
-            tl -> use tensorline method for tracking.
-            sl -> use interpolated streamline method with fixed step-length""",
+        desc="""\
+Tracking algorithm.
+
+* fact -> use FACT method for tracking. This is the default method.
+* rk2 -> use 2nd order runge-kutta method for tracking.
+* tl -> use tensorline method for tracking.
+* sl -> use interpolated streamline method with fixed step-length
+
+""",
         argstr="-%s",
     )
     step_length = traits.Float(
-        desc="""set step length, in the unit of minimum voxel size.
-            default value is 0.5 for interpolated streamline method
-            and 0.1 for other methods""",
+        desc="""\
+Step length, in the unit of minimum voxel size.
+Default value is 0.5 for the interpolated streamline method
+and 0.1 for other methods""",
         argstr="-l %f",
     )
     angle_threshold = traits.Float(
         desc="set angle threshold. default value is 35 degree", argstr="-at %f"
     )
     angle_threshold_weight = traits.Float(
-        desc="set angle threshold weighting factor. weighting will be be applied \
-            on top of the angle_threshold",
+        desc="set angle threshold weighting factor. weighting will be applied "
+        "on top of the angle_threshold",
         argstr="-atw %f",
     )
     random_seed = traits.Int(
-        desc="use random location in a voxel instead of the center of the voxel \
-            to seed. can also define number of seed per voxel. default is 1",
+        desc="use random location in a voxel instead of the center of the voxel "
+        "to seed. can also define number of seeds per voxel. default is 1",
         argstr="-rseed %d",
     )
     invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix")
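Taken together, the ``argstr`` templates above show how these traits assemble into the tracker's command line. An illustrative configuration (file names hypothetical, and the rendered command is skipped because it depends on the installed Diffusion Toolkit)::

    >>> from nipype.interfaces import diffusion_toolkit as dtk
    >>> tracker = dtk.DTITracker(tracking_method='rk2', step_length=0.5)
    >>> tracker.inputs.random_seed = 4
    >>> tracker.inputs.mask1_file = 'mask.nii'
    >>> tracker.cmdline  # doctest: +SKIP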
default is 1", argstr="-rseed %d", ) invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") @@ -215,14 +228,14 @@ class DTITrackerInputSpec(CommandLineInputSpec): desc="first mask image", mandatory=True, argstr="-m %s", position=2 ) mask1_threshold = traits.Float( - desc="threshold value for the first mask image, if not given, the program will \ - try automatically find the threshold", + desc="threshold value for the first mask image, if not given, the program will " + "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc="threshold value for the second mask image, if not given, the program will \ - try automatically find the threshold", + desc="threshold value for the second mask image, if not given, the program will " + "try automatically find the threshold", position=5, ) input_data_prefix = traits.Str( diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 1935015d0c..8d8c5bf9bd 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -33,50 +33,54 @@ class HARDIMatInputSpec(CommandLineInputSpec): ) order = traits.Int( argstr="-order %s", - desc="""maximum order of spherical harmonics. must be even number. default - is 4""", + desc="""maximum order of spherical harmonics. must be even number. default is 4""", ) odf_file = File( exists=True, argstr="-odf %s", - desc="""filename that contains the reconstruction points on a HEMI-sphere. - use the pre-set 181 points by default""", + desc="""\ +Filename that contains the reconstruction points on a HEMI-sphere. +Use the pre-set 181 points by default""", ) reference_file = File( exists=True, argstr="-ref %s", - desc="""provide a dicom or nifti image as the reference for the program to - figure out the image orientation information. if no such info was - found in the given image header, the next 5 options -info, etc., - will be used if provided. if image orientation info can be found - in the given reference, all other 5 image orientation options will - be IGNORED""", + desc="""\ +Provide a dicom or nifti image as the reference for the program to +figure out the image orientation information. if no such info was +found in the given image header, the next 5 options -info, etc., +will be used if provided. if image orientation info can be found +in the given reference, all other 5 image orientation options will +be IGNORED""", ) image_info = File( exists=True, argstr="-info %s", - desc="""specify image information file. the image info file is generated - from original dicom image by diff_unpack program and contains image - orientation and other information needed for reconstruction and - tracking. by default will look into the image folder for .info file""", + desc="""\ +specify image information file. the image info file is generated +from original dicom image by diff_unpack program and contains image +orientation and other information needed for reconstruction and +tracking. by default will look into the image folder for .info file""", ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. 
-            this information will be used to determine image orientation,
-            as well as to adjust gradient vectors with oblique angle when""",
+        desc="""\
+specify image orientation vectors. if just one argument is given,
+will treat it as a filename and read the orientation vectors from
+the file. if 6 arguments are given, will treat them as 6 float
+numbers and construct the 1st and 2nd vector and calculate the 3rd
+one automatically.
+this information will be used to determine image orientation,
+as well as to adjust gradient vectors when oblique angles are applied.""",
         argstr="-iop %f",
     )
     oblique_correction = traits.Bool(
-        desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not
-            adjust gradient accordingly, thus it requires adjustment for correct
-            diffusion tensor calculation""",
+        desc="""\
+when oblique angle(s) are applied, some SIEMENS dti protocols do not
+adjust gradients accordingly, thus requiring adjustment for correct
+diffusion tensor calculation""",
         argstr="-oc",
     )
 
@@ -143,10 +147,11 @@ class ODFReconInputSpec(CommandLineInputSpec):
     )
     n_b0 = traits.Int(
         argstr="-b0 %s",
-        desc="""number of b0 scans. by default the program gets this information
-            from the number of directions and number of volumes in
-            the raw data. useful when dealing with incomplete raw
-            data set or only using part of raw data set to reconstruct""",
+        desc="""\
+number of b0 scans. by default the program gets this information
+from the number of directions and number of volumes in
+the raw data. useful when dealing with an incomplete raw
+data set or only using part of a raw data set to reconstruct""",
         mandatory=True,
     )
     output_type = traits.Enum(
@@ -159,9 +164,10 @@ class ODFReconInputSpec(CommandLineInputSpec):
         usedefault=True,
     )
     sharpness = traits.Float(
-        desc="""smooth or sharpen the raw data. factor > 0 is smoothing.
-            factor < 0 is sharpening. default value is 0
-            NOTE: this option applies to DSI study only""",
+        desc="""\
+smooth or sharpen the raw data. factor > 0 is smoothing.
+factor < 0 is sharpening. default value is 0.
+NOTE: this option applies to DSI studies only""",
         argstr="-s %f",
     )
     filter = traits.Bool(
@@ -176,19 +182,21 @@ class ODFReconInputSpec(CommandLineInputSpec):
         traits.Float(),
         minlen=6,
         maxlen=6,
-        desc="""specify image orientation vectors. if just one argument given,
-            will treat it as filename and read the orientation vectors from
-            the file. if 6 arguments are given, will treat them as 6 float
-            numbers and construct the 1st and 2nd vector and calculate the 3rd
-            one automatically.
-            this information will be used to determine image orientation,
-            as well as to adjust gradient vectors with oblique angle when""",
+        desc="""\
+specify image orientation vectors. if just one argument is given,
+will treat it as a filename and read the orientation vectors from
+the file. if 6 arguments are given, will treat them as 6 float
+numbers and construct the 1st and 2nd vector and calculate the 3rd
+one automatically.
+this information will be used to determine image orientation,
+as well as to adjust gradient vectors when oblique angles are applied.""",
         argstr="-iop %f",
     )
     oblique_correction = traits.Bool(
-        desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not
-            adjust gradient accordingly, thus it requires adjustment for correct
-            diffusion tensor calculation""",
+        desc="""\
+when oblique angle(s) are applied, some SIEMENS dti protocols do not
+adjust gradients accordingly, thus requiring adjustment for correct
+diffusion tensor calculation""",
         argstr="-oc",
     )
 
@@ -255,23 +263,27 @@ class ODFTrackerInputSpec(CommandLineInputSpec):
     )
     runge_kutta2 = traits.Bool(
         argstr="-rk2",
-        desc="""use 2nd order runge-kutta method for tracking.
-            default tracking method is non-interpolate streamline""",
+        desc="""\
+use 2nd order runge-kutta method for tracking.
+default tracking method is non-interpolate streamline""",
     )
     step_length = traits.Float(
         argstr="-l %f",
-        desc="""set step length, in the unit of minimum voxel size.
-            default value is 0.1.""",
+        desc="""\
+set step length, in the unit of minimum voxel size.
+default value is 0.1.""",
     )
     angle_threshold = traits.Float(
         argstr="-at %f",
-        desc="""set angle threshold. default value is 35 degree for
-            default tracking method and 25 for rk2""",
+        desc="""\
+set angle threshold. default value is 35 degrees for
+the default tracking method and 25 for rk2""",
     )
     random_seed = traits.Int(
         argstr="-rseed %s",
-        desc="""use random location in a voxel instead of the center of the voxel
-            to seed. can also define number of seed per voxel. default is 1""",
+        desc="""\
+use random location in a voxel instead of the center of the voxel
+to seed. can also define number of seeds per voxel. default is 1""",
     )
     invert_x = traits.Bool(argstr="-ix", desc="invert x component of the vector")
     invert_y = traits.Bool(argstr="-iy", desc="invert y component of the vector")
@@ -284,39 +296,42 @@ class ODFTrackerInputSpec(CommandLineInputSpec):
         desc="first mask image", mandatory=True, argstr="-m %s", position=2
     )
     mask1_threshold = traits.Float(
-        desc="threshold value for the first mask image, if not given, the program will \
-            try automatically find the threshold",
+        desc="threshold value for the first mask image, if not given, the program will "
+        "try to automatically find the threshold",
         position=3,
     )
     mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4)
     mask2_threshold = traits.Float(
-        desc="threshold value for the second mask image, if not given, the program will \
-            try automatically find the threshold",
+        desc="threshold value for the second mask image, if not given, the program will "
+        "try to automatically find the threshold",
         position=5,
     )
     limit = traits.Int(
         argstr="-limit %d",
-        desc="""in some special case, such as heart data, some track may go into
-            infinite circle and take long time to stop. this option allows
-            setting a limit for the longest tracking steps (voxels)""",
+        desc="""\
+in some special cases, such as heart data, some tracks may go into
+an infinite circle and take a long time to stop. this option allows
+setting a limit for the longest tracking steps (voxels)""",
    )
    dsi = traits.Bool(
        argstr="-dsi",
-        desc=""" specify the input odf data is dsi.
because dsi recon uses fixed +pre-calculated matrix, some special orientation patch needs to +be applied to keep dti/dsi/q-ball consistent.""", ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. +this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) slice_order = traits.Int( @@ -333,17 +348,18 @@ class ODFTrackerInputSpec(CommandLineInputSpec): "LPS", "LPI", argstr="-vorder %s", - desc="""specify the voxel order in RL/AP/IS (human brain) reference. must be - 3 letters with no space in between. - for example, RAS means the voxel row is from L->R, the column - is from P->A and the slice order is from I->S. - by default voxel order is determined by the image orientation - (but NOT guaranteed to be correct because of various standards). - for example, siemens axial image is LPS, coronal image is LIP and - sagittal image is PIL. - this information also is NOT needed for tracking but will be saved - in the track file and is essential for track display to map onto - the right coordinates""", + desc="""\ +specify the voxel order in RL/AP/IS (human brain) reference. must be +3 letters with no space in between. +for example, RAS means the voxel row is from L->R, the column +is from P->A and the slice order is from I->S. +by default voxel order is determined by the image orientation +(but NOT guaranteed to be correct because of various standards). +for example, siemens axial image is LPS, coronal image is LIP and +sagittal image is PIL. +this information also is NOT needed for tracking but will be saved +in the track file and is essential for track display to map onto +the right coordinates""", ) diff --git a/nipype/interfaces/dipy/__init__.py b/nipype/interfaces/dipy/__init__.py index 1bd5dcb217..ec840871ba 100644 --- a/nipype/interfaces/dipy/__init__.py +++ b/nipype/interfaces/dipy/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""DIPY is a computational neuroimaging tool for diffusion MRI.""" from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode, DTI from .preprocess import Resample, Denoise diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 6f77d9a47e..f2b197f372 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -94,23 +94,25 @@ class TensorMode(DipyDiffusionInterface): """ Creates a map of the mode of the diffusion tensors given a set of diffusion-weighted images, as well as their associated b-values and - b-vectors. Fits the diffusion tensors and calculates tensor mode + b-vectors [1]_. Fits the diffusion tensors and calculates tensor mode with Dipy. - .. [1] Daniel B. Ennis and G. 
Kindlmann, "Orthogonal Tensor - Invariants and the Analysis of Diffusion Tensor Magnetic Resonance - Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, - 2006. - Example ------- - >>> import nipype.interfaces.dipy as dipy >>> mode = dipy.TensorMode() >>> mode.inputs.in_file = 'diffusion.nii' >>> mode.inputs.in_bvec = 'bvecs' >>> mode.inputs.in_bval = 'bvals' >>> mode.run() # doctest: +SKIP + + References + ---------- + .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor + Invariants and the Analysis of Diffusion Tensor Magnetic Resonance + Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, + 2006. + """ input_spec = TensorModeInputSpec diff --git a/nipype/interfaces/dtitk/__init__.py b/nipype/interfaces/dtitk/__init__.py index 6c9569114c..d1420c3afb 100644 --- a/nipype/interfaces/dtitk/__init__.py +++ b/nipype/interfaces/dtitk/__init__.py @@ -1,11 +1,10 @@ -"""The dtitk module provides classes for interfacing with the `Diffusion -Tensor Imaging Toolkit (DTI-TK) +""" +DTI-TK is a spatial normalization and atlas construction toolkit for DTI. + +Interfaces for the `Diffusion Tensor Imaging Toolkit (DTI-TK) `_ command line tools. -Top-level namespace for dti-tk. """ - -# from .base import () from .registration import ( Rigid, Affine, diff --git a/nipype/interfaces/dtitk/utils.py b/nipype/interfaces/dtitk/utils.py index e959fd8f0c..c5850450a6 100644 --- a/nipype/interfaces/dtitk/utils.py +++ b/nipype/interfaces/dtitk/utils.py @@ -66,11 +66,10 @@ class TVAdjustVoxSpOutputSpec(TraitedSpec): class TVAdjustVoxSp(CommandLineDtitk): """ - Adjusts the voxel space of a tensor volume + Adjusts the voxel space of a tensor volume. Example ------- - >>> from nipype.interfaces import dtitk >>> node = dtitk.TVAdjustVoxSp() >>> node.inputs.in_file = 'im1.nii' @@ -78,6 +77,7 @@ class TVAdjustVoxSp(CommandLineDtitk): >>> node.cmdline 'TVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP + """ input_spec = TVAdjustVoxSpInputSpec @@ -119,19 +119,19 @@ class SVAdjustVoxSpOutputSpec(TraitedSpec): class SVAdjustVoxSp(CommandLineDtitk): """ - Adjusts the voxel space of a scalar volume + Adjusts the voxel space of a scalar volume. - Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.SVAdjustVoxSp() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.SVAdjustVoxSp() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = SVAdjustVoxSpInputSpec output_spec = SVAdjustVoxSpOutputSpec @@ -189,19 +189,19 @@ class TVResampleOutputSpec(TraitedSpec): class TVResample(CommandLineDtitk): """ - Resamples a tensor volume + Resamples a tensor volume. 
- Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.TVResample() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.TVResample() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = TVResampleInputSpec output_spec = TVResampleOutputSpec @@ -256,19 +256,19 @@ class SVResampleOutputSpec(TraitedSpec): class SVResample(CommandLineDtitk): """ - Resamples a scalar volume + Resamples a scalar volume. - Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.SVResample() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.SVResample() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = SVResampleInputSpec output_spec = SVResampleOutputSpec @@ -290,19 +290,19 @@ class TVtoolOutputSpec(TraitedSpec): class TVtool(CommandLineDtitk): """ - Calculates a tensor metric volume from a tensor volume + Calculates a tensor metric volume from a tensor volume. - Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.TVtool() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.in_flag = 'fa' + >>> node.cmdline + 'TVtool -in im1.nii -fa -out im1_fa.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.TVtool() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.in_flag = 'fa' - >>> node.cmdline - 'TVtool -in im1.nii -fa -out im1_fa.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = TVtoolInputSpec output_spec = TVtoolOutputSpec @@ -383,11 +383,10 @@ class BinThreshOutputSpec(TraitedSpec): class BinThresh(CommandLineDtitk): """ - Binarizes an image + Binarizes an image. Example ------- - >>> from nipype.interfaces import dtitk >>> node = dtitk.BinThresh() >>> node.inputs.in_file = 'im1.nii' @@ -398,6 +397,7 @@ class BinThresh(CommandLineDtitk): >>> node.cmdline 'BinaryThresholdImageFilter im1.nii im1_thrbin.nii 0 100 1 0' >>> node.run() # doctest: +SKIP + """ input_spec = BinThreshInputSpec diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 8404aad802..6dc6a7e154 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""Experimental Slicer wrapper - Work in progress.""" import os import warnings import xml.dom.minidom @@ -22,9 +22,7 @@ class SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): class SlicerCommandLine(CommandLine): - """Experimental Slicer wrapper. Work in progress. - - """ + """Experimental Slicer wrapper. 
Work in progress."""
 
     _cmd = "Slicer3"
     input_spec = SlicerCommandLineInputSpec
diff --git a/nipype/interfaces/elastix/__init__.py b/nipype/interfaces/elastix/__init__.py
index e7ddc2a9f7..8f60ed8ff1 100644
--- a/nipype/interfaces/elastix/__init__.py
+++ b/nipype/interfaces/elastix/__init__.py
@@ -2,8 +2,6 @@
 # coding: utf-8
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""Top-level namespace for elastix."""
-
-
+"""elastix is a toolbox for rigid and nonrigid registration of images."""
 from .registration import Registration, ApplyWarp, AnalyzeWarp, PointsWarp
 from .utils import EditTransform
diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py
index 803ac571cb..705cf895e4 100644
--- a/nipype/interfaces/freesurfer/__init__.py
+++ b/nipype/interfaces/freesurfer/__init__.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""Top-level namespace for freesurfer."""
+"""FreeSurfer is an open source software suite for processing and analyzing brain MRI images."""
 
 from .base import Info, FSCommand, no_freesurfer
 from .preprocess import (
diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py
index 8bf7918ae4..5209d731c9 100644
--- a/nipype/interfaces/freesurfer/model.py
+++ b/nipype/interfaces/freesurfer/model.py
@@ -116,7 +116,6 @@ class MRISPreproc(FSCommand):
 
     Examples
     --------
-
     >>> preproc = MRISPreproc()
     >>> preproc.inputs.target = 'fsaverage'
     >>> preproc.inputs.hemi = 'lh'
@@ -176,8 +175,8 @@ class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec):
     )
     copy_inputs = traits.Bool(
         desc="If running as a node, set this to True "
-        + "this will copy some implicit inputs to the "
-        + "node directory."
+        "(this will copy some implicit inputs to the "
+        "node directory)."
     )
 
 
@@ -185,7 +184,7 @@ class MRISPreprocReconAll(MRISPreproc):
     """Extends MRISPreproc to allow it to be used in a recon-all workflow
 
     Examples
-    ========
+    --------
     >>> preproc = MRISPreprocReconAll()
     >>> preproc.inputs.target = 'fsaverage'
     >>> preproc.inputs.hemi = 'lh'
@@ -194,6 +193,7 @@ class MRISPreprocReconAll(MRISPreproc):
     >>> preproc.inputs.out_file = 'concatenated_file.mgz'
     >>> preproc.cmdline
     'mris_preproc --hemi lh --out concatenated_file.mgz --s subject_id --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat'
+
     """
 
     input_spec = MRISPreprocReconAllInputSpec
@@ -451,7 +451,6 @@ class GLMFit(FSCommand):
 
     Examples
     --------
-
     >>> glmfit = GLMFit()
     >>> glmfit.inputs.in_file = 'functional.nii'
     >>> glmfit.inputs.one_sample = True
@@ -614,7 +613,6 @@ class Binarize(FSCommand):
 
     Examples
     --------
-
     >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii')
     >>> binvol.cmdline
     'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000'
@@ -755,7 +753,6 @@ class Concatenate(FSCommand):
 
     Examples
     --------
-
     Combine two input volumes into one volume with two frames
 
     >>> concat = Concatenate()
@@ -974,7 +971,6 @@ class SegStats(FSCommand):
 
     Examples
     --------
-
     >>> import nipype.interfaces.freesurfer as fs
     >>> ss = fs.SegStats()
     >>> ss.inputs.annot = ('PWS04', 'lh', 'aparc')
@@ -1077,8 +1073,8 @@ class SegStatsReconAllInputSpec(SegStatsInputSpec):
     aseg = File(exists=True, desc="Mandatory implicit input in 5.3")
     copy_inputs = traits.Bool(
         desc="If running as a node, set this to True "
-        + "otherwise, this will copy the implicit inputs "
-        + "to the node directory."
+        "(this will copy the implicit inputs "
+        "to the node directory)."
     )
 
 
@@ -1089,7 +1085,7 @@ class SegStatsReconAll(SegStats):
     To ensure backwards compatability of SegStats, this class was created.
 
     Examples
-    ========
+    --------
     >>> from nipype.interfaces.freesurfer import SegStatsReconAll
     >>> segstatsreconall = SegStatsReconAll()
     >>> segstatsreconall.inputs.annot = ('PWS04', 'lh', 'aparc')
@@ -1118,6 +1114,7 @@ class SegStatsReconAll(SegStats):
     >>> segstatsreconall.inputs.exclude_id = 0
     >>> segstatsreconall.cmdline
     'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --brain-vol-from-seg --surf-ctx-vol --empty --etiv --euler --excl-ctxgmwm --excludeid 0 --subcortgray --subject 10335 --supratent --totalgray --surf-wm-vol --sum ./summary.stats'
+
     """
 
     input_spec = SegStatsReconAllInputSpec
@@ -1248,7 +1245,6 @@ class Label2Vol(FSCommand):
 
     Examples
     --------
-
     >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii')
     >>> binvol.cmdline
     'mri_label2vol --fillthresh 0.5 --label cortex.label --reg register.dat --temp structural.nii --o foo_out.nii'
@@ -1340,7 +1336,6 @@ class MS_LDA(FSCommand):
 
     Examples
     --------
-
     >>> grey_label = 2
     >>> white_label = 3
     >>> zero_value = 1
@@ -1351,6 +1346,7 @@ class MS_LDA(FSCommand):
                              images=['FLASH1.mgz', 'FLASH2.mgz', 'FLASH3.mgz'])
     >>> optimalWeights.cmdline
     'mri_ms_LDA -conform -label label.mgz -lda 2 3 -shift 1 -W -synth synth_out.mgz -weight weights.txt FLASH1.mgz FLASH2.mgz FLASH3.mgz'
+
     """
 
     _cmd = "mri_ms_LDA"
@@ -1625,12 +1621,11 @@ class SphericalAverageInputSpec(FSTraitedSpec):
         argstr="%s",
         mandatory=True,
         position=-5,
-        desc="""Filename from the average subject directory.
-                    Example: to use rh.entorhinal.label as the input label
-                    filename, set fname to 'rh.entorhinal' and which to
-                    'label'. The program will then search for
-                    '{in_average}/label/rh.entorhinal.label'
-                    """,
+        desc="""\
+Filename from the average subject directory.
+Example: to use rh.entorhinal.label as the input label filename, set fname to 'rh.entorhinal'
+and which to 'label'. The program will then search for
+``{in_average}/label/rh.entorhinal.label``""",
     )
     which = traits.Enum(
         "coords",
@@ -1673,6 +1668,7 @@ class SphericalAverage(FSCommand):
     >>> sphericalavg.inputs.threshold = 5
     >>> sphericalavg.cmdline
     'mris_spherical_average -erode 2 -o 10335 -t 5.0 label lh.entorhinal lh pial . test.out'
+
     """
 
     _cmd = "mris_spherical_average"
diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py
index f7e09f7629..39a444495c 100644
--- a/nipype/interfaces/freesurfer/preprocess.py
+++ b/nipype/interfaces/freesurfer/preprocess.py
@@ -2491,7 +2491,7 @@ class MNIBiasCorrection(FSCommand):
     >>> correct.cmdline
     'mri_nu_correct.mni --distance 50 --i norm.mgz --n 6 --o norm_output.mgz --proto-iters 1000'
 
-    References:
+    References
     ----------
     [http://freesurfer.net/fswiki/mri_nu_correct.mni]
     [http://www.bic.mni.mcgill.ca/software/N3]
@@ -2677,11 +2677,13 @@ class CANormalize(FSCommand):
     """This program creates a normalized volume using the brain volume and an input gca file.
 
-    For complete details, see the `FS Documentation `_
+    See Also
+    --------
+    For complete details, see the `FS Documentation
+    `__.
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import freesurfer
     >>> ca_normalize = freesurfer.CANormalize()
     >>> ca_normalize.inputs.in_file = "T1.mgz"
     >>> ca_normalize.inputs.atlas = "atlas.nii.gz" # in practice use .gca atlases
     >>> ca_normalize.inputs.transform = "trans.mat" # in practice use .lta transforms
     >>> ca_normalize.cmdline
     'mri_ca_normalize T1.mgz atlas.nii.gz trans.mat T1_norm.mgz'
+
     """
 
     _cmd = "mri_ca_normalize"
@@ -2752,16 +2755,20 @@ class CARegisterOutputSpec(TraitedSpec):
 class CARegister(FSCommandOpenMP):
     """Generates a multi-dimensional talairach transform from a gca file and talairach.lta file
 
-    For complete details, see the `FS Documentation `_
+    See Also
+    --------
+    For complete details, see the `FS Documentation
+    `__
 
     Examples
-    ========
+    --------
     >>> from nipype.interfaces import freesurfer
     >>> ca_register = freesurfer.CARegister()
     >>> ca_register.inputs.in_file = "norm.mgz"
     >>> ca_register.inputs.out_file = "talairach.m3z"
     >>> ca_register.cmdline
     'mri_ca_register norm.mgz talairach.m3z'
+
     """
 
     _cmd = "mri_ca_register"
@@ -2851,12 +2858,15 @@ class CALabelOutputSpec(TraitedSpec):
 
 
 class CALabel(FSCommandOpenMP):
-    """
-    For complete details, see the `FS Documentation `_
+    """Label subcortical structures based on a GCA model.
- Examples - ======== + See Also + -------- + For complete details, see the `FS Documentation + `__ + Examples + -------- >>> from nipype.interfaces import freesurfer >>> ca_label = freesurfer.CALabel() >>> ca_label.inputs.in_file = "norm.mgz" @@ -2865,6 +2875,7 @@ class CALabel(FSCommandOpenMP): >>> ca_label.inputs.template = "Template_6.nii" # in practice use .gcs extension >>> ca_label.cmdline 'mri_ca_label norm.mgz trans.mat Template_6.nii out.mgz' + """ _cmd = "mri_ca_label" diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index c93f813088..d6fd82cc15 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -48,8 +48,7 @@ class MPRtoMNI305(FSScriptCommand): For complete details, see FreeSurfer documentation Examples - ======== - + -------- >>> from nipype.interfaces.freesurfer import MPRtoMNI305, Info >>> mprtomni305 = MPRtoMNI305() >>> mprtomni305.inputs.target = 'structural.nii' diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 02a3831bc6..a5bc6f36b5 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -446,7 +446,7 @@ class SurfaceSmoothInputSpec(FSTraitedSpec): True, argstr="--cortex", usedefault=True, - desc="only smooth within $hemi.cortex.label", + desc="only smooth within ``$hemi.cortex.label``", ) reshape = traits.Bool( argstr="--reshape", desc="reshape surface vector to fit in non-mgh format" @@ -468,14 +468,13 @@ class SurfaceSmooth(FSCommand): smoothing process. If the latter, the underlying program will calculate the correct number of iterations internally. - .. seealso:: - - SmoothTessellation() Interface - For smoothing a tessellated surface (e.g. in gifti or .stl) + See Also + -------- + `nipype.interfaces.freesurfer.utils.SmoothTessellation`_ interface for + smoothing a tessellated surface (e.g. in gifti or .stl) Examples -------- - >>> import nipype.interfaces.freesurfer as fs >>> smoother = fs.SurfaceSmooth() >>> smoother.inputs.in_file = "lh.cope1.mgz" @@ -1569,16 +1568,12 @@ class MRIPretess(FSCommand): """ Uses Freesurfer's mri_pretess to prepare volumes to be tessellated. - Description - ----------- - Changes white matter (WM) segmentation so that the neighbors of all voxels labeled as WM have a face in common - no edges or corners allowed. 
Example ------- - >>> import nipype.interfaces.freesurfer as fs >>> pretess = fs.MRIPretess() >>> pretess.inputs.in_filled = 'wm.mgz' @@ -1675,10 +1670,6 @@ def _gen_outfilename(self): class SmoothTessellationInputSpec(FSTraitedSpec): - """ - This program smooths the tessellation of a surface using 'mris_smooth' - """ - in_file = File( exists=True, mandatory=True, @@ -1694,17 +1685,17 @@ class SmoothTessellationInputSpec(FSTraitedSpec): argstr="-n %d", desc="Number of smoothing iterations (default=10)" ) snapshot_writing_iterations = traits.Int( - argstr="-w %d", desc='Write snapshot every "n" iterations' + argstr="-w %d", desc='Write snapshot every *n* iterations' ) use_gaussian_curvature_smoothing = traits.Bool( argstr="-g", desc="Use Gaussian curvature smoothing" ) gaussian_curvature_norm_steps = traits.Int( - argstr="%d ", desc="Use Gaussian curvature smoothing" + argstr="%d", desc="Use Gaussian curvature smoothing" ) gaussian_curvature_smoothing_steps = traits.Int( - argstr="%d", desc="Use Gaussian curvature smoothing" + argstr=" %d", desc="Use Gaussian curvature smoothing" ) disable_estimates = traits.Bool( @@ -1722,10 +1713,10 @@ class SmoothTessellationInputSpec(FSTraitedSpec): desc="output filename or True to generate one", ) out_curvature_file = File( - argstr="-c %s", desc='Write curvature to ?h.curvname (default "curv")' + argstr="-c %s", desc='Write curvature to ``?h.curvname`` (default "curv")' ) out_area_file = File( - argstr="-b %s", desc='Write area to ?h.areaname (default "area")' + argstr="-b %s", desc='Write area to ``?h.areaname`` (default "area")' ) seed = traits.Int( argstr="-seed %d", desc="Seed for setting random number generator" @@ -1737,25 +1728,25 @@ class SmoothTessellationOutputSpec(TraitedSpec): This program smooths the tessellation of a surface using 'mris_smooth' """ - surface = File(exists=True, desc="Smoothed surface file ") + surface = File(exists=True, desc="Smoothed surface file.") class SmoothTessellation(FSCommand): """ - This program smooths the tessellation of a surface using 'mris_smooth' + Smooth a tessellated surface. - .. seealso:: - - SurfaceSmooth() Interface - For smoothing a scalar field along a surface manifold + See Also + -------- + `nipype.interfaces.freesurfer.utils.SurfaceSmooth`_ interface for smoothing a scalar field + along a surface manifold Example ------- - >>> import nipype.interfaces.freesurfer as fs >>> smooth = fs.SmoothTessellation() >>> smooth.inputs.in_file = 'lh.hippocampus.stl' >>> smooth.run() # doctest: +SKIP + """ _cmd = "mris_smooth" @@ -1951,11 +1942,10 @@ class Tkregister2(FSCommand): Examples -------- - Get transform matrix between orig (*tkRAS*) and native (*scannerRAS*) coordinates in Freesurfer. Implements the first step of mapping surfaces to native space in `this guide - `_. + `__. >>> from nipype.interfaces.freesurfer import Tkregister2 >>> tk2 = Tkregister2(reg_file='T1_to_native.dat') @@ -2050,11 +2040,16 @@ class AddXFormToHeaderOutputSpec(TraitedSpec): class AddXFormToHeader(FSCommand): - """ Just adds specified xform to the volume header + """ + Just adds specified xform to the volume header. - (!) WARNING: transform input **MUST** be an absolute path to a DataSink'ed transform or - the output will reference a transform in the workflow cache directory! + .. danger :: + Input transform **MUST** be an absolute path to a DataSink'ed transform or + the output will reference a transform in the workflow cache directory! 
+
+    Examples
+    --------
     >>> from nipype.interfaces.freesurfer import AddXFormToHeader
     >>> adder = AddXFormToHeader()
     >>> adder.inputs.in_file = 'norm.mgz'
     >>> adder.inputs.transform = 'trans.mat'
     >>> adder.inputs.out_file = 'output.mgz'
     >>> adder.inputs.copy_name = True
     >>> adder.cmdline
     'mri_add_xform_to_header -c trans.mat norm.mgz output.mgz'
-
     >>> adder.run()   # doctest: +SKIP
 
-    References:
+    References
     ----------
     [https://surfer.nmr.mgh.harvard.edu/fswiki/mri_add_xform_to_header]
 
@@ -3829,11 +3823,10 @@ class Aparc2AsegInputSpec(FSTraitedSpec):
     ctxseg = File(argstr="--ctxseg %s", exists=True, desc="")
     label_wm = traits.Bool(
         argstr="--labelwm",
-        desc="""
-        For each voxel labeled as white matter in the aseg, re-assign
-        its label to be that of the closest cortical point if its
-        distance is less than dmaxctx
-        """,
+        desc="""\
+For each voxel labeled as white matter in the aseg, re-assign
+its label to be that of the closest cortical point if its
+distance is less than dmaxctx.""",
     )
     hypo_wm = traits.Bool(argstr="--hypo-as-wm", desc="Label hypointensities as WM")
     rip_unknown = traits.Bool(
     a2009s = traits.Bool(argstr="--a2009s", desc="Using the a2009s atlas")
     copy_inputs = traits.Bool(
         desc="If running as a node, set this to True."
-        + "This will copy the input files to the node "
-        + "directory."
+        " This will copy the input files to the node "
+        "directory."
     )
 
 
@@ -3859,17 +3852,17 @@ class Aparc2Aseg(FSCommand):
     labeled as cortex (3 and 42) and assign it the label of the closest
     cortical vertex. If the voxel is not in the ribbon (as defined by mri/
     lh.ribbon and rh.ribbon), then the voxel is marked as unknown (0).
-    This can be turned off with --noribbon. The cortical parcellation is
+    This can be turned off with ``--noribbon``. The cortical parcellation is
     obtained from subject/label/hemi.aparc.annot which should be based on the
     curvature.buckner40.filled.desikan_killiany.gcs atlas. The aseg is
     obtained from subject/mri/aseg.mgz and should be based on the
     RB40_talairach_2005-07-20.gca atlas. If these atlases are used, then the
     segmentations can be viewed with tkmedit and the
-    FreeSurferColorLUT.txt color table found in $FREESURFER_HOME. These
-    are the default atlases used by recon-all.
+    FreeSurferColorLUT.txt color table found in ``$FREESURFER_HOME``. These
+    are the default atlases used by ``recon-all``.
 
     Examples
-    ========
+    --------
     >>> from nipype.interfaces.freesurfer import Aparc2Aseg
     >>> aparc2aseg = Aparc2Aseg()
     >>> aparc2aseg.inputs.lh_white = 'lh.pial'
@@ -3886,6 +3879,7 @@ class Aparc2Aseg(FSCommand):
     >>> aparc2aseg.inputs.rip_unknown = True
     >>> aparc2aseg.cmdline # doctest: +SKIP
     'mri_aparc2aseg --labelwm --o aparc+aseg.mgz --rip-unknown --s subject_id'
+
     """
 
     _cmd = "mri_aparc2aseg"
@@ -3947,13 +3941,14 @@ class Apas2Aseg(FSCommand):
     actual surface (this is not the case with aseg.mgz).
 
     Examples
-    ========
+    --------
     >>> from nipype.interfaces.freesurfer import Apas2Aseg
     >>> apas2aseg = Apas2Aseg()
     >>> apas2aseg.inputs.in_file = 'aseg.mgz'
     >>> apas2aseg.inputs.out_file = 'output.mgz'
     >>> apas2aseg.cmdline
     'apas2aseg --i aseg.mgz --o output.mgz'
+
     """
 
     _cmd = "apas2aseg"
@@ -3989,9 +3984,9 @@ class MRIsExpandInputSpec(FSTraitedSpec):
         position=-1,
         usedefault=True,
         desc=(
-            "Output surface file\n"
-            "If no path, uses directory of `in_file`\n"
-            'If no path AND missing "lh." or "rh.", derive from `in_file`'
+            "Output surface file. "
+            "If no path, uses directory of ``in_file``. "
+            'If no path AND missing "lh."
or "rh.", derive from ``in_file``' ), ) thickness = traits.Bool( @@ -4002,7 +3997,7 @@ class MRIsExpandInputSpec(FSTraitedSpec): copyfile=False, desc=( 'Name of thickness file (implicit: "thickness")\n' - "If no path, uses directory of `in_file`\n" + "If no path, uses directory of ``in_file``\n" 'If no path AND missing "lh." or "rh.", derive from `in_file`' ), ) @@ -4011,8 +4006,8 @@ class MRIsExpandInputSpec(FSTraitedSpec): copyfile=False, desc=( 'Name of pial file (implicit: "pial")\n' - "If no path, uses directory of `in_file`\n" - 'If no path AND missing "lh." or "rh.", derive from `in_file`' + "If no path, uses directory of ``in_file``\n" + 'If no path AND missing "lh." or "rh.", derive from ``in_file``' ), ) sphere = traits.Str( diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index dd7b3d76d7..1bf8e0ada7 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The fsl module provides classes for interfacing with the `FSL -`_ command line tools. +""" +FSL is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data. -Top-level namespace for fsl. +The fsl module provides classes for interfacing with the `FSL +`_ command line tools. """ from .base import FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data diff --git a/nipype/interfaces/fsl/aroma.py b/nipype/interfaces/fsl/aroma.py index c40a285989..4a3eb32034 100644 --- a/nipype/interfaces/fsl/aroma.py +++ b/nipype/interfaces/fsl/aroma.py @@ -2,7 +2,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This commandline module provides classes for interfacing with the -`ICA-AROMA.py`_ command line tool. +`ICA-AROMA.py `__ command line tool. """ from ..base import ( diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index eeab08371e..eef38795c7 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -904,7 +904,7 @@ class Eddy(FSLCommand): """ Interface for FSL eddy, a tool for estimating and correcting eddy currents induced distortions. `User guide - `_ and + `__ and `more info regarding acqp file `_. @@ -1648,7 +1648,7 @@ class EddyQuad(FSLCommand): """ Interface for FSL eddy_quad, a tool for generating single subject reports and storing the quality assessment indices for each subject. - `User guide `_ + `User guide `__ Examples -------- diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 071c834e14..32e0fc76f1 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -277,64 +277,64 @@ class DataSinkOutputSpec(TraitedSpec): # Custom DataSink class class DataSink(IOBase): - """ Generic datasink module to store structured outputs - - Primarily for use within a workflow. This interface allows arbitrary - creation of input attributes. The names of these attributes define the - directory structure to create for storage of the files or directories. - - The attributes take the following form: - - string[[.[@]]string[[.[@]]string]] ... + """ + Generic datasink module to store structured outputs. - where parts between [] are optional. + Primarily for use within a workflow. This interface allows arbitrary + creation of input attributes. The names of these attributes define the + directory structure to create for storage of the files or directories. 
- An attribute such as contrasts.@con will create a 'contrasts' directory - to store the results linked to the attribute. If the @ is left out, such - as in 'contrasts.con', a subdirectory 'con' will be created under - 'contrasts'. + The attributes take the following form:: - the general form of the output is:: + string[[.[@]]string[[.[@]]string]] ... - 'base_directory/container/parameterization/destloc/filename' + where parts between ``[]`` are optional. - destloc = string[[.[@]]string[[.[@]]string]] and - filename comesfrom the input to the connect statement. + An attribute such as contrasts.@con will create a 'contrasts' directory + to store the results linked to the attribute. If the ``@`` is left out, such + as in 'contrasts.con', a subdirectory 'con' will be created under + 'contrasts'. - .. warning:: + The general form of the output is:: - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + 'base_directory/container/parameterization/destloc/filename' - .. note:: + ``destloc = string[[.[@]]string[[.[@]]string]]`` and + ``filename`` come from the input to the connect statement. - If both substitutions and regexp_substitutions are used, then - substitutions are applied first followed by regexp_substitutions. + .. warning:: - This interface **cannot** be used in a MapNode as the inputs are - defined only when the connect statement is executed. + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - Examples - -------- + .. note:: - >>> ds = DataSink() - >>> ds.inputs.base_directory = 'results_dir' - >>> ds.inputs.container = 'subject' - >>> ds.inputs.structural = 'structural.nii' - >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) - >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) - >>> ds.run() # doctest: +SKIP + If both substitutions and regexp_substitutions are used, then + substitutions are applied first followed by regexp_substitutions. - To use DataSink in a MapNode, its inputs have to be defined at the - time the interface is created. + This interface **cannot** be used in a MapNode as the inputs are + defined only when the connect statement is executed. - >>> ds = DataSink(infields=['contasts.@con']) - >>> ds.inputs.base_directory = 'results_dir' - >>> ds.inputs.container = 'subject' - >>> ds.inputs.structural = 'structural.nii' - >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) - >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) - >>> ds.run() # doctest: +SKIP + Examples + -------- + >>> ds = DataSink() + >>> ds.inputs.base_directory = 'results_dir' + >>> ds.inputs.container = 'subject' + >>> ds.inputs.structural = 'structural.nii' + >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) + >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) + >>> ds.run() # doctest: +SKIP + + To use DataSink in a MapNode, its inputs have to be defined at the + time the interface is created. 
+ + >>> ds = DataSink(infields=['contasts.@con']) + >>> ds.inputs.base_directory = 'results_dir' + >>> ds.inputs.container = 'subject' + >>> ds.inputs.structural = 'structural.nii' + >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) + >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) + >>> ds.run() # doctest: +SKIP """ @@ -822,7 +822,7 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): False, usedefault=True, desc="Use anonymous connection to s3. If this is set to True, boto may print" - + " a urlopen error, but this does not prevent data from being downloaded.", + " a urlopen error, but this does not prevent data from being downloaded.", ) region = Str("us-east-1", usedefault=True, desc="Region of s3 bucket") bucket = Str(mandatory=True, desc="Amazon S3 bucket where your data is stored") @@ -855,33 +855,36 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class S3DataGrabber(LibraryBaseInterface, IOBase): - """ Generic datagrabber module that wraps around glob in an - intelligent way for neuroimaging tasks to grab files from - Amazon S3 - - Works exactly like DataGrabber, except, you must specify an - S3 "bucket" and "bucket_path" to search for your data and a - "local_directory" to store the data. "local_directory" - should be a location on HDFS for Spark jobs. Additionally, - "template" uses regex style formatting, rather than the - glob-style found in the original DataGrabber. - - Examples - -------- - - >>> s3grab = S3DataGrabber(infields=['subj_id'], outfields=["func", "anat"]) - >>> s3grab.inputs.bucket = 'openneuro' - >>> s3grab.inputs.sort_filelist = True - >>> s3grab.inputs.template = '*' - >>> s3grab.inputs.anon = True - >>> s3grab.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/' - >>> s3grab.inputs.local_directory = '/tmp' - >>> s3grab.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz', - ... 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'} - >>> s3grab.inputs.template_args = {'anat': [['subj_id', 'subj_id']], - ... 'func': [['subj_id', 'subj_id']]} - >>> s3grab.inputs.subj_id = 'sub-01' - >>> s3grab.run() # doctest: +SKIP + """ + Pull data from an Amazon S3 Bucket. + + Generic datagrabber module that wraps around glob in an + intelligent way for neuroimaging tasks to grab files from + Amazon S3 + + Works exactly like DataGrabber, except, you must specify an + S3 "bucket" and "bucket_path" to search for your data and a + "local_directory" to store the data. "local_directory" + should be a location on HDFS for Spark jobs. Additionally, + "template" uses regex style formatting, rather than the + glob-style found in the original DataGrabber. + + Examples + -------- + >>> s3grab = S3DataGrabber(infields=['subj_id'], outfields=["func", "anat"]) + >>> s3grab.inputs.bucket = 'openneuro' + >>> s3grab.inputs.sort_filelist = True + >>> s3grab.inputs.template = '*' + >>> s3grab.inputs.anon = True + >>> s3grab.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/' + >>> s3grab.inputs.local_directory = '/tmp' + >>> s3grab.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz', + ... 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'} + >>> s3grab.inputs.template_args = {'anat': [['subj_id', 'subj_id']], + ... 
'func': [['subj_id', 'subj_id']]} + >>> s3grab.inputs.subj_id = 'sub-01' + >>> s3grab.run() # doctest: +SKIP + """ input_spec = S3DataGrabberInputSpec @@ -1119,54 +1122,55 @@ class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class DataGrabber(IOBase): - """ Generic datagrabber module that wraps around glob in an - intelligent way for neuroimaging tasks to grab files - + """ + Find files on a filesystem. - .. attention:: + Generic datagrabber module that wraps around glob in an + intelligent way for neuroimaging tasks to grab files - Doesn't support directories currently + .. important:: - Examples - -------- + Doesn't support directories currently - >>> from nipype.interfaces.io import DataGrabber + Examples + -------- + >>> from nipype.interfaces.io import DataGrabber - Pick all files from current directory + Pick all files from current directory - >>> dg = DataGrabber() - >>> dg.inputs.template = '*' + >>> dg = DataGrabber() + >>> dg.inputs.template = '*' - Pick file foo/foo.nii from current directory + Pick file foo/foo.nii from current directory - >>> dg.inputs.template = '%s/%s.dcm' - >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] + >>> dg.inputs.template = '%s/%s.dcm' + >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] - Same thing but with dynamically created fields + Same thing but with dynamically created fields - >>> dg = DataGrabber(infields=['arg1','arg2']) - >>> dg.inputs.template = '%s/%s.nii' - >>> dg.inputs.arg1 = 'foo' - >>> dg.inputs.arg2 = 'foo' + >>> dg = DataGrabber(infields=['arg1','arg2']) + >>> dg.inputs.template = '%s/%s.nii' + >>> dg.inputs.arg1 = 'foo' + >>> dg.inputs.arg2 = 'foo' - however this latter form can be used with iterables and iterfield in a - pipeline. + however this latter form can be used with iterables and iterfield in a + pipeline. - Dynamically created, user-defined input and output fields + Dynamically created, user-defined input and output fields - >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) - >>> dg.inputs.base_directory = '.' - >>> dg.inputs.template = '%s/%s.nii' - >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] - >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] - >>> dg.inputs.template_args['ref'] = [['sid','ref']] - >>> dg.inputs.sid = 's1' + >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) + >>> dg.inputs.base_directory = '.' + >>> dg.inputs.template = '%s/%s.nii' + >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] + >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] + >>> dg.inputs.template_args['ref'] = [['sid','ref']] + >>> dg.inputs.sid = 's1' - Change the template only for output field struct. The rest use the - general template + Change the template only for output field struct. The rest use the + general template - >>> dg.inputs.field_template = dict(struct='%s/struct.nii') - >>> dg.inputs.template_args['struct'] = [['sid']] + >>> dg.inputs.field_template = dict(struct='%s/struct.nii') + >>> dg.inputs.template_args['struct'] = [['sid']] """ @@ -1357,7 +1361,8 @@ class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class SelectFiles(IOBase): - """Flexibly collect data from disk to feed into workflows. + """ + Flexibly collect data from disk to feed into workflows. 
This interface uses the {}-based string formatting syntax to plug values (possibly known only at workflow execution time) into string @@ -1369,7 +1374,6 @@ class SelectFiles(IOBase): Examples -------- - >>> import pprint >>> from nipype import SelectFiles, Node >>> templates={"T1": "{subject_id}/struct/T1.nii", @@ -1520,7 +1524,7 @@ class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class DataFinder(IOBase): - """Search for paths that match a given regular expression. Allows a less + r"""Search for paths that match a given regular expression. Allows a less proscriptive approach to gathering input files compared to DataGrabber. Will recursively search any subdirectories by default. This can be limited with the min/max depth options. @@ -1530,7 +1534,6 @@ class DataFinder(IOBase): Examples -------- - >>> from nipype.interfaces.io import DataFinder >>> df = DataFinder() >>> df.inputs.root_paths = '.' @@ -1803,11 +1806,10 @@ class FSSourceOutputSpec(TraitedSpec): class FreeSurferSource(IOBase): - """Generates freesurfer subject info from their directories + """Generates freesurfer subject info from their directories. Examples -------- - >>> from nipype.interfaces.io import FreeSurferSource >>> fs = FreeSurferSource() >>> #fs.inputs.subjects_dir = '.' @@ -1891,36 +1893,35 @@ class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class XNATSource(LibraryBaseInterface, IOBase): - """ Generic XNATSource module that wraps around the pyxnat module in - an intelligent way for neuroimaging tasks to grab files and data - from an XNAT server. - - Examples - -------- - - >>> from nipype.interfaces.io import XNATSource - - Pick all files from current directory - - >>> dg = XNATSource() - >>> dg.inputs.template = '*' - - >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) - >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ - '/assessors/%s/%s_resources/files' - >>> dg.inputs.project = 'IMAGEN' - >>> dg.inputs.subject = 'IMAGEN_000000001274' - >>> dg.inputs.experiment = '*SessionA*' - >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' - >>> dg.inputs.inout = 'out' + """ + Pull data from an XNAT server. - >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) - >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ - '/assessors/*%s_nii/out_resources/files' - >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] - >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] - >>> dg.inputs.sid = 'IMAGEN_000000001274' + Generic XNATSource module that wraps around the pyxnat module in + an intelligent way for neuroimaging tasks to grab files and data + from an XNAT server. 
+ Examples + -------- + Pick all files from current directory + + >>> dg = XNATSource() + >>> dg.inputs.template = '*' + + >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) + >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ + '/assessors/%s/%s_resources/files' + >>> dg.inputs.project = 'IMAGEN' + >>> dg.inputs.subject = 'IMAGEN_000000001274' + >>> dg.inputs.experiment = '*SessionA*' + >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' + >>> dg.inputs.inout = 'out' + + >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) + >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ + '/assessors/*%s_nii/out_resources/files' + >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] + >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] + >>> dg.inputs.sid = 'IMAGEN_000000001274' """ @@ -2310,22 +2311,23 @@ class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class SQLiteSink(LibraryBaseInterface, IOBase): - """ Very simple frontend for storing values into SQLite database. + """ + Very simple frontend for storing values into SQLite database. - .. warning:: + .. warning:: - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - Examples - -------- + Examples + -------- - >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) - >>> sql.inputs.database_file = 'my_database.db' - >>> sql.inputs.table_name = 'experiment_results' - >>> sql.inputs.subject_id = 's1' - >>> sql.inputs.some_measurement = 11.4 - >>> sql.run() # doctest: +SKIP + >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) + >>> sql.inputs.database_file = 'my_database.db' + >>> sql.inputs.table_name = 'experiment_results' + >>> sql.inputs.subject_id = 's1' + >>> sql.inputs.some_measurement = 11.4 + >>> sql.run() # doctest: +SKIP """ @@ -2377,19 +2379,20 @@ class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class MySQLSink(IOBase): - """ Very simple frontend for storing values into MySQL database. + """ + Very simple frontend for storing values into MySQL database. - Examples - -------- + Examples + -------- - >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) - >>> sql.inputs.database_name = 'my_database' - >>> sql.inputs.table_name = 'experiment_results' - >>> sql.inputs.username = 'root' - >>> sql.inputs.password = 'secret' - >>> sql.inputs.subject_id = 's1' - >>> sql.inputs.some_measurement = 11.4 - >>> sql.run() # doctest: +SKIP + >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) + >>> sql.inputs.database_name = 'my_database' + >>> sql.inputs.table_name = 'experiment_results' + >>> sql.inputs.username = 'root' + >>> sql.inputs.password = 'secret' + >>> sql.inputs.subject_id = 's1' + >>> sql.inputs.some_measurement = 11.4 + >>> sql.run() # doctest: +SKIP """ @@ -2455,64 +2458,64 @@ class SSHDataGrabberInputSpec(DataGrabberInputSpec): class SSHDataGrabber(LibraryBaseInterface, DataGrabber): - """ Extension of DataGrabber module that downloads the file list and - optionally the files from a SSH server. The SSH operation must - not need user and password so an SSH agent must be active in - where this module is being run. 
-
+    """
+    Extension of DataGrabber module that downloads the file list and
+    optionally the files from an SSH server. The SSH operation must
+    not require a username and password, so an SSH agent must be
+    active on the machine where this module runs.

-    .. attention::
-
-       Doesn't support directories currently
+    .. attention::

-    Examples
-    --------
+
+       Doesn't support directories currently

-    >>> from nipype.interfaces.io import SSHDataGrabber
-    >>> dg = SSHDataGrabber()
-    >>> dg.inputs.hostname = 'test.rebex.net'
-    >>> dg.inputs.user = 'demo'
-    >>> dg.inputs.password = 'password'
-    >>> dg.inputs.base_directory = 'pub/example'
+    Examples
+    --------
+    >>> from nipype.interfaces.io import SSHDataGrabber
+    >>> dg = SSHDataGrabber()
+    >>> dg.inputs.hostname = 'test.rebex.net'
+    >>> dg.inputs.user = 'demo'
+    >>> dg.inputs.password = 'password'
+    >>> dg.inputs.base_directory = 'pub/example'

-    Pick all files from the base directory
+    Pick all files from the base directory

-    >>> dg.inputs.template = '*'
+    >>> dg.inputs.template = '*'

-    Pick all files starting with "s" and a number from current directory
+    Pick all files starting with "s" and a number from current directory

-    >>> dg.inputs.template_expression = 'regexp'
-    >>> dg.inputs.template = 'pop[0-9].*'
+    >>> dg.inputs.template_expression = 'regexp'
+    >>> dg.inputs.template = 'pop[0-9].*'

-    Same thing but with dynamically created fields
+    Same thing but with dynamically created fields

-    >>> dg = SSHDataGrabber(infields=['arg1','arg2'])
-    >>> dg.inputs.hostname = 'test.rebex.net'
-    >>> dg.inputs.user = 'demo'
-    >>> dg.inputs.password = 'password'
-    >>> dg.inputs.base_directory = 'pub'
-    >>> dg.inputs.template = '%s/%s.txt'
-    >>> dg.inputs.arg1 = 'example'
-    >>> dg.inputs.arg2 = 'foo'
+    >>> dg = SSHDataGrabber(infields=['arg1','arg2'])
+    >>> dg.inputs.hostname = 'test.rebex.net'
+    >>> dg.inputs.user = 'demo'
+    >>> dg.inputs.password = 'password'
+    >>> dg.inputs.base_directory = 'pub'
+    >>> dg.inputs.template = '%s/%s.txt'
+    >>> dg.inputs.arg1 = 'example'
+    >>> dg.inputs.arg2 = 'foo'

-    however this latter form can be used with iterables and iterfield in a
-    pipeline.
+    however this latter form can be used with iterables and iterfield in a
+    pipeline.

-    Dynamically created, user-defined input and output fields
+    Dynamically created, user-defined input and output fields

-    >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref'])
-    >>> dg.inputs.hostname = 'myhost.com'
-    >>> dg.inputs.base_directory = '/main_folder/my_remote_dir'
-    >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]]
-    >>> dg.inputs.template_args['struct'] = [['sid',['struct']]]
-    >>> dg.inputs.template_args['ref'] = [['sid','ref']]
-    >>> dg.inputs.sid = 's1'
+    >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref'])
+    >>> dg.inputs.hostname = 'myhost.com'
+    >>> dg.inputs.base_directory = '/main_folder/my_remote_dir'
+    >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]]
+    >>> dg.inputs.template_args['struct'] = [['sid',['struct']]]
+    >>> dg.inputs.template_args['ref'] = [['sid','ref']]
+    >>> dg.inputs.sid = 's1'

-    Change the template only for output field struct.
The rest use the + general template - >>> dg.inputs.field_template = dict(struct='%s/struct.nii') - >>> dg.inputs.template_args['struct'] = [['sid']] + >>> dg.inputs.field_template = dict(struct='%s/struct.nii') + >>> dg.inputs.template_args['struct'] = [['sid']] """ @@ -2801,26 +2804,25 @@ class JSONFileSink(IOBase): Entries already existing in in_dict will be overridden by matching entries dynamically added as inputs. - .. warning:: - - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + .. warning:: - Examples - -------- + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - >>> jsonsink = JSONFileSink(input_names=['subject_id', - ... 'some_measurement']) - >>> jsonsink.inputs.subject_id = 's1' - >>> jsonsink.inputs.some_measurement = 11.4 - >>> jsonsink.run() # doctest: +SKIP + Examples + -------- + >>> jsonsink = JSONFileSink(input_names=['subject_id', + ... 'some_measurement']) + >>> jsonsink.inputs.subject_id = 's1' + >>> jsonsink.inputs.some_measurement = 11.4 + >>> jsonsink.run() # doctest: +SKIP - Using a dictionary as input: + Using a dictionary as input: - >>> dictsink = JSONFileSink() - >>> dictsink.inputs.in_dict = {'subject_id': 's1', - ... 'some_measurement': 11.4} - >>> dictsink.run() # doctest: +SKIP + >>> dictsink = JSONFileSink() + >>> dictsink.inputs.in_dict = {'subject_id': 's1', + ... 'some_measurement': 11.4} + >>> dictsink.run() # doctest: +SKIP """ @@ -2898,13 +2900,11 @@ class BIDSDataGrabberInputSpec(DynamicTraitedSpec): class BIDSDataGrabber(LibraryBaseInterface, IOBase): - - """ BIDS datagrabber module that wraps around pybids to allow arbitrary + """BIDS datagrabber module that wraps around pybids to allow arbitrary querying of BIDS datasets. Examples -------- - By default, the BIDSDataGrabber fetches anatomical and functional images from a project, and makes BIDS entities (e.g. subject) available for filtering outputs. @@ -3025,7 +3025,7 @@ class ExportFileOutputSpec(TraitedSpec): class ExportFile(SimpleInterface): - """ Export a file to an absolute path + """Export a file to an absolute path. This interface copies an input file to a named output file. This is useful to save individual files to a specific location, @@ -3033,7 +3033,6 @@ class ExportFile(SimpleInterface): Examples -------- - >>> from nipype.interfaces.io import ExportFile >>> import os.path as op >>> ef = ExportFile() diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index 488635843e..59c36eb478 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" General matlab interface code """ +"""Interfaces to run MATLAB scripts.""" import os from .. 
import config diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index d1689ad9b4..505426bfe2 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Fixes meshes: -""" +"""MeshFix corrects topological errors in polygonal meshes.""" import os.path as op from ..utils.filemanip import split_filename diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index c593ea998b..a69e38eeb2 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The minc module provides classes for interfacing with the `MINC +"""The MINC (McConnell Brain Imaging Centre, Montreal Neurological Institute) toolkit. + +The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index b4dfa1dac8..14c29f7b1b 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -5,8 +5,7 @@ `_ command line tools. This module was written to work with MINC version 2.2.00. -Author: Carlo Hamalainen - http://carlo-hamalainen.net +Author: `Carlo Hamalainen `__ """ import glob import os @@ -1263,7 +1262,7 @@ class BBoxInputSpec(StdOutCommandLineInputSpec): xor=_xor_one_two, ) two_lines = traits.Bool( - desc="Output on two lines: start_x y z \n width_x y z", + desc="""Write output with two rows (start and width).""", argstr="-two_lines", xor=_xor_one_two, ) @@ -1298,13 +1297,13 @@ class BBox(StdOutCommandLine): Examples -------- - >>> from nipype.interfaces.minc import BBox >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> bbox = BBox(input_file=file0) >>> bbox.run() # doctest: +SKIP + """ input_spec = BBoxInputSpec diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index 2bdbfef78b..85cc052c1e 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""MIPAV enables quantitative analysis and visualization of multimodal medical images.""" from .developer import ( JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index 9bc24b1a80..52e2b01b01 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -106,18 +106,17 @@ class JistLaminarVolumetricLayeringOutputSpec(TraitedSpec): class JistLaminarVolumetricLayering(SEMLikeCommandLine): - """title: Volumetric Layering + """Volumetric Layering. -category: Developer Tools + Builds a continuous layering of the cortex following distance-preserving or volume-preserving + models of cortical folding. -description: Builds a continuous layering of the cortex following distance-preserving or volume-preserving models of cortical folding. -Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL, Anatomically motivated modeling of cortical laminae, Neuroimage, 2013. 
+    References
+    ----------
+    Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL,
+    Anatomically motivated modeling of cortical laminae, Neuroimage, 2013.

-version: 3.0.RC
-
-contributor: Miriam Waehnert (waehnert@cbs.mpg.de) http://www.cbs.mpg.de/
-
-"""
+    """

    input_spec = JistLaminarVolumetricLayeringInputSpec
    output_spec = JistLaminarVolumetricLayeringOutputSpec
@@ -217,15 +216,12 @@ class JistBrainMgdmSegmentationOutputSpec(TraitedSpec):

 class JistBrainMgdmSegmentation(SEMLikeCommandLine):
-    """title: MGDM Whole Brain Segmentation
-
-category: Developer Tools
-
-description: Estimate brain structures from an atlas for a MRI dataset (multiple input combinations are possible).
+    """MGDM Whole Brain Segmentation.

-version: 2.0.RC
+    Estimate brain structures from an atlas for a MRI dataset (multiple input combinations
+    are possible).

-"""
+    """

    input_spec = JistBrainMgdmSegmentationInputSpec
    output_spec = JistBrainMgdmSegmentationOutputSpec
@@ -279,15 +275,7 @@ class JistLaminarProfileGeometryOutputSpec(TraitedSpec):

 class JistLaminarProfileGeometry(SEMLikeCommandLine):
-    """title: Profile Geometry
-
-category: Developer Tools
-
-description: Compute various geometric quantities for a cortical layers.
-
-version: 3.0.RC
-
-"""
+    """Compute various geometric quantities for cortical layers."""

    input_spec = JistLaminarProfileGeometryInputSpec
    output_spec = JistLaminarProfileGeometryOutputSpec
@@ -330,15 +318,7 @@ class JistLaminarProfileCalculatorOutputSpec(TraitedSpec):

 class JistLaminarProfileCalculator(SEMLikeCommandLine):
-    """title: Profile Calculator
-
-category: Developer Tools
-
-description: Compute various moments for intensities mapped along a cortical profile.
-
-version: 3.0.RC
-
-"""
+    """Compute various moments for intensities mapped along a cortical profile."""

    input_spec = JistLaminarProfileCalculatorInputSpec
    output_spec = JistLaminarProfileCalculatorOutputSpec
@@ -410,15 +390,7 @@ class MedicAlgorithmN3OutputSpec(TraitedSpec):

 class MedicAlgorithmN3(SEMLikeCommandLine):
-    """title: N3 Correction
-
-category: Developer Tools
-
-description: Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled.
-
-version: 1.8.R
-
-"""
+    """Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled."""

    input_spec = MedicAlgorithmN3InputSpec
    output_spec = MedicAlgorithmN3OutputSpec
@@ -458,15 +430,7 @@ class JistLaminarROIAveragingOutputSpec(TraitedSpec):

 class JistLaminarROIAveraging(SEMLikeCommandLine):
-    """title: Profile ROI Averaging
-
-category: Developer Tools
-
-description: Compute an average profile over a given ROI.
-
-version: 3.0.RC
-
-"""
+    """Compute an average profile over a given ROI."""

    input_spec = JistLaminarROIAveragingInputSpec
    output_spec = JistLaminarROIAveragingOutputSpec
@@ -639,18 +603,18 @@ class MedicAlgorithmLesionToadsOutputSpec(TraitedSpec):

 class MedicAlgorithmLesionToads(SEMLikeCommandLine):
-    """title: Lesion TOADS
+    """Algorithm for simultaneous brain structures and MS lesion segmentation of MS Brains.

-category: Developer Tools
+    The brain segmentation is topologically consistent and the algorithm can use multiple
+    MR sequences as input data.

-description: Algorithm for simulataneous brain structures and MS lesion segmentation of MS Brains. The brain segmentation is topologically consistent and the algorithm can use multiple MR sequences as input data.
-N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L.
Pham, "A Topology-Preserving Approach to the Segmentation of Brain Images with Multiple Sclerosis", NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010. + References + ---------- + N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L. Pham, + "A Topology-Preserving Approach to the Segmentation of Brain Images with Multiple Sclerosis", + NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010. -version: 1.9.R - -contributor: Navid Shiee (navid.shiee@nih.gov) http://iacl.ece.jhu.edu/~nshiee/ - -""" + """ input_spec = MedicAlgorithmLesionToadsInputSpec output_spec = MedicAlgorithmLesionToadsOutputSpec @@ -728,15 +692,11 @@ class JistBrainMp2rageSkullStrippingOutputSpec(TraitedSpec): class JistBrainMp2rageSkullStripping(SEMLikeCommandLine): - """title: MP2RAGE Skull Stripping - -category: Developer Tools - -description: Estimate a brain mask for a MP2RAGE dataset. At least a T1-weighted or a T1 map image is required. + """Estimate a brain mask for a MP2RAGE dataset. -version: 3.0.RC + At least a T1-weighted or a T1 map image is required. -""" + """ input_spec = JistBrainMp2rageSkullStrippingInputSpec output_spec = JistBrainMp2rageSkullStrippingOutputSpec @@ -804,18 +764,14 @@ class JistCortexSurfaceMeshInflationOutputSpec(TraitedSpec): class JistCortexSurfaceMeshInflation(SEMLikeCommandLine): - """title: Surface Mesh Inflation + """Inflates a cortical surface mesh. -category: Developer Tools + References + ---------- + D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham, and J. L. Prince, + Cortical Surface Segmentation and Mapping, NeuroImage, vol. 23, pp. S108--S118, 2004. -description: Inflates a cortical surface mesh. -D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham, and J. L. Prince, Cortical Surface Segmentation and Mapping, NeuroImage, vol. 23, pp. S108--S118, 2004. - -version: 3.0.RC - -contributor: Duygu Tosun - -""" + """ input_spec = JistCortexSurfaceMeshInflationInputSpec output_spec = JistCortexSurfaceMeshInflationOutputSpec @@ -861,17 +817,7 @@ class RandomVolOutputSpec(TraitedSpec): class RandomVol(SEMLikeCommandLine): - """title: Random Volume Generator - -category: Developer Tools - -description: Generate a random scalar volume. - -version: 1.12.RC - -documentation-url: http://www.nitrc.org/projects/jist/ - -""" + """Generate a volume of random scalars.""" input_spec = RandomVolInputSpec output_spec = RandomVolOutputSpec @@ -918,17 +864,11 @@ class MedicAlgorithmImageCalculatorOutputSpec(TraitedSpec): class MedicAlgorithmImageCalculator(SEMLikeCommandLine): - """title: Image Calculator - -category: Developer Tools - -description: Perform simple image calculator operations on two images. The operations include 'Add', 'Subtract', 'Multiply', and 'Divide' - -version: 1.10.RC + """Perform simple image calculator operations on two images. -documentation-url: http://www.iacl.ece.jhu.edu/ + The operations include 'Add', 'Subtract', 'Multiply', and 'Divide' -""" + """ input_spec = MedicAlgorithmImageCalculatorInputSpec output_spec = MedicAlgorithmImageCalculatorOutputSpec @@ -975,15 +915,7 @@ class JistBrainMp2rageDuraEstimationOutputSpec(TraitedSpec): class JistBrainMp2rageDuraEstimation(SEMLikeCommandLine): - """title: MP2RAGE Dura Estimation - -category: Developer Tools - -description: Filters a MP2RAGE brain image to obtain a probability map of dura matter. 
-
-version: 3.0.RC
-
-"""
+    """Filters a MP2RAGE brain image to obtain a probability map of dura mater."""

    input_spec = JistBrainMp2rageDuraEstimationInputSpec
    output_spec = JistBrainMp2rageDuraEstimationOutputSpec
@@ -1029,15 +961,7 @@ class JistLaminarProfileSamplingOutputSpec(TraitedSpec):

 class JistLaminarProfileSampling(SEMLikeCommandLine):
-    """title: Profile Sampling
-
-category: Developer Tools
-
-description: Sample some intensity image along a cortical profile across layer surfaces.
-
-version: 3.0.RC
-
-"""
+    """Sample some intensity image along a cortical profile across layer surfaces."""

    input_spec = JistLaminarProfileSamplingInputSpec
    output_spec = JistLaminarProfileSamplingOutputSpec
@@ -1142,15 +1066,7 @@ class MedicAlgorithmMipavReorientOutputSpec(TraitedSpec):

 class MedicAlgorithmMipavReorient(SEMLikeCommandLine):
-    """title: Reorient Volume
-
-category: Developer Tools
-
-description: Reorient a volume to a particular anatomical orientation.
-
-version: .alpha
-
-"""
+    """Reorient a volume to a particular anatomical orientation."""

    input_spec = MedicAlgorithmMipavReorientInputSpec
    output_spec = MedicAlgorithmMipavReorientOutputSpec
@@ -1409,26 +1325,20 @@ class MedicAlgorithmSPECTRE2010OutputSpec(TraitedSpec):

 class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine):
-    """title: SPECTRE 2010
-
-category: Developer Tools
-
-description: Simple Paradigm for Extra-Cranial Tissue REmoval
-
-Algorithm Version: 1.6
-GUI Version: 1.10
+    """SPECTRE 2010: Simple Paradigm for Extra-Cranial Tissue REmoval [1]_, [2]_.

-A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett, and J.L. Prince, 'A Joint Registration and Segmentation Approach to Skull Stripping', Fourth IEEE International Symposium on Biomedical Imaging (ISBI 2007), Arlington, VA, April 12-15, 2007.
-A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick, and J.L. Prince, 'Simple paradigm for extra-cerebral tissue removal: Algorithm and analysis', NeuroImage 56(4):1982-1992, 2011.
+    References
+    ----------

-version: 1.6.R
+    .. [1] A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett, and J.L. Prince,
+       'A Joint Registration and Segmentation Approach to Skull Stripping',
+       Fourth IEEE International Symposium on Biomedical Imaging (ISBI 2007), Arlington, VA,
+       April 12-15, 2007.
+    .. [2] A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick, and J.L. Prince,
+       'Simple paradigm for extra-cerebral tissue removal: Algorithm and analysis',
+       NeuroImage 56(4):1982-1992, 2011.

-documentation-url: http://www.iacl.ece.jhu.edu/
-
-contributor: Aaron Carass (aaron_carass@jhu.edu) http://www.iacl.ece.jhu.edu/
-Hanlin Wan (hanlinwan@gmail.com)
-
-"""
+    """

    input_spec = MedicAlgorithmSPECTRE2010InputSpec
    output_spec = MedicAlgorithmSPECTRE2010OutputSpec
@@ -1484,15 +1394,11 @@ class JistBrainPartialVolumeFilterOutputSpec(TraitedSpec):

 class JistBrainPartialVolumeFilter(SEMLikeCommandLine):
-    """title: Partial Volume Filter
-
-category: Developer Tools
-
-description: Filters an image for regions of partial voluming assuming a ridge-like model of intensity.
+    """Partial Volume Filter.

-version: 2.0.RC
+    Filters an image for regions of partial voluming assuming a ridge-like model of intensity.
-""" + """ input_spec = JistBrainPartialVolumeFilterInputSpec output_spec = JistBrainPartialVolumeFilterOutputSpec @@ -1575,15 +1481,7 @@ class JistIntensityMp2rageMaskingOutputSpec(TraitedSpec): class JistIntensityMp2rageMasking(SEMLikeCommandLine): - """title: MP2RAGE Background Masking - -category: Developer Tools - -description: Estimate a background signal mask for a MP2RAGE dataset. - -version: 3.0.RC - -""" + """Estimate a background signal mask for a MP2RAGE dataset.""" input_spec = JistIntensityMp2rageMaskingInputSpec output_spec = JistIntensityMp2rageMaskingOutputSpec @@ -1628,17 +1526,11 @@ class MedicAlgorithmThresholdToBinaryMaskOutputSpec(TraitedSpec): class MedicAlgorithmThresholdToBinaryMask(SEMLikeCommandLine): - """title: Threshold to Binary Mask - -category: Developer Tools - -description: Given a volume and an intensity range create a binary mask for values within that range. - -version: 1.2.RC + """Threshold to Binary Mask. -documentation-url: http://www.iacl.ece.jhu.edu/ + Given a volume and an intensity range create a binary mask for values within that range. -""" + """ input_spec = MedicAlgorithmThresholdToBinaryMaskInputSpec output_spec = MedicAlgorithmThresholdToBinaryMaskOutputSpec diff --git a/nipype/interfaces/mne/__init__.py b/nipype/interfaces/mne/__init__.py index 8bf3db28ed..820780e54d 100644 --- a/nipype/interfaces/mne/__init__.py +++ b/nipype/interfaces/mne/__init__.py @@ -1,2 +1,3 @@ # -*- coding: utf-8 -*- +"""MNE is a software for exploring, visualizing, and analyzing human neurophysiological data.""" from .base import WatershedBEM diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py index 917d576eda..3aafdc1db7 100644 --- a/nipype/interfaces/mrtrix/__init__.py +++ b/nipype/interfaces/mrtrix/__init__.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +"""MRTrix version 2 (DEPRECATED) -- tools to perform various types of diffusion MRI analyses.""" from .tracking import ( Tracks2Prob, FilterTracks, diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index c922c4fba4..b7465cdbf2 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -324,7 +324,7 @@ class StreamlineTrackInputSpec(CommandLineInputSpec): argstr="-number %d", desc="Sets the desired number of tracks." 
"The program will continue to generate tracks until this number of tracks have been selected and written to the output file" - "(default is 100 for *_STREAM methods, 1000 for *_PROB methods).", + "(default is 100 for ``*_STREAM`` methods, 1000 for ``*_PROB`` methods).", ) maximum_number_of_tracks = traits.Int( argstr="-maxnum %d", diff --git a/nipype/interfaces/mrtrix3/__init__.py b/nipype/interfaces/mrtrix3/__init__.py index 0dbe3bb872..2970918844 100644 --- a/nipype/interfaces/mrtrix3/__init__.py +++ b/nipype/interfaces/mrtrix3/__init__.py @@ -1,7 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- - +"""MRTrix3 provides software tools to perform various types of diffusion MRI analyses.""" from .utils import ( Mesh2PVE, Generate5tt, diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index e71d9cd37a..301f5deeff 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -40,7 +40,8 @@ class TractographyInputSpec(MRTrix3BaseInputSpec): "Tensor_Prob", usedefault=True, argstr="-algorithm %s", - desc="tractography algorithm to be used", + desc="Tractography algorithm to be used -- References:" + "[FACT]_, [iFOD1]_, [iFOD2]_, [Nulldist]_, [Tensor_Det]_, [Tensor_Prob]_.", ) # ROIs processing options @@ -311,8 +312,10 @@ class TractographyOutputSpec(TraitedSpec): class Tractography(MRTrix3Base): """ - Performs streamlines tractography after selecting the appropriate - algorithm. + Performs streamlines tractography after selecting the appropriate algorithm. + + References + ---------- .. [FACT] Mori, S.; Crain, B. J.; Chacko, V. P. & van Zijl, P. C. M. Three-dimensional tracking of axonal projections in the @@ -340,7 +343,6 @@ class Tractography(MRTrix3Base): Tracking Using the Wild Bootstrap With Diffusion Tensor MRI. IEEE Transactions on Medical Imaging, 2008, 27, 1268-1274 - Example ------- diff --git a/nipype/interfaces/niftyfit/__init__.py b/nipype/interfaces/niftyfit/__init__.py index b9d4725496..d945991fa0 100644 --- a/nipype/interfaces/niftyfit/__init__.py +++ b/nipype/interfaces/niftyfit/__init__.py @@ -1,12 +1,12 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ -The niftyfit module provides classes for interfacing with the `NiftyFit`_ -command line tools. +NiftyFit is a software package for multi-parametric model-fitting of 4D MRI. -Top-level namespace for niftyfit. -""" +The niftyfit module provides classes for interfacing with the `NiftyFit +`__ command line tools. +""" from .asl import FitAsl from .dwi import FitDwi, DwiTool from .qt1 import FitQt1 diff --git a/nipype/interfaces/niftyreg/__init__.py b/nipype/interfaces/niftyreg/__init__.py index 2ea7b95b26..1bc01a9ad5 100644 --- a/nipype/interfaces/niftyreg/__init__.py +++ b/nipype/interfaces/niftyreg/__init__.py @@ -2,12 +2,12 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ +NiftyReg is an open-source software for efficient medical image registration. + The niftyreg module provides classes for interfacing with the `NiftyReg `_ command line tools. -Top-level namespace for niftyreg. 
""" - from .base import get_custom_path from .reg import RegAladin, RegF3D from .regutils import ( diff --git a/nipype/interfaces/niftyseg/maths.py b/nipype/interfaces/niftyseg/maths.py index 0afea087a0..80ceca1da1 100644 --- a/nipype/interfaces/niftyseg/maths.py +++ b/nipype/interfaces/niftyseg/maths.py @@ -6,9 +6,6 @@ The maths module provides higher-level interfaces to some of the operations that can be performed with the niftysegmaths (seg_maths) command-line program. -Examples --------- -See the docstrings of the individual classes for examples. """ import os @@ -125,69 +122,43 @@ class UnaryMathsInput(MathsInput): argstr="-%s", position=4, mandatory=True, - desc="operation to perform", - ) + desc="""\ +Operation to perform: + + * sqrt - Square root of the image). + * exp - Exponential root of the image. + * log - Log of the image. + * recip - Reciprocal (1/I) of the image. + * abs - Absolute value of the image. + * bin - Binarise the image. + * otsu - Otsu thresholding of the current image. + * lconcomp - Take the largest connected component + * concomp6 - Label the different connected components with a 6NN kernel + * concomp26 - Label the different connected components with a 26NN kernel + * fill - Fill holes in binary object (e.g. fill ventricle in brain mask). + * euc - Euclidean distance transform + * tpmax - Get the time point with the highest value (binarise 4D probabilities) + * tmean - Mean value of all time points. + * tmax - Max value of all time points. + * tmin - Mean value of all time points. + * splitlab - Split the integer labels into multiple timepoints + * removenan - Remove all NaNs and replace then with 0 + * isnan - Binary image equal to 1 if the value is NaN and 0 otherwise + * subsamp2 - Subsample the image by 2 using NN sampling (qform and sform scaled) + * scl - Reset scale and slope info. + * 4to5 - Flip the 4th and 5th dimension. + * range - Reset the image range to the min max. + +""") class UnaryMaths(MathsCommand): - """Interface for executable seg_maths from NiftySeg platform. - - Interface to use any unary mathematical operations that can be performed - - with the seg_maths command-line program. - - See below for those operations:: - - sqrt - Square root of the image). - - exp - Exponential root of the image. - - log - Log of the image. - - recip - Reciprocal (1/I) of the image. - - abs - Absolute value of the image. - - bin - Binarise the image. - - otsu - Otsu thresholding of the current image. - - lconcomp - Take the largest connected component - - concomp6 - Label the different connected components with a 6NN kernel - - concomp26 - Label the different connected components with a 26NN kernel - - fill - Fill holes in binary object (e.g. fill ventricle in brain mask). - - euc - Euclidean distance trasnform - - tpmax - Get the time point with the highest value (binarise 4D \ -probabilities) - - tmean - Mean value of all time points. - - tmax - Max value of all time points. - - tmin - Mean value of all time points. - - splitlab - Split the integer labels into multiple timepoints - - removenan - Remove all NaNs and replace then with 0 - - isnan - Binary image equal to 1 if the value is NaN and 0 otherwise + """Unary mathematical operations. - subsamp2 - Subsample the image by 2 using NN sampling (qform and sform \ -scaled) - - scl - Reset scale and slope info. - - 4to5 - Flip the 4th and 5th dimension. - - range - Reset the image range to the min max. 
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
@@ -196,30 +167,35 @@ class UnaryMaths(MathsCommand):
    >>> unary = niftyseg.UnaryMaths()
    >>> unary.inputs.output_datatype = 'float'
    >>> unary.inputs.in_file = 'im1.nii'
+
    >>> # Test sqrt operation
    >>> unary_sqrt = copy.deepcopy(unary)
    >>> unary_sqrt.inputs.operation = 'sqrt'
    >>> unary_sqrt.cmdline
    'seg_maths im1.nii -sqrt -odt float im1_sqrt.nii'
    >>> unary_sqrt.run() # doctest: +SKIP
+
    >>> # Test abs operation
    >>> unary_abs = copy.deepcopy(unary)
    >>> unary_abs.inputs.operation = 'abs'
    >>> unary_abs.cmdline
    'seg_maths im1.nii -abs -odt float im1_abs.nii'
    >>> unary_abs.run() # doctest: +SKIP
+
    >>> # Test bin operation
    >>> unary_bin = copy.deepcopy(unary)
    >>> unary_bin.inputs.operation = 'bin'
    >>> unary_bin.cmdline
    'seg_maths im1.nii -bin -odt float im1_bin.nii'
    >>> unary_bin.run() # doctest: +SKIP
+
    >>> # Test otsu operation
    >>> unary_otsu = copy.deepcopy(unary)
    >>> unary_otsu.inputs.operation = 'otsu'
    >>> unary_otsu.cmdline
    'seg_maths im1.nii -otsu -odt float im1_otsu.nii'
    >>> unary_otsu.run() # doctest: +SKIP
+
    >>> # Test isnan operation
    >>> unary_isnan = copy.deepcopy(unary)
@@ -257,7 +233,32 @@ class BinaryMathsInput(MathsInput):
        mandatory=True,
        argstr="-%s",
        position=4,
-        desc="operation to perform",
+        desc="""\
+Operation to perform:
+
+    * mul - <float/file> - Multiply image value or by other image.
+    * div - <float/file> - Divide image by <float> or by other image.
+    * add - <float/file> - Add image by <float> or by other image.
+    * sub - <float/file> - Subtract image by <float> or by other image.
+    * pow - <float> - Image to the power of <float>.
+    * thr - <float> - Threshold the image below <float>.
+    * uthr - <float> - Threshold image above <float>.
+    * smo - <float> - Gaussian smoothing by std <float> (in voxels and up to 4-D).
+    * edge - <float> - Calculate the edges of the image using a threshold <float>.
+    * sobel3 - <float> - Calculate the edges of all timepoints using a Sobel filter
+      with a 3x3x3 kernel and applying <float> gaussian smoothing.
+    * sobel5 - <float> - Calculate the edges of all timepoints using a Sobel filter
+      with a 5x5x5 kernel and applying <float> gaussian smoothing.
+    * min - <file> - Get the min per voxel between <current> and <file>.
+    * smol - <float> - Gaussian smoothing of a 3D label image.
+    * geo - <float/file> - Geodesic distance according to the speed function <float/file>
+    * llsnorm <file_norm> - Linear LS normalisation between current and <file_norm>
+    * masknan <file_norm> - Assign everything outside the mask (mask==0) with NaNs
+    * hdr_copy <file> - Copy header from working image to <file> and save in <output>.
+    * splitinter <x/y/z> - Split interleaved slices in direction <x/y/z>
+      into separate time points
+
+""",
    )

    operand_file = File(
        exists=True,
@@ -291,61 +292,12 @@ class BinaryMathsInput(MathsInput):


 class BinaryMaths(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any binary mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations::
-
-        mul - <float/file> - Multiply image value or by other image.
-
-        div - <float/file> - Divide image by <float> or by other image.
-
-        add - <float/file> - Add image by <float> or by other image.
-
-        sub - <float/file> - Subtract image by <float> or by other image.
-
-        pow - <float> - Image to the power of <float>.
-
-        thr - <float> - Threshold the image below <float>.
-
-        uthr - <float> - Threshold image above <float>.
-
-        smo - <float> - Gaussian smoothing by std <float> (in voxels and up to \
-4-D).
-
-        edge - <float> - Calculate the edges of the image using a threshold <\
-float>.
+    """Binary mathematical operations.

-        sobel3 - <float> - Calculate the edges of all timepoints using a Sobel \
-filter with a 3x3x3 kernel and applying <float> gaussian smoothing.
-
-        sobel5 - <float> - Calculate the edges of all timepoints using a Sobel \
-filter with a 5x5x5 kernel and applying <float> gaussian smoothing.
-
-        min - <file> - Get the min per voxel between <current> and <file>.
-
-        smol - <float> - Gaussian smoothing of a 3D label image.
-
-        geo - <float/file> - Geodesic distance according to the speed function \
-<float/file>
-
-        llsnorm <file_norm> - Linear LS normalisation between current and \
-<file_norm>
-
-        masknan <file_norm> - Assign everything outside the mask (mask==0) \
-with NaNs
-
-        hdr_copy <file> - Copy header from working image to <file> and save in \
-<output>.
-
-        splitinter <x/y/z> - Split interleaved slices in direction <x/y/z> into \
-separate time points
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
@@ -354,6 +306,7 @@ class BinaryMaths(MathsCommand):
    >>> binary = niftyseg.BinaryMaths()
    >>> binary.inputs.in_file = 'im1.nii'
    >>> binary.inputs.output_datatype = 'float'
+
    >>> # Test sub operation
    >>> binary_sub = copy.deepcopy(binary)
    >>> binary_sub.inputs.operation = 'sub'
    >>> binary_sub.inputs.operand_file = 'im2.nii'
    >>> binary_sub.cmdline
    'seg_maths im1.nii -sub im2.nii -odt float im1_sub.nii'
    >>> binary_sub.run() # doctest: +SKIP
+
    >>> # Test mul operation
    >>> binary_mul = copy.deepcopy(binary)
    >>> binary_mul.inputs.operation = 'mul'
    >>> binary_mul.inputs.operand_value = 2.0
    >>> binary_mul.cmdline
    'seg_maths im1.nii -mul 2.00000000 -odt float im1_mul.nii'
    >>> binary_mul.run() # doctest: +SKIP
+
    >>> # Test llsnorm operation
    >>> binary_llsnorm = copy.deepcopy(binary)
    >>> binary_llsnorm.inputs.operation = 'llsnorm'
    >>> binary_llsnorm.inputs.operand_file = 'im2.nii'
    >>> binary_llsnorm.cmdline
    'seg_maths im1.nii -llsnorm im2.nii -odt float im1_llsnorm.nii'
    >>> binary_llsnorm.run() # doctest: +SKIP
+
    >>> # Test splitinter operation
    >>> binary_splitinter = copy.deepcopy(binary)
    >>> binary_splitinter.inputs.operation = 'splitinter'
@@ -440,8 +396,17 @@ class BinaryMathsInputInteger(MathsInput):
        mandatory=True,
        argstr="-%s",
        position=4,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * equal - <int> - Get voxels equal to <int>
+    * dil - <int> - Dilate the image <int> times (in voxels).
+    * ero - <int> - Erode the image <int> times (in voxels).
+    * tp - <int> - Extract time point <int>
+    * crop - <int> - Crop <int> voxels around each 3D volume.
+    * pad - <int> - Pad <int> voxels with NaN value around each 3D volume.
+
+""")

    operand_value = traits.Int(
        argstr="%d",
@@ -452,28 +417,12 @@ class BinaryMathsInputInteger(MathsInput):


 class BinaryMathsInteger(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any integer mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations:: (requiring integer values)
-
-        equal - <int> - Get voxels equal to <int>
-
-        dil - <int> - Dilate the image <int> times (in voxels).
+    """Integer mathematical operations.

-        ero - <int> - Erode the image <int> times (in voxels).
-
-        tp - <int> - Extract time point <int>
-
-        crop - <int> - Crop <int> voxels around each 3D volume.
-
-        pad - <int> - Pad <int> voxels with NaN value around each 3D volume.
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
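+    A hedged sketch of an integer operation (``dil``, using the
+    ``operand_value`` trait shown above; the auto-generated output
+    name is indicative only):
+
+    >>> import copy
+    >>> from nipype.interfaces import niftyseg
+    >>> binaryi = niftyseg.BinaryMathsInteger()
+    >>> binaryi.inputs.in_file = 'im1.nii'
+    >>> binaryi.inputs.output_datatype = 'float'
+    >>> binaryi.inputs.operation = 'dil'
+    >>> binaryi.inputs.operand_value = 2
+    >>> binaryi.run()  # doctest: +SKIP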
@@ -519,8 +468,14 @@ class TupleMathsInput(MathsInput):
        mandatory=True,
        argstr="-%s",
        position=4,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * lncc <file_norm> <kernel_size> Local CC between current img and <file_norm> on a kernel with <kernel_size>
+    * lssd <file_norm> <kernel_size> Local SSD between current img and <file_norm> on a kernel with <kernel_size>
+    * lltsnorm <file_norm> <percent> Linear LTS normalisation assuming <percent> percent outliers
+
+""")

    operand_file1 = File(
        exists=True,
@@ -552,25 +507,12 @@ class TupleMathsInput(MathsInput):


 class TupleMaths(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any tuple mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations::
+    """Mathematical operations on tuples.

-        lncc <file_norm> <kernel_size> Local CC between current img and <file_norm> on a kernel \
-with <kernel_size>
-
-        lssd <file_norm> <kernel_size> Local SSD between current img and <file_norm> on a kernel \
-with <kernel_size>
-
-        lltsnorm <file_norm> <percent> Linear LTS normalisation assuming <percent> \
-percent outliers
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
@@ -604,9 +546,9 @@ class TupleMaths(MathsCommand):
    >>> tuple_lltsnorm.inputs.operand_file1 = 'im2.nii'
    >>> tuple_lltsnorm.inputs.operand_value2 = 0.01
    >>> tuple_lltsnorm.cmdline
-    'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float \
-im1_lltsnorm.nii'
+    'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float im1_lltsnorm.nii'
    >>> tuple_lltsnorm.run() # doctest: +SKIP
+
    """

    input_spec = TupleMathsInput
@@ -616,27 +558,19 @@ class MergeInput(MathsInput):
    """Input Spec for seg_maths merge operation."""

    dimension = traits.Int(mandatory=True, desc="Dimension to merge the images.")
-
-    desc = "List of images to merge to the working image <output>."
    merge_files = traits.List(
-        File(exists=True), argstr="%s", mandatory=True, position=4, desc=desc
+        File(exists=True), argstr="%s", mandatory=True, position=4,
+        desc="List of images to merge to the working image <output>."
    )


 class Merge(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
+    """Merge image files.

-    Interface to use the merge operation that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for this option::
-
-        merge <i> <d> <files> Merge <i> images and the working image in the <d> \
-        dimension
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
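+    A hedged sketch (``merge_files`` and ``dimension`` are the traits shown
+    above; the resulting ``seg_maths`` command line is not reproduced here):
+
+    >>> from nipype.interfaces import niftyseg
+    >>> merge = niftyseg.Merge()
+    >>> merge.inputs.in_file = 'im1.nii'
+    >>> merge.inputs.merge_files = ['im2.nii', 'im3.nii']
+    >>> merge.inputs.dimension = 4
+    >>> merge.run()  # doctest: +SKIP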
diff --git a/nipype/interfaces/niftyseg/stats.py b/nipype/interfaces/niftyseg/stats.py
index 611f293b42..4d9e598ddf 100644
--- a/nipype/interfaces/niftyseg/stats.py
+++ b/nipype/interfaces/niftyseg/stats.py
@@ -101,59 +101,39 @@ class UnaryStatsInput(StatsInput):
        argstr="-%s",
        position=4,
        mandatory=True,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * r - The range of all voxels.
+    * R - The robust range (assuming 2% outliers on both sides) of all voxels
+    * a - Average of all voxels
+    * s - Standard deviation of all voxels
+    * v - Volume of all voxels above 0 (<# voxels> * <volume per voxel>)
+    * vl - Volume of each integer label (<# voxels per label> x <volume per voxel>)
+    * vp - Volume of all probabilistic voxels (sum(<in>) x <volume per voxel>)
+    * n - Count of all voxels above 0 (<# voxels>)
+    * np - Sum of all fuzzy voxels (sum(<in>))
+    * e - Entropy of all voxels
+    * ne - Normalized entropy of all voxels
+    * x - Location (i j k x y z) of the smallest value in the image
+    * X - Location (i j k x y z) of the largest value in the image
+    * c - Location (i j k x y z) of the centre of mass of the object
+    * B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ]
+    * xvox - Output the number of voxels in the x direction.
+      Replace x with y/z for other directions.
+    * xdim - Output the voxel dimension in the x direction.
+      Replace x with y/z for other directions.
+
+""")


 class UnaryStats(StatsCommand):
-    """
-    Interface for executable seg_stats from NiftySeg platform.
-
-    Interface to use any unary statistical operations that can be performed
-
-    with the seg_stats command-line program.
-
-    See below for those operations::
-
-        r - The range of all voxels.
-
-        R - The robust range (assuming 2% outliers on both sides) of all voxels
-
-        a - Average of all voxels
-
-        s - Standard deviation of all voxels
-
-        v - Volume of all voxels above 0 (<# voxels> * <volume per voxel>)
-
-        vl - Volume of each integer label (<# voxels per label> * \
-<volume per voxel>)
-
-        vp - Volume of all probabilsitic voxels (sum(<in>) * <volume per voxel>)
-
-        n - Count of all voxels above 0 (<# voxels>)
-
-        np - Sum of all fuzzy voxels (sum(<in>))
-
-        e - Entropy of all voxels
-
-        ne - Normalized entropy of all voxels
-
-        x - Location (i j k x y z) of the smallest value in the image
-
-        X - Location (i j k x y z) of the largest value in the image
-
-        c - Location (i j k x y z) of the centre of mass of the object
-
-        B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ]
-
-        xvox - Output the number of voxels in the x direction. Replace x with \
-y/z for other directions.
+    """Unary statistical operations.

-        xdim - Output the voxel dimention in the x direction. Replace x with \
-y/z for other directions.
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
@@ -161,18 +141,21 @@
    >>> from nipype.interfaces import niftyseg
    >>> unary = niftyseg.UnaryStats()
    >>> unary.inputs.in_file = 'im1.nii'
+
    >>> # Test v operation
    >>> unary_v = copy.deepcopy(unary)
    >>> unary_v.inputs.operation = 'v'
    >>> unary_v.cmdline
    'seg_stats im1.nii -v'
    >>> unary_v.run() # doctest: +SKIP
+
    >>> # Test vl operation
    >>> unary_vl = copy.deepcopy(unary)
    >>> unary_vl.inputs.operation = 'vl'
    >>> unary_vl.cmdline
    'seg_stats im1.nii -vl'
    >>> unary_vl.run() # doctest: +SKIP
+
    >>> # Test x operation
    >>> unary_x = copy.deepcopy(unary)
    >>> unary_x.inputs.operation = 'x'
@@ -202,8 +185,21 @@ class BinaryStatsInput(StatsInput):
        mandatory=True,
        argstr="-%s",
        position=4,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * p - <float> - The <float>th percentile of all voxels intensity (float=[0,100])
+    * sa - <ax> - Average of all voxels
+    * ss - <ax> - Standard deviation of all voxels
+    * svp - <in> - Volume of all probabilistic voxels (sum(<in>) x <volume per voxel>)
+    * al - <in2> - Average value in <in> for each label in <in2>
+    * d - <in2> - Calculate the Dice score between all classes in <in> and <in2>
+    * ncc - <in2> - Normalized cross correlation between <in> and <in2>
+    * nmi - <in2> - Normalized Mutual Information between <in> and <in2>
+    * Vl - <csv> - Volume of each integer label <in>. Save to <csv> file.
+    * Nl - <csv> - Count of each label <in>. Save to <csv> file.
+
+""")

    operand_file = File(
        exists=True,
@@ -224,40 +220,12 @@ class BinaryStats(StatsCommand):
-    """
-    Interface for executable seg_stats from NiftySeg platform.
-
-    Interface to use any binary statistical operations that can be performed
-
-    with the seg_stats command-line program.
-
-    See below for those operations::
-
-        p - <float> - The <float>th percentile of all voxels intensity \
-(float=[0,100])
+    """Binary statistical operations.

-        sa - <ax> - Average of all voxels
-
-        ss - <ax> - Standard deviation of all voxels
-
-        svp - <in> - Volume of all probabilsitic voxels (sum(<in>) * \
-<volume per voxel>)
-
-        al - <in2> - Average value in <in> for each label in <in2>
-
-        d - <in2> - Calculate the Dice score between all classes in <in> \
-and <in2>
-
-        ncc - <in2> - Normalized cross correlation between <in> and <in2>
-
-        nmi - <in2> - Normalized Mutual Information between <in> and <in2>
-
-        Vl - <csv> - Volume of each integer label <in>. Save to <csv> file.
-
-        Nl - <csv> - Count of each label <in>. Save to <csv> file.
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

    Examples
    --------
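+    A hedged sketch (``sa`` operation; ``operand_value`` is assumed to be the
+    float-valued companion of the ``operand_file`` trait shown above, and the
+    generated command line is not reproduced here):
+
+    >>> from nipype.interfaces import niftyseg
+    >>> binary = niftyseg.BinaryStats()
+    >>> binary.inputs.in_file = 'im1.nii'
+    >>> binary.inputs.operation = 'sa'
+    >>> binary.inputs.operand_value = 2.0
+    >>> binary.run()  # doctest: +SKIP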
diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py
index 82da210fac..68f88b51e7 100644
--- a/nipype/interfaces/nilearn.py
+++ b/nipype/interfaces/nilearn.py
@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""
-Algorithms to compute statistics on :abbr:`fMRI (functional MRI)`
-"""
+"""Nilearn is a Python module for fast and easy statistical learning on NeuroImaging data."""
 import os

 import numpy as np
diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py
index 19d030b61a..72317edae0 100644
--- a/nipype/interfaces/nipy/__init__.py
+++ b/nipype/interfaces/nipy/__init__.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+"""NIPY is a python project for analysis of structural and functional neuroimaging data."""
 from .model import FitGLM, EstimateContrast
 from .preprocess import ComputeMask, SpaceTimeRealigner
 from .utils import Similarity
diff --git a/nipype/interfaces/nitime/__init__.py b/nipype/interfaces/nitime/__init__.py
index f237859eb6..f3fc84079a 100644
--- a/nipype/interfaces/nitime/__init__.py
+++ b/nipype/interfaces/nitime/__init__.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-
+"""Nitime is a library for time-series analysis of data from neuroscience experiments."""
 from .analysis import (
    CoherenceAnalyzerInputSpec,
    CoherenceAnalyzerOutputSpec,
diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py
index 93787c1964..8abc0db75a 100644
--- a/nipype/interfaces/nitime/analysis.py
+++ b/nipype/interfaces/nitime/analysis.py
@@ -2,7 +2,6 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """
-
 Interfaces to functionality from nitime for time-series analysis of fmri data

 - nitime.analysis.CoherenceAnalyzer: Coherence/y
@@ -48,7 +47,7 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec):

    # If you gave just a file name, you need to specify the sampling_rate:
    TR = traits.Float(
-        desc=("The TR used to collect the data" "in your csv file <csv_file>")
+        desc=("The TR used to collect the data in your csv file <csv_file>")
    )

    in_TS = traits.Any(desc="a nitime TimeSeries object")
@@ -87,11 +86,11 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec):
    )

    output_csv_file = File(
-        desc="File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}"
+        desc="File to write outputs (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``"
    )

    output_figure_file = File(
-        desc="File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,..."
+        desc="File to write output figures (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``. Possible formats: .png,.svg,.pdf,.jpg,..."
) figure_type = traits.Enum( @@ -109,19 +108,19 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_array = traits.Array( - desc=("The pairwise coherence values" "between the ROIs") + desc=("The pairwise coherence values between the ROIs") ) timedelay_array = traits.Array( - desc=("The pairwise time delays between the" "ROIs (in seconds)") + desc=("The pairwise time delays between the ROIs (in seconds)") ) coherence_csv = File( - desc=("A csv file containing the pairwise " "coherence values") + desc=("A csv file containing the pairwise coherence values") ) timedelay_csv = File( - desc=("A csv file containing the pairwise " "time delay values") + desc=("A csv file containing the pairwise time delay values") ) coherence_fig = File(desc=("Figure representing coherence values")) @@ -129,6 +128,7 @@ class CoherenceAnalyzerOutputSpec(TraitedSpec): class CoherenceAnalyzer(NitimeBaseInterface): + """Wraps nitime.analysis.CoherenceAnalyzer: Coherence/y""" input_spec = CoherenceAnalyzerInputSpec output_spec = CoherenceAnalyzerOutputSpec diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index c59b7f2777..311f770848 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""PETPVC is a toolbox for partial volume correction in positron emission tomography.""" import os from .base import ( @@ -48,7 +48,34 @@ class PETPVCInputSpec(CommandLineInputSpec): desc="Mask image file", exists=True, mandatory=True, argstr="-m %s" ) pvc = traits.Enum( - pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s" + pvc_methods, mandatory=True, argstr="-p %s", + desc="""\ +Desired PVC method: + + * Geometric transfer matrix -- ``GTM`` + * Labbe approach -- ``LABBE`` + * Richardson-Lucy -- ``RL`` + * Van-Cittert -- ``VC`` + * Region-based voxel-wise correction -- ``RBV`` + * RBV with Labbe -- ``LABBE+RBV`` + * RBV with Van-Cittert -- ``RBV+VC`` + * RBV with Richardson-Lucy -- ``RBV+RL`` + * RBV with Labbe and Van-Cittert -- ``LABBE+RBV+VC`` + * RBV with Labbe and Richardson-Lucy -- ``LABBE+RBV+RL`` + * Multi-target correction -- ``MTC`` + * MTC with Labbe -- ``LABBE+MTC`` + * MTC with Van-Cittert -- ``MTC+VC`` + * MTC with Richardson-Lucy -- ``MTC+RL`` + * MTC with Labbe and Van-Cittert -- ``LABBE+MTC+VC`` + * MTC with Labbe and Richardson-Lucy -- ``LABBE+MTC+RL`` + * Iterative Yang -- ``IY`` + * Iterative Yang with Van-Cittert -- ``IY+VC`` + * Iterative Yang with Richardson-Lucy -- ``IY+RL`` + * Muller Gartner -- ``MG`` + * Muller Gartner with Van-Cittert -- ``MG+VC`` + * Muller Gartner with Richardson-Lucy -- ``MG+RL`` + +""" ) fwhm_x = traits.Float( desc="The full-width at half maximum in mm along x-axis", @@ -93,75 +120,11 @@ class PETPVCOutputSpec(TraitedSpec): class PETPVC(CommandLine): - """ Use PETPVC for partial volume correction of PET images. + """Use PETPVC for partial volume correction of PET images. - PETPVC is a software from the Nuclear Medicine Department + PETPVC ([1]_, [2]_) is a software from the Nuclear Medicine Department of the UCL University Hospital, London, UK. - Its source code is here: https://github.com/UCL/PETPVC - - The methods that it implement are explained here: - K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. 
Hutton, - "A review of partial volume correction techniques for emission tomography - and their applications in neurology, cardiology and oncology," Phys. Med. - Biol., vol. 57, no. 21, p. R119, 2012. - - Its command line help shows this: - - -i --input < filename > - = PET image file - -o --output < filename > - = Output file - [ -m --mask < filename > ] - = Mask image file - -p --pvc < keyword > - = Desired PVC method - -x < X > - = The full-width at half maximum in mm along x-axis - -y < Y > - = The full-width at half maximum in mm along y-axis - -z < Z > - = The full-width at half maximum in mm along z-axis - [ -d --debug ] - = Prints debug information - [ -n --iter [ Val ] ] - = Number of iterations - With: Val (Default = 10) - [ -k [ Val ] ] - = Number of deconvolution iterations - With: Val (Default = 10) - [ -a --alpha [ aval ] ] - = Alpha value - With: aval (Default = 1.5) - [ -s --stop [ stopval ] ] - = Stopping criterion - With: stopval (Default = 0.01) - - Technique - keyword - ------------------- - - Geometric transfer matrix - "GTM" - - Labbe approach - "LABBE" - - Richardson-Lucy - "RL" - - Van-Cittert - "VC" - - Region-based voxel-wise correction - "RBV" - - RBV with Labbe - "LABBE+RBV" - - RBV with Van-Cittert - "RBV+VC" - - RBV with Richardson-Lucy - "RBV+RL" - - RBV with Labbe and Van-Cittert - "LABBE+RBV+VC" - - RBV with Labbe and Richardson-Lucy- "LABBE+RBV+RL" - - Multi-target correction - "MTC" - - MTC with Labbe - "LABBE+MTC" - - MTC with Van-Cittert - "MTC+VC" - - MTC with Richardson-Lucy - "MTC+RL" - - MTC with Labbe and Van-Cittert - "LABBE+MTC+VC" - - MTC with Labbe and Richardson-Lucy- "LABBE+MTC+RL" - - Iterative Yang - "IY" - - Iterative Yang with Van-Cittert - "IY+VC" - - Iterative Yang with Richardson-Lucy - "IY+RL" - - Muller Gartner - "MG" - - Muller Gartner with Van-Cittert - "MG+VC" - - Muller Gartner with Richardson-Lucy - "MG+RL" - Examples -------- >>> from ..testing import example_data @@ -175,6 +138,15 @@ class PETPVC(CommandLine): >>> pvc.inputs.fwhm_y = 2.0 >>> pvc.inputs.fwhm_z = 2.0 >>> outs = pvc.run() #doctest: +SKIP + + References + ---------- + .. [1] K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. Hutton, + "A review of partial volume correction techniques for emission tomography + and their applications in neurology, cardiology and oncology," Phys. Med. + Biol., vol. 57, no. 21, p. R119, 2012. + .. 
[2] https://github.com/UCL/PETPVC
+
+    """

    input_spec = PETPVCInputSpec
diff --git a/nipype/interfaces/quickshear.py b/nipype/interfaces/quickshear.py
index 7725abfeb5..b7409fdbf3 100644
--- a/nipype/interfaces/quickshear.py
+++ b/nipype/interfaces/quickshear.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
-""" Quickshear is a simple geometric defacing algorithm
-"""
+"""Quickshear is a simple geometric defacing algorithm."""
 from .base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File
 from ..external.due import BibTeX
diff --git a/nipype/interfaces/semtools/__init__.py b/nipype/interfaces/semtools/__init__.py
index 243e3a43a0..a09c926c37 100644
--- a/nipype/interfaces/semtools/__init__.py
+++ b/nipype/interfaces/semtools/__init__.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+"""SEM (Slicer Execution Model) Tools are command-line programs wrapped through their SEM descriptors."""
 from .diffusion import *
 from .featurecreator import GenerateCsfClippedFromClassifiedImage
 from .segmentation import *
diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py
index bef4698d03..91c56b131f 100644
--- a/nipype/interfaces/slicer/__init__.py
+++ b/nipype/interfaces/slicer/__init__.py
@@ -1,4 +1,10 @@
 # -*- coding: utf-8 -*-
+"""
+3D Slicer is a platform for medical image informatics processing and visualization.
+
+For an EXPERIMENTAL implementation of an interface for the ``3dSlicer`` full framework,
+please check `"dynamic" Slicer `__.
+"""
 from .diffusion import *
 from .segmentation import *
 from .filtering import *
diff --git a/nipype/interfaces/spm/__init__.py b/nipype/interfaces/spm/__init__.py
index 0d5c91abfb..fcb6926eb6 100644
--- a/nipype/interfaces/spm/__init__.py
+++ b/nipype/interfaces/spm/__init__.py
@@ -1,8 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""Top-level namespace for spm."""
-
+"""SPM is a software package for the analysis of brain imaging data sequences."""
 from .base import Info, SPMCommand, logger, no_spm, scans_for_fname, scans_for_fnames
 from .preprocess import (
    FieldMap,
diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py
index c2f50f56fc..e230ceb9bc 100644
--- a/nipype/interfaces/spm/model.py
+++ b/nipype/interfaces/spm/model.py
@@ -45,52 +45,49 @@ class Level1DesignInputSpec(SPMCommandInputSpec):
        field="timing.RT", desc="Interscan interval in secs", mandatory=True
    )
    microtime_resolution = traits.Int(
-        field="timing.fmri_t", desc=("Number of time-bins per scan " "in secs (opt)")
+        field="timing.fmri_t", desc=("Number of time-bins per scan in secs (opt)")
    )
    microtime_onset = traits.Float(
        field="timing.fmri_t0",
-        desc=("The onset/time-bin in seconds for " "alignment (opt)"),
+        desc=("The onset/time-bin in seconds for alignment (opt)"),
    )
    session_info = traits.Any(
        field="sess",
-        desc=("Session specific information generated " "by ``modelgen.SpecifyModel``"),
+        desc=("Session specific information generated by ``modelgen.SpecifyModel``"),
        mandatory=True,
    )
    factor_info = traits.List(
        traits.Dict(traits.Enum("name", "levels")),
        field="fact",
-        desc=("Factor specific information " "file (opt)"),
+        desc=("Factor specific information file (opt)"),
    )
    bases = traits.Dict(
        traits.Enum("hrf", "fourier", "fourier_han", "gamma", "fir"),
        field="bases",
-        desc="""
-            dict {'name':{'basesparam1':val,...}}
-            name : string
-                Name of basis function (hrf, fourier, fourier_han,
-                gamma, fir)
-
-                hrf :
-                    derivs : 2-element list
-                        Model HRF Derivatives. No derivatives: [0,0],
-                        Time derivatives : [1,0], Time and Dispersion
-                        derivatives: [1,1]
-                fourier, fourier_han, gamma, fir:
-                    length : int
-                        Post-stimulus window length (in seconds)
-                    order : int
-                        Number of basis functions
+        desc="""\
+Dictionary mapping the basis function name to its parameters:
+
+    * hrf
+
+        * derivs -- (2-element list) Model HRF Derivatives. No derivatives: [0,0],
+          Time derivatives : [1,0], Time and Dispersion derivatives: [1,1]
+
+    * fourier, fourier_han, gamma, or fir:
+
+        * length -- (int) Post-stimulus window length (in seconds)
+        * order -- (int) Number of basis functions
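+
+For example, a canonical HRF with time (but not dispersion) derivatives
+would be requested as follows (a sketch following the format above)::
+
+    bases = {'hrf': {'derivs': [1, 0]}}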
+
+""",
        mandatory=True,
    )
    volterra_expansion_order = traits.Enum(
-        1, 2, field="volt", desc=("Model interactions - " "yes:1, no:2")
+        1, 2, field="volt", desc=("Model interactions - yes:1, no:2")
    )
    global_intensity_normalization = traits.Enum(
        "none",
        "scaling",
        field="global",
-        desc=("Global intensity " "normalization - " "scaling or none"),
+        desc=("Global intensity normalization - scaling or none"),
    )
    mask_image = File(
        exists=True, field="mask", desc="Image for explicitly masking the analysis"
@@ -192,7 +189,7 @@ def _make_matlab_command(self, content):
            postscript += (
                "SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % self.inputs.mask_threshold
            )
-            postscript += "SPM.xM.xs = struct('Masking', " "'explicit masking only');\n"
+            postscript += "SPM.xM.xs = struct('Masking', 'explicit masking only');\n"
            postscript += "save SPM SPM;\n"
        else:
            postscript = None
@@ -220,7 +217,7 @@ class EstimateModelInputSpec(SPMCommandInputSpec):
        field="method",
        mandatory=True,
        desc=(
-            "Dictionary of either Classical: 1, Bayesian: 1, " "or Bayesian2: 1 (dict)"
+            "Dictionary of either Classical: 1, Bayesian: 1, or Bayesian2: 1 (dict)"
        ),
    )
    write_residuals = traits.Bool(
@@ -388,7 +385,7 @@ class EstimateContrastInputSpec(SPMCommandInputSpec):
    )
    beta_images = InputMultiPath(
        File(exists=True),
-        desc=("Parameter estimates of the " "design matrix"),
+        desc=("Parameter estimates of the design matrix"),
        copyfile=False,
        mandatory=True,
    )
@@ -474,7 +471,7 @@ def _make_matlab_command(self, _):
            script += "condnames=names;\n"
        else:
            if self.inputs.use_derivs:
-                script += r"pat = 'Sn\([0-9]*\) (.*)';" "\n"
+                script += r"pat = 'Sn\([0-9]*\) (.*)';\n"
            else:
                script += (
                    r"pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) "
@@ -483,7 +480,7 @@ def _make_matlab_command(self, _):
                )
            script += "t = regexp(names,pat,'tokens');\n"
            # get sessidx for columns
-            script += r"pat1 = 'Sn\(([0-9].*)\)\s.*';" "\n"
+            script += r"pat1 = 'Sn\(([0-9].*)\)\s.*';\n"
            script += "t1 = regexp(names,pat1,'tokens');\n"
            script += (
                "for i0=1:numel(t),condnames{i0}='';condsess(i0)=0;if "
@@ -506,7 +503,7 @@ def _make_matlab_command(self, _):
                    for sno, sw in enumerate(contrast.sessions):
                        script += "sidx = find(condsess(idx)==%d);\n" % (sno + 1)
                        script += (
-                            "consess{%d}.tcon.convec(idx(sidx)) " "= %f;\n"
+                            "consess{%d}.tcon.convec(idx(sidx)) = %f;\n"
                        ) % (i + 1, sw * contrast.weights[c0])
                else:
                    script += "consess{%d}.tcon.convec(idx) = %f;\n" % (
@@ -526,7 +523,7 @@ def _make_matlab_command(self, _):
                            "to the F contrasts"
                        )
                    script += (
-                        "consess{%d}.fcon.convec{%d} = " "consess{%d}.tcon.convec;\n"
+                        "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;\n"
                    ) % (i + 1, cl0 + 1, tidx + 1)
        script += "jobs{1}.stats{1}.con.consess = consess;\n"
        script += (
@@ -588,23 +585,23 @@ class ThresholdInputSpec(SPMCommandInputSpec):
    use_topo_fdr = traits.Bool(
        True,
        usedefault=True,
-        desc=("whether to use FDR over cluster extent " "probabilities"),
+        desc=("whether to use FDR over
cluster extent probabilities"), ) height_threshold = traits.Float( 0.05, usedefault=True, - desc=("value for initial thresholding " "(defining clusters)"), + desc=("value for initial thresholding (defining clusters)"), ) height_threshold_type = traits.Enum( "p-value", "stat", usedefault=True, - desc=("Is the cluster forming " "threshold a stat value or " "p-value?"), + desc=("Is the cluster forming threshold a stat value or p-value?"), ) extent_fdr_p_threshold = traits.Float( 0.05, usedefault=True, - desc=("p threshold on FDR corrected " "cluster size probabilities"), + desc=("p threshold on FDR corrected cluster size probabilities"), ) extent_threshold = traits.Int( 0, usedefault=True, desc="Minimum cluster size in voxels" @@ -825,7 +822,7 @@ class ThresholdStatisticsInputSpec(SPMCommandInputSpec): mandatory=True, desc="which contrast in the SPM.mat to use" ) height_threshold = traits.Float( - desc=("stat value for initial " "thresholding (defining clusters)"), + desc=("stat value for initial thresholding (defining clusters)"), mandatory=True, ) extent_threshold = traits.Int( @@ -946,7 +943,7 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): key_trait=traits.Enum("vector", "name", "interaction", "centering") ), field="cov", - desc=("covariate dictionary {vector, name, " "interaction, centering}"), + desc=("covariate dictionary {vector, name, interaction, centering}"), ) threshold_mask_none = traits.Bool( field="masking.tm.tm_none", @@ -961,10 +958,10 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): threshold_mask_relative = traits.Float( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], - desc=("threshold using a " "proportion of the global " "value"), + desc=("threshold using a proportion of the global value"), ) use_implicit_threshold = traits.Bool( - field="masking.im", desc=("use implicit mask NaNs or " "zeros to threshold") + field="masking.im", desc=("use implicit mask NaNs or zeros to threshold") ) explicit_mask_file = File( field="masking.em", # requires cell @@ -987,14 +984,14 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): desc="omit global calculation", ) no_grand_mean_scaling = traits.Bool( - field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean " "scaling") + field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean scaling") ) global_normalization = traits.Enum( 1, 2, 3, field="globalm.glonorm", - desc=("global normalization None-1, " "Proportional-2, ANCOVA-3"), + desc=("global normalization None-1, Proportional-2, ANCOVA-3"), ) @@ -1099,11 +1096,11 @@ class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): desc="Group 2 input files", ) dependent = traits.Bool( - field="des.t2.dept", desc=("Are the measurements dependent between " "levels") + field="des.t2.dept", desc=("Are the measurements dependent between levels") ) unequal_variance = traits.Bool( field="des.t2.variance", - desc=("Are the variances equal or unequal " "between groups"), + desc=("Are the variances equal or unequal between groups"), ) @@ -1183,7 +1180,7 @@ class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): user_covariates = InputMultiPath( traits.Dict(key_trait=traits.Enum("vector", "name", "centering")), field="des.mreg.mcov", - desc=("covariate dictionary {vector, " "name, centering}"), + desc=("covariate dictionary {vector, name, centering}"), ) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index b6a705fa8e..528fbc282d 100644 --- 
a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -205,9 +205,10 @@ class FieldMap(SPMCommand): http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=173 - To do - ----- - Deal with real/imag magnitude images and with the two phase files case. + .. important:: + + This interface does not handle real/imag magnitude images, nor + the case of two phase files. Examples -------- @@ -266,18 +267,18 @@ class SliceTimingInputSpec(SPMCommandInputSpec): ) time_repetition = traits.Float( field="tr", - desc=("time between volume acquisitions" "(start to start time)"), + desc=("time between volume acquisitions (start to start time)"), mandatory=True, ) time_acquisition = traits.Float( field="ta", - desc=("time of volume acquisition. usually" "calculated as TR-(TR/num_slices)"), + desc=("time of volume acquisition. usually calculated as TR-(TR/num_slices)"), mandatory=True, ) slice_order = traits.List( traits.Float(), field="so", - desc=("1-based order or onset (in ms) in which " "slices are acquired"), + desc=("1-based order or onset (in ms) in which slices are acquired"), mandatory=True, ) ref_slice = traits.Int( @@ -378,7 +379,7 @@ class RealignInputSpec(SPMCommandInputSpec): ) register_to_mean = traits.Bool( field="eoptions.rtm", - desc=("Indicate whether realignment is " "done to the mean image"), + desc=("Indicate whether realignment is done to the mean image"), ) weight_img = File( exists=True, field="eoptions.weight", desc="filename of weighting image" ) @@ -408,14 +409,14 @@ class RealignInputSpec(SPMCommandInputSpec): low=0, high=7, field="roptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="roptions.wrap", - desc=("Check if interpolation should wrap in " "[x,y,z]"), + desc=("Check if interpolation should wrap in [x,y,z]"), ) write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( @@ -448,7 +449,7 @@ class RealignOutputSpec(TraitedSpec): ), ) realignment_parameters = OutputMultiPath( - File(exists=True), desc=("Estimated translation and " "rotation parameters") + File(exists=True), desc=("Estimated translation and rotation parameters") ) @@ -872,14 +873,14 @@ class CoregisterInputSpec(SPMCommandInputSpec): low=0, high=7, field="roptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="roptions.wrap", - desc=("Check if interpolation should wrap in " "[x,y,z]"), + desc=("Check if interpolation should wrap in [x,y,z]"), ) write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( @@ -1025,7 +1026,7 @@ class NormalizeInputSpec(SPMCommandInputSpec): field="eoptions.cutoff", desc="Cutoff of for DCT bases" ) nonlinear_iterations = traits.Int( - field="eoptions.nits", desc=("Number of iterations of " "nonlinear warping") + field="eoptions.nits", desc=("Number of iterations of nonlinear warping") ) nonlinear_regularization = traits.Float( field="eoptions.reg", @@ -1053,12 +1054,12 @@ class NormalizeInputSpec(SPMCommandInputSpec): low=0, high=7, field="roptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), field="roptions.wrap", - desc=("Check if interpolation 
should wrap in " "[x,y,z] - list of bools"), + desc=("Check if interpolation should wrap in [x,y,z] - list of bools"), ) out_prefix = traits.String( "w", field="roptions.prefix", usedefault=True, desc="normalized output prefix" @@ -1068,7 +1069,7 @@ class NormalizeInputSpec(SPMCommandInputSpec): class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath( File(exists=True), - desc=("MAT files containing " "the normalization " "parameters"), + desc=("MAT files containing the normalization parameters"), ) normalized_source = OutputMultiPath( File(exists=True), desc="Normalized source files" @@ -1175,7 +1176,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): image_to_align = ImageFileSPM( exists=True, field="subj.vol", - desc=("file to estimate normalization parameters " "with"), + desc=("file to estimate normalization parameters with"), xor=["deformation_file"], mandatory=True, copyfile=True, @@ -1235,7 +1236,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): tpm = File( exists=True, field="eoptions.tpm", - desc=("template in form of tissue probablitiy maps to " "normalize to"), + desc=("template in form of tissue probablitiy maps to normalize to"), xor=["deformation_file"], copyfile=False, ) @@ -1247,15 +1248,15 @@ class Normalize12InputSpec(SPMCommandInputSpec): field="eoptions.reg", minlen=5, maxlen=5, - desc=("controls balance between " "parameters and data"), + desc=("controls balance between parameters and data"), ) smoothness = traits.Float( field="eoptions.fwhm", - desc=("value (in mm) to smooth the data before " "normalization"), + desc=("value (in mm) to smooth the data before normalization"), ) sampling_distance = traits.Float( field="eoptions.samp", - desc=("Sampling distance on data for " "parameter estimation"), + desc=("Sampling distance on data for parameter estimation"), ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), @@ -1283,7 +1284,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): low=0, high=7, field="woptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) out_prefix = traits.String( "w", field="woptions.prefix", usedefault=True, desc="Normalized output prefix" @@ -1301,7 +1302,7 @@ class Normalize12OutputSpec(TraitedSpec): ), ) normalized_image = OutputMultiPath( - File(exists=True), desc=("Normalized file that needed to " "be aligned") + File(exists=True), desc=("Normalized file that needed to be aligned") ) normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") @@ -1456,24 +1457,24 @@ class SegmentInputSpec(SPMCommandInputSpec): Modulated + Unmodulated Normalised: [True,True,False]""", ) save_bias_corrected = traits.Bool( - field="output.biascor", desc=("True/False produce a bias " "corrected image") + field="output.biascor", desc=("True/False produce a bias corrected image") ) clean_masks = traits.Enum( "no", "light", "thorough", field="output.cleanup", - desc=("clean using estimated brain mask " "('no','light','thorough')"), + desc=("clean using estimated brain mask ('no','light','thorough')"), ) tissue_prob_maps = traits.List( File(exists=True), field="opts.tpm", - desc=("list of gray, white & csf prob. " "(opt,)"), + desc=("list of gray, white & csf prob. 
(opt,)"), ) gaussians_per_class = traits.List( traits.Int(), field="opts.ngaus", - desc=("num Gaussians capture intensity " "distribution"), + desc=("num Gaussians capture intensity distribution"), ) affine_regularization = traits.Enum( "mni", @@ -1490,7 +1491,7 @@ class SegmentInputSpec(SPMCommandInputSpec): ), ) warping_regularization = traits.Float( - field="opts.warpreg", desc=("Controls balance between " "parameters and data") + field="opts.warpreg", desc=("Controls balance between parameters and data") ) warp_frequency_cutoff = traits.Float( field="opts.warpco", desc="Cutoff of DCT bases" @@ -1525,7 +1526,7 @@ class SegmentInputSpec(SPMCommandInputSpec): ) sampling_distance = traits.Float( field="opts.samp", - desc=("Sampling distance on data for " "parameter estimation"), + desc=("Sampling distance on data for parameter estimation"), ) mask_image = File( exists=True, @@ -1537,13 +1538,13 @@ class SegmentInputSpec(SPMCommandInputSpec): class SegmentOutputSpec(TraitedSpec): native_gm_image = File(desc="native space grey probability map") normalized_gm_image = File(desc="normalized grey probability map",) - modulated_gm_image = File(desc=("modulated, normalized grey " "probability map")) + modulated_gm_image = File(desc=("modulated, normalized grey probability map")) native_wm_image = File(desc="native space white probability map") normalized_wm_image = File(desc="normalized white probability map") - modulated_wm_image = File(desc=("modulated, normalized white " "probability map")) + modulated_wm_image = File(desc=("modulated, normalized white probability map")) native_csf_image = File(desc="native space csf probability map") normalized_csf_image = File(desc="normalized csf probability map") - modulated_csf_image = File(desc=("modulated, normalized csf " "probability map")) + modulated_csf_image = File(desc=("modulated, normalized csf probability map")) modulated_input_image = File( deprecated="0.10", new_name="bias_corrected_image", @@ -1682,14 +1683,14 @@ class NewSegmentInputSpec(SPMCommandInputSpec): ) sampling_distance = traits.Float( field="warp.samp", - desc=("Sampling distance on data for " "parameter estimation"), + desc=("Sampling distance on data for parameter estimation"), ) write_deformation_fields = traits.List( traits.Bool(), minlen=2, maxlen=2, field="warp.write", - desc=("Which deformation fields to " "write:[Inverse, Forward]"), + desc=("Which deformation fields to write:[Inverse, Forward]"), ) @@ -1704,7 +1705,7 @@ class NewSegmentOutputSpec(TraitedSpec): traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( - traits.List(File(exists=True)), desc=("modulated+normalized class " "images") + traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" @@ -1883,7 +1884,7 @@ class SmoothInputSpec(SPMCommandInputSpec): ) data_type = traits.Int(field="dtype", desc="Data type of the output images") implicit_masking = traits.Bool( - field="im", desc=("A mask implied by a particular" "voxel value") + field="im", desc=("A mask implied by a particular voxel value") ) out_prefix = traits.String( "s", field="prefix", usedefault=True, desc="smoothed output prefix" @@ -1957,7 +1958,7 @@ class DARTELInputSpec(SPMCommandInputSpec): "Membrane", "Bending", field="warp.settings.rform", - desc=("Form of regularization energy " "term"), + desc=("Form of regularization energy term"), ) iteration_parameters = traits.List( traits.Tuple( 
@@ -1969,31 +1970,36 @@ class DARTELInputSpec(SPMCommandInputSpec): minlen=3, maxlen=12, field="warp.settings.param", - desc="""List of tuples for each iteration - - Inner iterations - - Regularization parameters - - Time points for deformation model - - smoothing parameter - """, + desc="""\ +List of tuples for each iteration: + + * Inner iterations + * Regularization parameters + * Time points for deformation model + * smoothing parameter + +""", ) optimization_parameters = traits.Tuple( traits.Float, traits.Range(1, 8), traits.Range(1, 8), field="warp.settings.optim", - desc=""" - Optimization settings a tuple - - LM regularization - - cycles of multigrid solver - - relaxation iterations - """, + desc="""\ +Optimization settings, a tuple: + + * LM regularization + * cycles of multigrid solver + * relaxation iterations + +""", ) class DARTELOutputSpec(TraitedSpec): final_template_file = File(exists=True, desc="final DARTEL template") template_files = traits.List( - File(exists=True), desc=("Templates from different stages of " "iteration") + File(exists=True), desc=("Templates from different stages of iteration") ) dartel_flow_fields = traits.List(File(exists=True), desc="DARTEL flow fields") @@ -2104,7 +2110,7 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): ) modulate = traits.Bool( field="mni_norm.preserve", - desc=("Modulate out images - no modulation " "preserves concentrations"), + desc=("Modulate out images - no modulation preserves concentrations"), ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), @@ -2119,7 +2125,7 @@ class DARTELNorm2MNIOutputSpec(TraitedSpec): normalized_files = OutputMultiPath( File(exists=True), desc="Normalized files in MNI space" ) normalization_parameter_file = File( - exists=True, desc=("Transform parameters to MNI " "space") + exists=True, desc=("Transform parameters to MNI space") ) @@ -2203,7 +2209,7 @@ class CreateWarpedInputSpec(SPMCommandInputSpec): iterations = traits.Range( low=0, high=9, - desc=("The number of iterations: log2(number of " "time steps)"), + desc=("The number of iterations: log2(number of time steps)"), field="crt_warped.K", ) interp = traits.Range( @@ -2491,7 +2497,7 @@ class VBMSegmentOuputSpec(TraitedSpec): traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( - traits.List(File(exists=True)), desc=("modulated+normalized class " "images") + traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index c44c4678d3..928ff19fc2 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +"""VistaSoft contains Matlab code to perform a variety of analyses on MRI data.""" from .vista import Vnifti2Image, VtoMat diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index 4bb941c7f9..12823b10cc 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -31,12 +31,12 @@ class Vnifti2Image(CommandLine): Example ------- - >>> vimage = Vnifti2Image() >>> vimage.inputs.in_file = 'image.nii' >>> vimage.cmdline 'vnifti2image -in image.nii -out image.v' - >>> vimage.run() # doctest: +SKIP + >>> vimage.run() # doctest: +SKIP + """ _cmd = "vnifti2image" @@ -69,12 +69,12 @@ 
class VtoMat(CommandLine): Example ------- - >>> vimage = VtoMat() >>> vimage.inputs.in_file = 'image.v' >>> vimage.cmdline 'vtomat -in image.v -out image.mat' - >>> vimage.run() # doctest: +SKIP + >>> vimage.run() # doctest: +SKIP + """ _cmd = "vtomat" diff --git a/nipype/interfaces/workbench/__init__.py b/nipype/interfaces/workbench/__init__.py index 1de46f8953..fb68624c88 100644 --- a/nipype/interfaces/workbench/__init__.py +++ b/nipype/interfaces/workbench/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""Connectome Workbench is a visualization tool for neuroimaging data, especially data derived from the Human Connectome Project (HCP).""" from .metric import MetricResample from .cifti import CiftiSmooth diff --git a/nipype/interfaces/workbench/base.py b/nipype/interfaces/workbench/base.py index 82c12420b9..d91c85d9f6 100644 --- a/nipype/interfaces/workbench/base.py +++ b/nipype/interfaces/workbench/base.py @@ -5,9 +5,9 @@ The workbench module provides classes for interfacing with `connectome workbench `_ tools. -`Connectome Workbench is an open source, freely available visualization and - discovery tool used to map neuroimaging data, especially data generated by the - Human Connectome Project. +Connectome Workbench is an open source, freely available visualization and +discovery tool used to map neuroimaging data, especially data generated by the +Human Connectome Project. """ import os @@ -21,9 +21,7 @@ class Info(PackageInfo): - """ - Handle `wb_command` version information. - """ + """Handle Connectome Workbench version information.""" version_cmd = "wb_command -version" diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py new file mode 100644 index 0000000000..a7b45241d3 --- /dev/null +++ b/nipype/sphinxext/apidoc/__init__.py @@ -0,0 +1,188 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Settings for nipype.sphinxext.apidoc and its connection to sphinx-apidoc.""" +import re +from sphinxcontrib.napoleon import ( + Config as NapoleonConfig, + _patch_python_domain, + _skip_member as _napoleon_skip_member ) + +from ... import __version__ +from ...interfaces.base import BaseInterface, TraitedSpec +from .docstring import NipypeDocstring, InterfaceDocstring + + +class Config(NapoleonConfig): + r""" + Sphinx-nipype extension settings in ``conf.py``. + + Listed below are all the settings used by this extension + and their default values. + These settings can be changed in the Sphinx's ``conf.py`` file. + Make sure that ``nipype.sphinxext.apidoc`` is enabled + in ``conf.py``:: + + # conf.py + + # Add this extension to the corresponding list: + extensions = ['nipype.sphinxext.apidoc'] + + # NiPype settings + nipype_skip_classes = [] + + Attributes + ---------- + nipype_skip_classes : :obj:`list` of :obj:`str` (see defaults below) + Regular expressions; interface classes whose name matches any of + these patterns are skipped when building the API documentation. + + """ + _config_values = { 'nipype_skip_classes': ([ "AFNI(Python)?Command", "ANTS", "FSLCommand", "FS(Command|Script)", "Info", "^SPM", "Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester", ], 'env'), **NapoleonConfig._config_values } + + +def setup(app): + # type: (Sphinx) -> Dict[unicode, Any] + """ + Sphinx extension setup function. 
+ + When the extension is loaded, Sphinx imports this module and executes + the ``setup()`` function, which in turn notifies Sphinx of everything + the extension offers. + + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process + + See Also + -------- + `The Sphinx documentation on Extensions + `_ + `The Extension Tutorial `_ + `The Extension API `_ + + """ + from sphinx.application import Sphinx + if not isinstance(app, Sphinx): + # probably called by tests + return {'version': __version__, 'parallel_read_safe': True} + + _patch_python_domain() + + app.setup_extension('sphinx.ext.autodoc') + app.connect('autodoc-process-docstring', _process_docstring) + app.connect('autodoc-skip-member', _skip_member) + + for name, (default, rebuild) in Config._config_values.items(): + app.add_config_value(name, default, rebuild) + return {'version': __version__, 'parallel_read_safe': True} + + +def _process_docstring(app, what, name, obj, options, lines): + # type: (Sphinx, unicode, unicode, Any, Any, List[unicode]) -> None + """Process the docstring for a given python object. + Called when autodoc has read and processed a docstring. `lines` is a list + of docstring lines that `_process_docstring` modifies in place to change + what Sphinx outputs. + The following settings in conf.py control what styles of docstrings will + be parsed: + * ``napoleon_google_docstring`` -- parse Google style docstrings + * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process. + what : str + A string specifying the type of the object to which the docstring + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : str + The fully qualified name of the object. + obj : module, class, exception, function, method, or attribute + The object to which the docstring belongs. + options : sphinx.ext.autodoc.Options + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. + lines : list of str + The lines of the docstring, see above. + .. note:: `lines` is modified *in place* + """ + result_lines = lines + # Parse Nipype Interfaces + if what == "class" and issubclass(obj, BaseInterface): + result_lines[:] = InterfaceDocstring( + result_lines, app.config, app, what, name, obj, options).lines() + + result_lines = NipypeDocstring(result_lines, app.config, app, what, name, + obj, options).lines() + lines[:] = result_lines[:] + + +def _skip_member(app, what, name, obj, skip, options): + # type: (Sphinx, unicode, unicode, Any, bool, Any) -> bool + """ + Determine if private and special class members are included in docs. + + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process + what : str + A string specifying the type of the object to which the member + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : str + The name of the member. + obj : module, class, exception, function, method, or attribute. + For example, if the member is the __init__ method of class A, then + `obj` will be `A.__init__`. 
+ skip : bool + A boolean indicating if autodoc will skip this member if `_skip_member` + does not override the decision. + options : sphinx.ext.autodoc.Options + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. + Returns + ------- + bool + True if the member should be skipped during creation of the docs, + False if it should be included in the docs. + + """ + # Parse Nipype Interfaces + patterns = [pat if hasattr(pat, 'search') else re.compile(pat) + for pat in app.config.nipype_skip_classes] + isbase = False + try: + isbase = issubclass(obj, BaseInterface) + if issubclass(obj, TraitedSpec): + return True + except TypeError: + pass + + if isbase: + for pattern in patterns: + if pattern.search(name): + return True + + return _napoleon_skip_member(app, what, name, obj, skip, options) diff --git a/nipype/sphinxext/apidoc/docstring.py b/nipype/sphinxext/apidoc/docstring.py new file mode 100644 index 0000000000..f5191d5155 --- /dev/null +++ b/nipype/sphinxext/apidoc/docstring.py @@ -0,0 +1,159 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Reformat interface docstrings.""" +import re +from sphinxcontrib.napoleon._upstream import _ +from sphinxcontrib.napoleon.docstring import NumpyDocstring + + +class NipypeDocstring(NumpyDocstring): + """Patch the NumpyDocstring from napoleon to get special section headers.""" + + def _parse_parameters_section(self, section): + # type: (unicode) -> List[unicode] + labels = { 'args': _('Parameters'), 'arguments': _('Parameters'), 'parameters': _('Parameters'), } # type: Dict[unicode, unicode] + label = labels.get(section.lower(), section) + + fields = self._consume_fields() + if self._config.napoleon_use_param: + return self._format_docutils_params(fields) + + return self._format_fields(label, fields) + + +class InterfaceDocstring(NipypeDocstring): + """ + Convert docstrings of Nipype Interfaces to reStructuredText. + + Parameters + ---------- + docstring : :obj:`str` or :obj:`list` of :obj:`str` + The docstring to parse, given either as a string or split into + individual lines. + config : :obj:`sphinxcontrib.napoleon.Config` or :obj:`sphinx.config.Config` + The configuration settings to use. If not given, defaults to the + config object on `app`; or, if `app` is not given, defaults to + a new :class:`nipype.sphinxext.apidoc.Config` object. + + Other Parameters + ---------------- + app : :class:`sphinx.application.Sphinx`, optional + Application object representing the Sphinx process. + what : :obj:`str`, optional + A string specifying the type of the object to which the docstring + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : :obj:`str`, optional + The fully qualified name of the object. + obj : module, class, exception, function, method, or attribute + The object to which the docstring belongs. + options : :class:`sphinx.ext.autodoc.Options`, optional + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. 
+ + """ + + _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" + r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) + + def __init__(self, docstring, config=None, app=None, what='', name='', + obj=None, options=None): + # type: (Union[unicode, List[unicode]], SphinxConfig, Sphinx, unicode, unicode, Any, Any) -> None # NOQA + super().__init__(docstring, config, app, what, name, obj, options) + + cmd = getattr(obj, '_cmd', '') + if cmd and cmd.strip(): + self._parsed_lines = [ + 'Wrapped executable: ``%s``.' % cmd.strip(), + ''] + self._parsed_lines + + if obj is not None: + self._parsed_lines += _parse_interface(obj) + + +def _parse_interface(obj): + """Print description for input parameters.""" + parsed = [] + if obj.input_spec: + inputs = obj.input_spec() + mandatory_items = sorted(inputs.traits(mandatory=True).items()) + if mandatory_items: + parsed += ["", "Mandatory Inputs"] + parsed += ["-" * len(parsed[-1])] + for name, spec in mandatory_items: + parsed += _parse_spec(inputs, name, spec) + + mandatory_keys = {item[0] for item in mandatory_items} + optional_items = sorted([ + (name, val) for name, val in inputs.traits(transient=None).items() + if name not in mandatory_keys + ]) + if optional_items: + parsed += ["", "Optional Inputs"] + parsed += ["-" * len(parsed[-1])] + for name, spec in optional_items: + parsed += _parse_spec(inputs, name, spec) + + if obj.output_spec: + outputs = sorted(obj.output_spec().traits(transient=None).items()) + if outputs: + parsed += ["", "Outputs"] + parsed += ["-" * len(parsed[-1])] + for name, spec in outputs: + parsed += _parse_spec(inputs, name, spec) + + return parsed + + +def _indent(lines, n=4): + # type: (List[unicode], int) -> List[unicode] + return [(' ' * n) + line for line in lines] + + +def _parse_spec(inputs, name, spec): + """Parse a HasTraits object into a Numpy-style docstring.""" + desc_lines = [] + if spec.desc: + desc = ''.join([spec.desc[0].capitalize(), spec.desc[1:]]) + if not desc.endswith('.') and not desc.endswith('\n'): + desc = '%s.' % desc + desc_lines += desc.splitlines() + + argstr = spec.argstr + if argstr and argstr.strip(): + pos = spec.position + if pos is None: + desc_lines += ["""Maps to a command-line argument: :code:`{arg}`.""".format( + arg=argstr.strip())] + else: + desc_lines += [ + """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format( + arg=argstr.strip(), pos=pos)] + + xor = spec.xor + if xor: + desc_lines += ["Mutually **exclusive** with inputs: %s." % ", ".join( + ["``%s``" % x for x in xor])] + + requires = spec.requires + if requires: + desc_lines += ["**Requires** inputs: %s." % ", ".join( + ["``%s``" % x for x in requires])] + + if spec.usedefault: + default = spec.default_value()[1] + if isinstance(default, (bytes, str)) and not default: + default = '""' + + desc_lines += ["(Nipype **default** value: ``%s``)" % str(default)] + + out_rst = ["{name} : {type}".format(name=name, type=spec.full_info(inputs, name, None))] + out_rst += _indent(desc_lines, 4) + + return out_rst diff --git a/nipype/sphinxext/documenter.py b/nipype/sphinxext/documenter.py new file mode 100644 index 0000000000..9e15c57f49 --- /dev/null +++ b/nipype/sphinxext/documenter.py @@ -0,0 +1,72 @@ +"""sphinx autodoc ext.""" +from sphinx.locale import _ +from sphinx.ext import autodoc +from nipype.interfaces.base import BaseInterface +from .gh import get_url + +_ClassDocumenter = autodoc.ClassDocumenter +RST_CLASS_BLOCK = """ +.. index:: {name} + +.. 
_{module}.{name}: + +{name} +{underline} +`Link to code <{code_url}>`__ + +""" + + +class NipypeClassDocumenter(_ClassDocumenter): # type: ignore + priority = 20 + + def add_directive_header(self, sig: str) -> None: + if self.doc_as_attr: + self.directivetype = 'attribute' + + # Copied from super + domain = getattr(self, 'domain', 'py') + directive = getattr(self, 'directivetype', self.objtype) + name = self.format_name() + sourcename = self.get_sourcename() + + is_interface = False + try: + is_interface = issubclass(self.object, BaseInterface) + except TypeError: + pass + + if is_interface is True: + lines = RST_CLASS_BLOCK.format( + code_url=get_url(self.object), + module=self.modname, + name=name, + underline='=' * len(name), + ) + for line in lines.splitlines(): + self.add_line(line, sourcename) + else: + self.add_line('.. %s:%s:: %s%s' % (domain, directive, name, sig), + sourcename) + if self.options.noindex: + self.add_line(' :noindex:', sourcename) + if self.objpath: + # Be explicit about the module, this is necessary since .. class:: + # etc. don't support a prepended module name + self.add_line(' :module: %s' % self.modname, sourcename) + + # add inheritance info, if wanted + if not self.doc_as_attr and self.options.show_inheritance: + sourcename = self.get_sourcename() + self.add_line('', sourcename) + if hasattr(self.object, '__bases__') and len(self.object.__bases__): + bases = [':class:`%s`' % b.__name__ + if b.__module__ in ('__builtin__', 'builtins') + else ':class:`%s.%s`' % (b.__module__, b.__name__) + for b in self.object.__bases__] + self.add_line(' ' + _('Bases: %s') % ', '.join(bases), + sourcename) + + +def setup(app): + app.add_autodocumenter(NipypeClassDocumenter) diff --git a/nipype/sphinxext/gh.py b/nipype/sphinxext/gh.py new file mode 100644 index 0000000000..6658fda361 --- /dev/null +++ b/nipype/sphinxext/gh.py @@ -0,0 +1,32 @@ +"""Build a file URL.""" +import os +import inspect +import subprocess + +REVISION_CMD = 'git rev-parse --short HEAD' + + +def _get_git_revision(): + # Comes from scikit-learn + # https://github.com/scikit-learn/scikit-learn/blob/master/doc/sphinxext/github_link.py + try: + revision = subprocess.check_output(REVISION_CMD.split()).strip() + except (subprocess.CalledProcessError, OSError): + return None + return revision.decode('utf-8') + + +def get_url(obj): + """Return local or remote url for an object.""" + filename = inspect.getsourcefile(obj) + uri = "file://%s" % filename + revision = _get_git_revision() + if revision is not None: + shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) + uri = "http://github.com/nipy/nipype/blob/%s/%s" % ( + revision, + shortfile, + ) + lines, lstart = inspect.getsourcelines(obj) + lend = len(lines) + lstart + return '%s#L%d-L%d' % (uri, lstart, lend) diff --git a/rtd_requirements.txt b/rtd_requirements.txt deleted file mode 100644 index 8cb274347a..0000000000 --- a/rtd_requirements.txt +++ /dev/null @@ -1,20 +0,0 @@ -configparser -funcsigs -future>=0.16.0 -matplotlib -mock -networkx>=1.9 -nibabel>=2.1.0 -numpy>=1.9.0 -numpydoc -packaging -prov>=1.5.2 -neurdflib -psutil -pydot>=1.2.3 -pydotplus -pytest>=3.0 -python-dateutil>=2.2 -scipy>=0.14 -simplejson>=3.8.0 -traits>=4.6 diff --git a/tools/apigen.py b/tools/apigen.py deleted file mode 100644 index 19e47b5c20..0000000000 --- a/tools/apigen.py +++ /dev/null @@ -1,730 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Attempt to generate 
templates for module reference with Sphinx - -XXX - we exclude extension modules - -To include extension modules, first identify them as valid in the -``_uri2path`` method, then handle them in the ``_parse_module`` script. - -We get functions and classes by parsing the text of .py files. -Alternatively we could import the modules for discovery, and we'd have -to do that for extension modules. This would involve changing the -``_parse_module`` method to work via import and introspection, and -might involve changing ``discover_modules`` (which determines which -files are modules, and therefore which module URIs will be passed to -``_parse_module``). - -NOTE: this is a modified version of a script originally shipped with the -PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed -project. -""" -import os -import sys -import re -import tempfile -import warnings - -from nipype.interfaces.base import BaseInterface -from nipype.pipeline.engine import Workflow -from nipype.utils.misc import trim - -from github import get_file_url - -RST_SECTION_LEVELS = ("*", "=", "-", "~", "^") - -RST_CLASS_BLOCK = """ -.. _{uri}.{cls}: - -.. index:: {cls} - -{cls} -{underline} -`Link to code <{code_url}>`__ - -{body} -""" - -RST_FUNC_BLOCK = """ -.. _{uri}.{name}: - -:func:`{name}` -{underline} -`Link to code <{code_url}>`__ - -{body} - -""" - - -# Functions and classes -class ApiDocWriter(object): - """Write reST documents for API docs.""" - - # only separating first two levels - rst_section_levels = RST_SECTION_LEVELS - - def __init__( - self, - package_name, - rst_extension=".rst", - package_skip_patterns=(r"\.tests$",), - module_skip_patterns=(r"\.setup$", r"\._"), - ): - r""" - Initialize package for parsing. - - Parameters - ---------- - package_name : string - Name of the top-level package. *package_name* must be the - name of an importable package - rst_extension : string, optional - Extension for reST files, default '.rst' - package_skip_patterns : None or sequence of {strings, regexps} - Sequence of strings giving URIs of packages to be excluded - Operates on the package path, starting at (including) the - first dot in the package path, after *package_name* - so, - if *package_name* is ``sphinx``, then ``sphinx.util`` will - result in ``.util`` being passed for earching by these - regexps. If is None, gives default. Default is: - ``('\.tests$', )``. - module_skip_patterns : None or sequence - Sequence of strings giving URIs of modules to be excluded - Operates on the module name including preceding URI path, - back to the first dot after *package_name*. For example - ``sphinx.util.console`` results in the string to search of - ``.util.console`` - If is None, gives default. Default is: - ``('\.setup$', '\._')``. - - """ - self._skip_patterns = {} - self.rst_extension = rst_extension - self.package_name = package_name - self.package_skip_patterns = package_skip_patterns - self.module_skip_patterns = module_skip_patterns - - @property - def package_name(self): - """Get package name.""" - return self._package_name - - @package_name.setter - def package_name(self, name): - """ - Set package_name. 
- - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> docwriter.root_path == sphinx.__path__[0] - True - >>> docwriter.package_name = 'docutils' - >>> import docutils - >>> docwriter.root_path == docutils.__path__[0] - True - - """ - # It's also possible to imagine caching the module parsing here - self._package_name = name - self.root_module = __import__(name) - self.root_path = self.root_module.__path__[0] - self.written_modules = None - - @property - def package_skip_patterns(self): - """Get package skip patterns.""" - return self._skip_patterns['package'] - - @package_skip_patterns.setter - def package_skip_patterns(self, pattern): - self._skip_patterns['package'] = _parse_patterns(pattern) - - @property - def module_skip_patterns(self): - """Get module skip patterns.""" - return self._skip_patterns['module'] - - @module_skip_patterns.setter - def module_skip_patterns(self, pattern): - self._skip_patterns['module'] = _parse_patterns(pattern) - - def _get_object_name(self, line): - """ - Get second token in line. - - >>> docwriter = ApiDocWriter('sphinx') - >>> docwriter._get_object_name(" def func(): ") - u'func' - >>> docwriter._get_object_name(" class Klass(object): ") - 'Klass' - >>> docwriter._get_object_name(" class Klass: ") - 'Klass' - """ - name = line.split()[1].split("(")[0].strip() - # in case we have classes which are not derived from object - # ie. old style classes - return name.rstrip(":") - - def _uri2path(self, uri): - """ - Convert uri to absolute filepath. - - Parameters - ---------- - uri : string - URI of python module to return path for - - Returns - ------- - path : None or string - Returns None if there is no valid path for this URI - Otherwise returns absolute file system path for URI - - Examples - -------- - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> modpath = sphinx.__path__[0] - >>> res = docwriter._uri2path('sphinx.builder') - >>> res == os.path.join(modpath, 'builder.py') - True - >>> res = docwriter._uri2path('sphinx') - >>> res == os.path.join(modpath, '__init__.py') - True - >>> docwriter._uri2path('sphinx.does_not_exist') - - """ - if uri == self.package_name: - return os.path.join(self.root_path, "__init__.py") - path = uri.replace(".", os.path.sep) - path = path.replace(self.package_name + os.path.sep, "") - path = os.path.join(self.root_path, path) - # XXX maybe check for extensions as well? - if os.path.exists(path + ".py"): # file - path += ".py" - elif os.path.exists(os.path.join(path, "__init__.py")): - path = os.path.join(path, "__init__.py") - else: - return None - return path - - def _path2uri(self, dirpath): - """Convert directory path to uri.""" - relpath = dirpath.replace(self.root_path, self.package_name) - if relpath.startswith(os.path.sep): - relpath = relpath[1:] - return relpath.replace(os.path.sep, ".") - - def _parse_module(self, uri): - """Parse module defined in ``uri``.""" - filename = self._uri2path(uri) - if filename is None: - # nothing that we could handle here. 
- return ([], []) - f = open(filename, "rt") - functions, classes = self._parse_lines(f, uri) - f.close() - return functions, classes - - def _parse_lines(self, linesource, module=None): - """Parse lines of text for functions and classes.""" - functions = [] - classes = [] - for line in linesource: - if line.startswith("def ") and line.count("("): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - functions.append(name) - elif line.startswith("class "): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - classes.append(name) - else: - pass - functions.sort() - classes.sort() - return functions, classes - - def generate_api_doc(self, uri): - """ - Make autodoc documentation template string for a module. - - Parameters - ---------- - uri : string - python location of module - e.g 'sphinx.builder' - - Returns - ------- - S : string - Contents of API doc - - """ - # get the names of all classes and functions - functions, classes = self._parse_module(uri) - if not len(functions) and not len(classes): - print(("WARNING: Empty -", uri)) # dbg - return "" - - # Make a shorter version of the uri that omits the package name for - # titles - uri_short = re.sub(r"^%s\." % self.package_name, "", uri) - - ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" - - chap_title = uri_short - ad += chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n" - - # Set the chapter title to read 'module' for all modules except for the - # main packages - if "." in uri: - title = "Module: :mod:`" + uri_short + "`" - else: - title = ":mod:`" + uri_short + "`" - ad += title + "\n" + self.rst_section_levels[2] * len(title) - - if len(classes): - ad += "\nInheritance diagram for ``%s``:\n\n" % uri - ad += ".. inheritance-diagram:: %s \n" % uri - ad += " :parts: 2\n" - - ad += "\n.. automodule:: " + uri + "\n" - ad += "\n.. currentmodule:: " + uri + "\n" - multi_class = len(classes) > 1 - multi_fx = len(functions) > 1 - if multi_class: - ad += "\n" + "Classes" + "\n" + self.rst_section_levels[2] * 7 + "\n" - elif len(classes) and multi_fx: - ad += "\n" + "Class" + "\n" + self.rst_section_levels[2] * 5 + "\n" - for c in classes: - ad += ( - "\n:class:`" - + c - + "`\n" - + self.rst_section_levels[multi_class + 2] * (len(c) + 9) - + "\n\n" - ) - ad += "\n.. autoclass:: " + c + "\n" - # must NOT exclude from index to keep cross-refs working - ad += ( - " :members:\n" - " :undoc-members:\n" - " :show-inheritance:\n" - " :inherited-members:\n" - "\n" - " .. automethod:: __init__\n" - ) - if multi_fx: - ad += "\n" + "Functions" + "\n" + self.rst_section_levels[2] * 9 + "\n\n" - elif len(functions) and multi_class: - ad += "\n" + "Function" + "\n" + self.rst_section_levels[2] * 8 + "\n\n" - for f in functions: - # must NOT exclude from index to keep cross-refs working - ad += "\n.. autofunction:: " + uri + "." + f + "\n\n" - return ad - - def _survives_exclude(self, matchstr, match_type): - r""" - Return ``True`` if ``matchstr`` does not match patterns. 
- - ``self.package_name`` removed from front of string if present - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> dw._survives_exclude('sphinx.okpkg', 'package') - True - >>> dw.package_skip_patterns.append(r'^\.badpkg$') - >>> dw._survives_exclude('sphinx.badpkg', 'package') - False - >>> dw._survives_exclude('sphinx.badpkg', 'module') - True - >>> dw._survives_exclude('sphinx.badmod', 'module') - True - >>> dw.module_skip_patterns.append(r'^\.badmod$') - >>> dw._survives_exclude('sphinx.badmod', 'module') - False - - """ - patterns = self._skip_patterns.get(match_type) - if patterns is None: - raise ValueError('Cannot interpret match type "%s"' % match_type) - - # Match to URI without package name - L = len(self.package_name) - if matchstr[:L] == self.package_name: - matchstr = matchstr[L:] - for pat in patterns: - try: - pat.search - except AttributeError: - pat = re.compile(pat) - if pat.search(matchstr): - return False - return True - - def discover_modules(self, empty_start=True): - r""" - Return module sequence discovered from ``self.package_name``. - - Parameters - ---------- - None - - Returns - ------- - mods : sequence - Sequence of module names within ``self.package_name`` - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> mods = dw.discover_modules() - >>> 'sphinx.util' in mods - True - >>> dw.package_skip_patterns.append('\.util$') - >>> 'sphinx.util' in dw.discover_modules() - False - >>> - - """ - modules = [] if empty_start else [self.package_name] - # raw directory parsing - for dirpath, dirnames, filenames in os.walk(self.root_path): - # Check directory names for packages - root_uri = self._path2uri(os.path.join(self.root_path, dirpath)) - for dirname in dirnames[:]: # copy list - we modify inplace - package_uri = ".".join((root_uri, dirname)) - if self._uri2path(package_uri) and self._survives_exclude( - package_uri, "package" - ): - modules.append(package_uri) - else: - dirnames.remove(dirname) - # Check filenames for modules - for filename in filenames: - module_name = filename[:-3] - module_uri = ".".join((root_uri, module_name)) - if self._uri2path(module_uri) and self._survives_exclude( - module_uri, "module" - ): - modules.append(module_uri) - return sorted(modules) - - def write_modules_api(self, modules, outdir): - """Generate the list of modules.""" - written_modules = [] - for m in modules: - api_str = self.generate_api_doc(m) - if not api_str: - continue - # write out to file - outfile = os.path.join(outdir, m + self.rst_extension) - fileobj = open(outfile, "wt") - fileobj.write(api_str) - fileobj.close() - written_modules.append(m) - self.written_modules = written_modules - - def write_api_docs(self, outdir): - """ - Generate API reST files. - - Parameters - ---------- - outdir : string - Directory name in which to store files - We create automatic filenames for each module - - Returns - ------- - None - - Notes - ----- - Sets ``self.written_modules`` to list of written modules - - """ - if not os.path.exists(outdir): - os.mkdir(outdir) - # compose list of modules - modules = self.discover_modules() - self.write_modules_api(modules, outdir) - - def write_index(self, outdir, froot="gen", relative_to=None, - maxdepth=None): - """ - Make a reST API index file from written files. - - Parameters - ---------- - path : string - Filename to write index to - outdir : string - Directory to which to write generated index file - froot : string, optional - root (filename without extension) of filename to write to - Defaults to 'gen'. 
We add ``self.rst_extension``. - relative_to : string - path to which written filenames are relative. This - component of the written file path will be removed from - outdir, in the generated index. Default is None, meaning, - leave path as it is. - - """ - if self.written_modules is None: - raise ValueError("No modules written") - # Get full filename path - path = os.path.join(outdir, froot + self.rst_extension) - # Path written into index is relative to rootpath - if relative_to is not None: - relpath = outdir.replace(relative_to + os.path.sep, "") - else: - relpath = outdir - idx = open(path, "wt") - w = idx.write - w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n") - if maxdepth is None: - w(".. toctree::\n\n") - else: - w(".. toctree::\n") - w(" :maxdepth: %d\n\n" % maxdepth) - for f in self.written_modules: - w(" %s\n" % os.path.join(relpath, f)) - idx.close() - - -class InterfaceHelpWriter(ApiDocWriter): - """Convert interface specs to rST.""" - - def __init__( - self, - package_name, - class_skip_patterns=None, - **kwargs - ): - """ - Initialize an :py:mod:`ApiDocWriter` for interface specs. - - Additional Parameters - --------------------- - class_skip_patterns : None or sequence - Sequence of strings giving classes to be excluded - Default is: None - - """ - super().__init__(package_name, **kwargs) - self.class_skip_patterns = class_skip_patterns - - @property - def class_skip_patterns(self): - """Get class skip patterns.""" - return self._skip_patterns['class'] - - @class_skip_patterns.setter - def class_skip_patterns(self, pattern): - self._skip_patterns['class'] = _parse_patterns(pattern) - - def _parse_lines(self, linesource, module=None): - """Parse lines of text for functions and classes.""" - functions = [] - classes = [] - for line in linesource: - if line.startswith("def ") and line.count("("): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - functions.append(name) - elif line.startswith("class "): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_") and self._survives_exclude( - ".".join((module, name)), "class" - ): - classes.append(name) - else: - pass - functions.sort() - classes.sort() - return functions, classes - - def _write_graph_section(self, fname, title): - ad = "\n%s\n%s\n\n" % (title, self.rst_section_levels[3] * len(title)) - ad += ".. graphviz::\n\n" - fhandle = open(fname) - for line in fhandle: - ad += "\t" + line + "\n" - - fhandle.close() - os.remove(fname) - bitmap_fname = "{}.png".format(os.path.splitext(fname)[0]) - os.remove(bitmap_fname) - return ad - - def generate_api_doc(self, uri): - """ - Make autodoc documentation template string for a module. 
- - Parameters - ---------- - uri : string - python location of module - e.g 'sphinx.builder' - - Returns - ------- - S : string - Contents of API doc - - """ - # get the names of all classes and functions - functions, classes = self._parse_module(uri) - workflows = [] - helper_functions = [] - for function in functions: - - try: - __import__(uri) - finst = sys.modules[uri].__dict__[function] - except TypeError: - continue - try: - workflow = finst() - except Exception: - helper_functions.append((function, finst)) - continue - - if isinstance(workflow, Workflow): - workflows.append((workflow, function, finst)) - - if not classes and not workflows and not helper_functions: - print("WARNING: Empty -", uri) # dbg - return "" - - # Make a shorter version of the uri that omits the package name for - # titles - uri_short = re.sub(r"^%s\." % self.package_name, "", uri) - # uri_short = uri - - ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" - - chap_title = uri_short - ad += chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n" - - # Set the chapter title to read 'module' for all modules except for the - # main packages - # if '.' in uri: - # title = 'Module: :mod:`' + uri_short + '`' - # else: - # title = ':mod:`' + uri_short + '`' - # ad += title + '\n' + self.rst_section_levels[2] * len(title) - - # ad += '\n' + 'Classes' + '\n' + \ - # self.rst_section_levels[2] * 7 + '\n' - for c in classes: - __import__(uri) - print(c) - try: - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - classinst = sys.modules[uri].__dict__[c] - except Exception as inst: - print(inst) - continue - - if not issubclass(classinst, BaseInterface): - continue - - ad += RST_CLASS_BLOCK.format( - uri=uri, - cls=c, - underline=self.rst_section_levels[2] * len(c), - code_url=get_file_url(classinst), - body=trim(classinst.help(returnhelp=True), self.rst_section_levels[3]) - ) - - if workflows or helper_functions: - ad += "\n.. module:: %s\n\n" % uri - - for workflow, name, finst in workflows: - ad += RST_FUNC_BLOCK.format( - uri=uri, - name=name, - underline=self.rst_section_levels[2] * (len(name) + 8), - code_url=get_file_url(finst), - body=trim(finst.__doc__, self.rst_section_levels[3]) - ) - """ - # use sphinx autodoc for function signature - ad += '\n.. _%s:\n\n' % (uri + '.' + name) - ad += '.. autofunction:: %s\n\n' % name - """ - - (_, fname) = tempfile.mkstemp(suffix=".dot") - workflow.write_graph(dotfilename=fname, graph2use="hierarchical") - ad += self._write_graph_section(fname, "Graph") + "\n" - - for name, finst in helper_functions: - ad += RST_FUNC_BLOCK.format( - uri=uri, - name=name, - underline=self.rst_section_levels[2] * (len(name) + 8), - code_url=get_file_url(finst), - body=trim(finst.__doc__, self.rst_section_levels[3]) - ) - return ad - - def discover_modules(self, empty_start=True): - """Return module sequence discovered from ``self.package_name``.""" - return super().discover_modules(empty_start=False) - - def write_modules_api(self, modules, outdir): - """Generate the list of modules.""" - written_modules = [] - for m in modules: - api_str = self.generate_api_doc(m) - if not api_str: - continue - # write out to file - mvalues = m.split(".") - if len(mvalues) > 3: - index_prefix = ".".join(mvalues[1:3]) - index_dir = os.path.join(outdir, index_prefix) - index_file = index_dir + self.rst_extension - if not os.path.exists(index_dir): - os.makedirs(index_dir) - header = """.. AUTO-GENERATED FILE -- DO NOT EDIT! - -{name} -{underline} - -.. 
toctree:: - :maxdepth: 1 - :glob: - - {name}/* - """.format( - name=index_prefix, underline="=" * len(index_prefix) - ) - with open(index_file, "wt") as fp: - fp.write(header) - m = os.path.join(index_prefix, ".".join(mvalues[3:])) - outfile = os.path.join(outdir, m + self.rst_extension) - fileobj = open(outfile, "wt") - fileobj.write(api_str) - fileobj.close() - written_modules.append(m) - self.written_modules = written_modules - - -def _parse_patterns(pattern): - if pattern is None: - return [] - if isinstance(pattern, str): - return [pattern] - if isinstance(pattern, tuple): - return list(pattern) - return pattern diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py deleted file mode 100755 index f42adc7904..0000000000 --- a/tools/build_interface_docs.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Script to auto-generate interface docs. -""" -# stdlib imports -import os -import sys - -# ***************************************************************************** -if __name__ == "__main__": - nipypepath = os.path.abspath("..") - sys.path.insert(1, nipypepath) - # local imports - from apigen import InterfaceHelpWriter - - package = "nipype" - outdir = os.path.join("interfaces", "generated") - docwriter = InterfaceHelpWriter(package) - # Packages that should not be included in generated API docs. - docwriter.package_skip_patterns += [ - r"\.external$", - r"\.fixes$", - r"\.utils$", - r"\.pipeline", - r"\.testing", - r"\.caching", - r"\.scripts", - r"\.sphinxext$", - r"\.workflows" - ] - # Modules that should not be included in generated API docs. - docwriter.module_skip_patterns += [ - r"\.conftest", - r"\.interfaces\.base$", - r"\.interfaces\.matlab$", - r"\.interfaces\.pymvpa$", - r"\.interfaces\.rest$", - r"\.interfaces\.slicer\.generate_classes$", - r"\.interfaces\.spm\.base$", - r"\.interfaces\.traits", - r"\.pipeline\.alloy$", - r"\.pipeline\.s3_node_wrapper$", - r"\.pkg_info" - r"\.scripts", - r"\.testing", - r"\.version$", - ] - docwriter.class_skip_patterns += [ - "AFNICommand", - "ANTS", - "FSLCommand", - "FS", - "Info", - "^SPM", - "Tester", - "Spec$", - "Numpy", - # NipypeTester raises an - # exception when instantiated in - # InterfaceHelpWriter.generate_api_doc - "NipypeTester", - ] - docwriter.write_api_docs(outdir) - # docwriter.write_index(outdir, "gen") - print("%d files written" % len(docwriter.written_modules)) diff --git a/tools/ex2rst b/tools/ex2rst index df24df0340..2434d16ccc 100755 --- a/tools/ex2rst +++ b/tools/ex2rst @@ -143,7 +143,7 @@ def exfile2rst(filename): else: if doc2code: doc2code = False - s += '\n::\n' + s += '\n\n.. code-block :: python\n' # has to be code s += ' %s' % line @@ -284,8 +284,8 @@ Name of the project that contains the examples. 
This name is used in the toparse.append(t) # filter parse list - if not opts.excluded is None: - toparse = [t for t in toparse if not t in opts.excluded] + if opts.excluded is not None: + toparse = [t for t in toparse if t not in opts.excluded] toparse_list = toparse toparse = set(toparse) diff --git a/tools/github.py b/tools/github.py deleted file mode 100644 index fab02772e9..0000000000 --- a/tools/github.py +++ /dev/null @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- -import http.client -import inspect -import simplejson -import os -from subprocess import Popen, PIPE - -import nipype - - -def is_git_repo(): - """Does the current nipype module have a git folder - """ - sourcepath = os.path.realpath( - os.path.join(os.path.dirname(nipype.__file__), os.path.pardir) - ) - gitpathgit = os.path.join(sourcepath, ".git") - if os.path.exists(gitpathgit): - return True - else: - return False - - -def get_local_branch(): - """Determine current branch - """ - if is_git_repo(): - o, _ = Popen( - 'git branch | grep "\* "', - shell=True, - stdout=PIPE, - cwd=os.path.dirname(nipype.__file__), - ).communicate() - return o.strip()[2:] - else: - return None - - -def get_remote_branch(): - """Get remote branch for current branch - """ - - pass - - -def create_hash_map(): - """Create a hash map for all objects - """ - - hashmap = {} - from base64 import encodestring as base64 - import pwd - - login_name = pwd.getpwuid(os.geteuid())[0] - conn = http.client.HTTPSConnection("api.github.com") - conn.request( - "GET", - "/repos/nipy/nipype", - headers={"Authorization": "Basic %s" % base64(login_name)}, - ) - try: - conn.request("GET", "/repos/nipy/nipype/git/trees/master?recursive=1") - except: - pass - else: - r1 = conn.getresponse() - if r1.reason != "OK": - raise Exception("HTTP Response %s:%s" % (r1.status, r1.reason)) - payload = simplejson.loads(r1.read()) - for infodict in payload["tree"]: - if infodict["type"] == "blob": - hashmap[infodict["sha"]] = infodict["path"] - return hashmap - - -def get_repo_url(force_github=False): - """Returns github url or local url - - Returns - ------- - URI: str - filesystem path or github repo url - """ - sourcepath = os.path.realpath( - os.path.join(os.path.dirname(nipype.__file__), os.path.pardir) - ) - gitpathgit = os.path.join(sourcepath, ".git") - if not os.path.exists(gitpathgit) and not force_github: - uri = "file://%s" % sourcepath - else: - uri = "http://github.com/nipy/nipype/blob/master" - return uri - - -def get_file_url(object): - """Returns local or remote url for an object - """ - filename = inspect.getsourcefile(object) - lines = inspect.getsourcelines(object) - uri = "file://%s#L%d" % (filename, lines[1]) - if is_git_repo(): - info = nipype.get_info() - shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) - uri = "http://github.com/nipy/nipype/tree/%s/%s#L%d" % ( - info["commit_hash"], - shortfile, - lines[1], - ) - return uri diff --git a/tools/make_examples.py b/tools/make_examples.py index 785d06af33..f91d42b0fe 100755 --- a/tools/make_examples.py +++ b/tools/make_examples.py @@ -3,8 +3,6 @@ This also creates the index.rst file appropriately, makes figures, etc. 
""" -from past.builtins import execfile - # ----------------------------------------------------------------------------- # Library imports # ----------------------------------------------------------------------------- @@ -66,16 +64,19 @@ def show(): # Main script # ----------------------------------------------------------------------------- +exclude_files = ['-x %s' % sys.argv[i + 1] for i, arg in enumerate(sys.argv) if arg == '-x'] + # Work in examples directory cd("users/examples") if not os.getcwd().endswith("users/examples"): raise OSError("This must be run from doc/examples directory") # Run the conversion from .py to rst file -sh("../../../tools/ex2rst --project Nipype --outdir . ../../../examples") -sh( - "../../../tools/ex2rst --project Nipype " - "--outdir . ../../../examples/frontiers_paper" +sh("../../../tools/ex2rst %s --project Nipype --outdir . ../../../examples" % + ' '.join(exclude_files)) +sh("""\ +../../../tools/ex2rst --project Nipype %s --outdir . ../../../examples/frontiers_paper \ +""" % ' '.join(exclude_files) ) # Make the index.rst file @@ -99,5 +100,6 @@ def show(): for script in glob("*.py"): figure_basename = pjoin("fig", os.path.splitext(script)[0]) - execfile(script) + with open(script, 'rt') as f: + exec(f.read()) plt.close("all")