diff --git a/docs/gallery/advanced_io/plot_linking_data.py b/docs/gallery/advanced_io/plot_linking_data.py
index 88ba7e10f..00dfe5056 100644
--- a/docs/gallery/advanced_io/plot_linking_data.py
+++ b/docs/gallery/advanced_io/plot_linking_data.py
@@ -268,8 +268,8 @@
 # -------------------------------------------------------------------
 #
 # For extremely large datasets it can be useful to split data across multiple files, e.g., in cases where
-# the file stystem does not allow for large files. While we can
-# achieve this by writing different components (e.g., :py:meth:`~pynwb.base.TimeSeries`) to different files as described above,
+# the file system does not allow for large files. While we can achieve this by writing different
+# components (e.g., :py:meth:`~pynwb.base.TimeSeries`) to different files as described above,
 # this option does not allow splitting data from single datasets. An alternative option is to use the
 # ``family`` driver in ``h5py`` to automatically split the NWB file into a collection of many HDF5 files.
 # The ``family`` driver stores the file on disk as a series of fixed-length chunks (each in its own file).
diff --git a/pyproject.toml b/pyproject.toml
index 4873b52e1..befa3bb0f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -91,7 +91,7 @@ omit = [
 ]
 
 [tool.ruff]
-select = ["E", "F", "T100", "T201", "T203"]
+lint.select = ["E", "F", "T100", "T201", "T203"]
 exclude = [
   ".git",
   ".tox",
@@ -100,12 +100,13 @@ exclude = [
   "dist/",
   "src/nwb-schema",
   "docs/source/conf.py",
+  "docs/notebooks/*",
   "src/pynwb/_due.py",
   "test.py"  # remove when pytest comes along
 ]
 line-length = 120
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "tests/read_dandi/*" = ["T201"]
 "docs/gallery/*" = ["E402", "T201"]
 "src/*/__init__.py" = ["F401"]
@@ -115,6 +116,6 @@ line-length = 120
 # "test_gallery.py" = ["T201"]  # Uncomment when test_gallery.py is created
 
-[tool.ruff.mccabe]
+[tool.ruff.lint.mccabe]
 max-complexity = 17
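
For context on the gallery hunk above: the new prose describes splitting a single NWB file across multiple HDF5 files with the h5py ``family`` driver. Below is a minimal sketch (not part of this diff) of that workflow. The file-name pattern, member size, and the synthetic TimeSeries are illustrative assumptions, not values taken from the gallery example.

from datetime import datetime
from uuid import uuid4

import h5py
import numpy as np
from pynwb import NWBFile, NWBHDF5IO, TimeSeries

# Minimal NWBFile with one synthetic TimeSeries (illustrative data only).
nwbfile = NWBFile(
    session_description="family driver example",
    identifier=str(uuid4()),
    session_start_time=datetime.now().astimezone(),
)
nwbfile.add_acquisition(
    TimeSeries(
        name="synthetic_timeseries",
        data=np.random.rand(1_000_000),
        unit="n/a",
        starting_time=0.0,
        rate=10_000.0,
    )
)

# The "%d" placeholder is required; h5py replaces it with the index of each member file.
filename_pattern = "family_nwb_file_%d.nwb"
member_size = 1024**2  # maximum size of each member file in bytes (1 MiB, chosen arbitrarily)

# Open the container with the h5py "family" driver and hand the open h5py.File to NWBHDF5IO,
# so the NWB file is written as a series of fixed-size member files on disk.
with h5py.File(filename_pattern, mode="w", driver="family", memb_size=member_size) as f:
    with NWBHDF5IO(file=f, mode="w") as io:
        io.write(nwbfile)

Reading the data back requires reopening with the same driver and the same memb_size, i.e. passing h5py.File(filename_pattern, "r", driver="family", memb_size=member_size) to NWBHDF5IO in read mode.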