diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 0000000..252cc0c
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,3 @@
+python:
+ version: 3
+ setup_py_install: true
diff --git a/.travis.yml b/.travis.yml
index 001093d..612c73b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,18 +1,43 @@
language: python
-python:
- - "3.6-dev"
- - 3.5
- - 3.4
+sudo: false
+cache: pip
+matrix:
+ include:
+ - { python: "3.6-dev", dist: xenial, sudo: true }
+ - { python: "3.7", dist: xenial, sudo: true }
+ - { python: "3.7-dev", dist: xenial, sudo: true }
+ - { python: "nightly", dist: xenial, sudo: true }
install:
- - pip install pytest pytest-cov --upgrade
- - pip install codecov --upgrade
- - pip install pylint --upgrade
- - pip install -e .
+- pip install pytest pytest-cov --upgrade
+- pip install codecov --upgrade
+- pip install pylint --upgrade
+- python setup.py sdist
+- python setup.py bdist_wheel
+- pip install -r requirements.txt
+- python -m pip install dist/importnb-*.tar.gz --ignore-installed --no-deps
+
script:
- - py.test --cov=./
- - pylint ipynb
+- tox
+- importnb-install
+- ipython -m readme
+- jupyter nbconvert --to notebook --inplace readme.ipynb
+- pylint ipynb
after_success:
- - codecov
-matrix:
- allow_failures:
- - python: 3.6-dev
+- codecov
+deploy:
+# - provider: pypi
+# user: docfast
+# skip_cleanup: true
+# password:
+# secure: s9hSRaBCXVpCm2W0moHHAJrzceFWnbIZoZ2j8BhqwbwTicxqIvi8pN7XQWokZt5ymLUDn6OfZ7yap81LXN8XcRWPFTVGJXOuzCrJJuvn06kWx3/x7FZ+hNKCVrsL3wyXXwKTcj8gMUahArYFuseReoAKhcbyH2/yL4n6HXkgcXGWrFXcISVnbR5D+WOlTt3WzEOlh55BoMLUcmAbN8Wa32z8cSQyFcW6kz9DqwcpRy//vCYh4SC/GJZZc5bXX6ByqloxyHmb7MlONCoDEfm7O9fHho8cv9IEZXiZmgy+f0l/ugmGKFBIEp6hhxThD8opw3zQBwl1FHVMJcwqLi+ODmY6lB9xYgtO652wxNZuCkKCArUL/LqBspRTBruex/K2/XjLusln8lrDSExsDvFKWksRrMODW1YccUQK9nhMBrFH18kveIbwr5E41ZfbrWD8I8wn0jHnwiJ/zaYbtPpm0egiY0YfGTVAEQoWN5iV5u5zoDWZA8LiFwTYnywLogGhVbUPX+CkjC7dBm7hWUrkihS99F6nPaP+B4E5HWubp64qsJSko8V+cufrnsqGomG/StozKJhw4+5Zhb9xuWNk0nqhq3Jdi/m0BAsgPLjIriovCcuTqpYC0ojCtpjO+7qExk91N55vGKUIeUkkQAu8gAhQ29jXwxa1HEqy8YO+WUI=
+# on:
+# tags: true
+# distributions: sdist bdist_wheel
+# repo: deathbeds/importnb
+# python: '3.6'
+
+# - provider: pages
+# skip-cleanup: true
+# github-token: $GITHUB_TOKEN # Set in the settings page of your repository, as a secure variable
+# on:
+# python: '3.6'
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..fff4c21
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1 @@
+include LICENSE readme.md changelog.ipynb
\ No newline at end of file
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 0000000..bd33118
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,34 @@
+# Python package
+# Create and test a Python package on multiple Python versions.
+# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
+# https://docs.microsoft.com/azure/devops/pipelines/languages/python
+
+trigger:
+- master
+
+pool:
+ vmImage: 'ubuntu-latest'
+strategy:
+ matrix:
+ Python35:
+ python.version: '3.5'
+ Python36:
+ python.version: '3.6'
+ Python37:
+ python.version: '3.7'
+
+steps:
+- task: UsePythonVersion@0
+ inputs:
+ versionSpec: '$(python.version)'
+ displayName: 'Use Python $(python.version)'
+
+- script: |
+ python -m pip install --upgrade pip
+ pip install -r requirements.txt
+ displayName: 'Install dependencies'
+
+- script: |
+ pip install pytest pytest-azurepipelines
+ pytest
+ displayName: 'pytest'
diff --git a/changelog.ipynb b/changelog.ipynb
new file mode 100644
index 0000000..58da5e6
--- /dev/null
+++ b/changelog.ipynb
@@ -0,0 +1,157 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## CHANGELOG"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0.4.0\n",
+ "\n",
+ "* Fuzzy name completion.\n",
+ "* A configurable extension system for magics.\n",
+ "* `Interactive(shell=False)` is the default loader."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0.3.2\n",
+ "\n",
+ "* Add `remote` loader. Load notebooks from remote urls.\n",
+ "* Support a fuzzy name import system. Files with special characters and numbers are importable.\n",
+ "* An IPython magic to allow relative imports during interactive computing."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0.3.1\n",
+ "\n",
+ "* In loaders `Notebook`, `Interactive`, `Execute`, and `Parameterize`\n",
+ "* Remove `Partial`, `Lazy`, and `NotebookTest` loaders.\n",
+ "* The first Markdown cell imports as a docstrings, permitting doctests on markdown cells.\n",
+ "* `Notebook(globals={})` passes global values to the module\n",
+ "* `Notebook(dir=\"..\")` will change the working directory and path.\n",
+ "* The code is pure python and uses IPython when possible.\n",
+ "* `ipython -m importnb nodebook.ipynb` runs a notebook."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0.2.9\n",
+ "\n",
+ "* Include `Partial`, `Lazy`, and `NotebookTest` loaders.\n",
+ "* Transform markdown cells to literate block strings so they are included in the ast.\n",
+ " * `__doc__`'s are extracted from the first markdown cell or normal source code from a code cell.\n",
+ "* Export the python source code with `black`.\n",
+ "* `Notebook.from_filename` is a loader for paths and strings.\n",
+ "* Add `importnb.nbtest` for notebook testing tools..\n",
+ "* Benchmark `importnb` against existing notebooks.\n",
+ "* Include a `watchdog` trick to watch tests..\n",
+ "* Extend the project to >= 3.4\n",
+ "* Use nbviewer/github hierachy for the docs."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0.2.4\n",
+ "\n",
+ "* Use `tox` for testing\n",
+ "* Use a source directory folder structure for pytest and tox testing.\n",
+ "* Create a pytest plugin that discovers notebooks as tests. With `importnb` notebooks can be used as fixtures in pytest.\n",
+ "* Install `importnb` as an IPython extension.\n",
+ "* Support running notebooks as modules from the `ipython` command line\n",
+ "* Create a `setuptools` command to allow notebooks as packages. "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0.2.1\n",
+ "\n",
+ "* `importnb` supports notebook inputs from pure python environments. Two compatible compiler were created from IPython and Python\n",
+ "* `importnb.Partial` works appropriately by improving exceptions.\n",
+ "* All of the IPython magic syntaxes were removed to support Pure Python.\n",
+ "* The generated Python files are formatted with black.\n",
+ "* Tests were added to:\n",
+ "\n",
+ " * Validate the line number in tracebacks\n",
+ " * Test someone elses notebooks"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0.1.4\n",
+ "- Pypi supports markdown long_description with the proper mimetype in long_description_content_type."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0.1.3\n",
+ "- Include the RST files in the `MANIFEST.in`."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0.1.2 (Unreleased)\n",
+ "- Use RST files to improve the literacy of the pypi description."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0.1.1\n",
+ "- Released on PyPi "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0.0.2\n",
+ "- Initial Testing Release"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "p6",
+ "language": "python",
+ "name": "other-env"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/conf.py b/conf.py
new file mode 100644
index 0000000..092de1e
--- /dev/null
+++ b/conf.py
@@ -0,0 +1,40 @@
+project = 'importnb'
+copyright = '2018, deathbeds'
+author = 'deathbeds'
+
+version = release = '0.5.0'
+
+extensions = ['nbsphinx', 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
+ 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
+
+templates_path = ['docs/_includes']
+source_suffix = '.ipynb'
+
+master_doc = 'index'
+exclude_patterns = [
+ '*.ipynb_checkpoints*', '.eggs*', '.tox*', 'build', 'dist', '_build', 'Thumbs.db', '.DS_Store']
+
+pygments_style = 'sphinx'
+html_theme = 'alabaster'
+html_static_path = ['docs']
+
+htmlhelp_basename = 'importnbdoc'
+latex_elements = {}
+
+latex_documents = [
+ (master_doc, 'importnb.tex', 'importnb Documentation',
+ 'deathbeds', 'manual'),
+]
+man_pages = [
+ (master_doc, 'importnb', 'importnb Documentation',
+ [author], 1)]
+texinfo_documents = [
+ (master_doc, 'importnb', 'importnb Documentation',
+ author, 'importnb', 'One line description of project.',
+ 'Miscellaneous')]
+epub_title = project
+epub_author = author
+epub_publisher = author
+epub_copyright = copyright
+epub_exclude_files = ['search.html']
+intersphinx_mapping = {'https://docs.python.org/': None}
\ No newline at end of file
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..1305edd
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,8 @@
+try:
+ from IPython import get_ipython
+ assert get_ipython()
+except:
+ collect_ignore = [
+ "src/importnb/utils/ipython.py",
+ "src/importnb/completer.py"
+ ]
diff --git a/docs/classes_importnb.png b/docs/classes_importnb.png
new file mode 100644
index 0000000..cd05f63
Binary files /dev/null and b/docs/classes_importnb.png differ
diff --git a/docs/index.html b/docs/index.html
new file mode 100644
index 0000000..07d56f0
--- /dev/null
+++ b/docs/index.html
@@ -0,0 +1 @@
+
diff --git a/docs/packages_importnb.png b/docs/packages_importnb.png
new file mode 100644
index 0000000..8272b45
Binary files /dev/null and b/docs/packages_importnb.png differ
diff --git a/docs/readme.rst b/docs/readme.rst
new file mode 100644
index 0000000..a6c9c00
--- /dev/null
+++ b/docs/readme.rst
@@ -0,0 +1 @@
+.. include:: ../readme.ipynb
\ No newline at end of file
diff --git a/index.ipynb b/index.ipynb
new file mode 120000
index 0000000..928944b
--- /dev/null
+++ b/index.ipynb
@@ -0,0 +1 @@
+readme.ipynb
\ No newline at end of file
diff --git a/postBuild b/postBuild
new file mode 100644
index 0000000..7db7292
--- /dev/null
+++ b/postBuild
@@ -0,0 +1,2 @@
+jupyter labextension install jupyterlab-toc
+importnb-install
diff --git a/readme.ipynb b/readme.ipynb
new file mode 100644
index 0000000..71cb04e
--- /dev/null
+++ b/readme.ipynb
@@ -0,0 +1,569 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "__importnb__ imports notebooks as modules. Notebooks are reusable as tests, source code, importable modules, and command line utilities.\n",
+ "\n",
+ "[![Binder](https://mybinder.org/badge.svg)](https://mybinder.org/v2/gh/deathbeds/importnb/master?urlpath=lab/tree/readme.ipynb)[![Documentation Status](https://readthedocs.org/projects/importnb/badge/?version=latest)](https://importnb.readthedocs.io/en/latest/?badge=latest)\n",
+ "[![Build Status](https://travis-ci.org/deathbeds/importnb.svg?branch=master)](https://travis-ci.org/deathbeds/importnb)[![PyPI version](https://badge.fury.io/py/importnb.svg)](https://badge.fury.io/py/importnb)![PyPI - Python Version](https://img.shields.io/pypi/pyversions/importnb.svg)![PyPI - Format](https://img.shields.io/pypi/format/importnb.svg)![PyPI - Format](https://img.shields.io/pypi/l/importnb.svg)[\n",
+ "![Conda](https://img.shields.io/conda/pn/conda-forge/importnb.svg)](https://anaconda.org/conda-forge/importnb)[\n",
+ "![GitHub tag](https://img.shields.io/github/tag/deathbeds/importnb.svg)](https://github.com/deathbeds/importnb/tree/master/src/importnb) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)\n",
+ "\n",
+ "\n",
+ "##### Installation\n",
+ "\n",
+ " pip install importnb\n",
+ " \n",
+ "---\n",
+ "\n",
+ " conda install -c conda-forge importnb"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "---\n",
+ "\n",
+ "# `importnb` for testing\n",
+ "\n",
+ "After `importnb` is installed, [pytest](https://pytest.readthedocs.io/) will discover and import notebooks as tests.\n",
+ "\n",
+ " pytest index.ipynb\n",
+ "\n",
+ "[`importnb`](https://github.com/deathbeds/importnb) imports notebooks as python modules, it does not compare outputs like [`nbval`](https://github.com/computationalmodelling/nbval). \n",
+ "\n",
+ "[`importnb`](https://github.com/deathbeds/importnb) now captures `doctest`s in every __Markdown__ cell & block string expression. The docstrings are tested with the [__--doctest-modules__ flag](https://doc.pytest.org/en/latest/doctest.html).\n",
+ "\n",
+ " pytest index.ipynb --doctest-modules\n",
+ " \n",
+ "It is recommended to use `importnb` with [__--nbval__](https://github.com/computationalmodelling/nbval) and the __--monotonic__ flag that checks if has notebook has be restarted and re-run.\n",
+ "\n",
+ " pytest index.ipynb --nbval --monotonic"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "---\n",
+ "\n",
+ "# `importnb` for the commmand line\n",
+ "\n",
+ "`importnb` can run notebooks as command line scripts. Any literal variable in the notebook, may be applied as a parameter from the command line.\n",
+ "\n",
+ " ipython -m importnb -- index.ipynb --foo \"A new value\"\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "---\n",
+ "\n",
+ "# `importnb` for Python and IPython\n",
+ "\n",
+ "\n",
+ "It is suggested to execute `importnb-install` to make sure that notebooks for each IPython session.\n",
+ "\n",
+ "> Restart and run all or it didn't happen.\n",
+ "\n",
+ "`importnb` excels in an interactive environment and if a notebook will __Restart and Run All__ then it may reused as python code. The `Notebook` context manager will allow notebooks _with valid names_ to import with Python.\n",
+ "\n",
+ " >>> from importnb import Notebook"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### For brevity"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " with __import__('importnb').Notebook(): \n",
+ " import readme"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "> [`importnb.loader`](src/notebooks/loader.ipynb) will find notebooks available anywhere along the [`sys.path`](https://docs.python.org/2/library/sys.html#sys.path)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### or explicity "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " from importnb import Notebook\n",
+ " with Notebook(): \n",
+ " import readme"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " foo = 42\n",
+ " with Notebook(): \n",
+ " import readme\n",
+ " if __name__ == '__main__':\n",
+ " assert readme.foo == 42\n",
+ " assert readme.__file__.endswith('.ipynb')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "nbsphinx-toctree": {}
+ },
+ "source": [
+ "[`importnb` readme](readme.ipynb)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Modules may be reloaded \n",
+ "\n",
+ "The context manager is required to `reload` a module."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " from importlib import reload\n",
+ " with Notebook(): __name__ == '__main__' and reload(readme)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Lazy imports\n",
+ "\n",
+ "The `lazy` option will delay the evaluation of a module until one of its attributes are accessed the first time."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " with Notebook(lazy=True):\n",
+ " import readme"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Fuzzy File Names"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " if __name__ == '__main__':\n",
+ " with Notebook():\n",
+ " import __a_me\n",
+ " \n",
+ " assert __a_me.__file__ == readme.__file__"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Python does not provide a way to import file names starting with numbers of contains special characters. `importnb` installs a fuzzy import logic to import files containing these edge cases.\n",
+ "\n",
+ " import __2018__6_01_A_Blog_Post\n",
+ " \n",
+ "will find the first file matching `*2018*6?01?A?Blog?Post`. Importing `Untitled314519.ipynb` could be supported with the query below.\n",
+ "\n",
+ " import __314519"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Docstring\n",
+ "\n",
+ "The first markdown cell will become the module docstring."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "__importnb__ imports notebooks as modules. Notebooks are reusable as tests, source code, importable modules, and command line utilities.\n"
+ ]
+ }
+ ],
+ "source": [
+ " if __name__ == '__main__':\n",
+ " print(readme.__doc__.splitlines()[0])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Meaning non-code blocks can be executeb by [doctest]()."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " if __name__ == '__main__':\n",
+ " __import__('doctest').testmod(readme)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Import notebooks from files\n",
+ "\n",
+ "Notebook names may not be valid Python paths. In this case, use `Notebook.from_filename`.\n",
+ "\n",
+ " Notebook.from_filename('index.ipynb')\n",
+ " \n",
+ "Import under the `__main__` context.\n",
+ " \n",
+ " Notebook('__main__').from_filename('index.ipynb')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Parameterize Notebooks\n",
+ "\n",
+ "Literal ast statements are converted to notebooks parameters.\n",
+ "\n",
+ "In `readme`, `foo` is a parameter because it may be evaluated with ast.literal_val"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " if __name__ == '__main__':\n",
+ " from importnb.parameterize import Parameterize\n",
+ " f = Parameterize.load(readme.__file__)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The parameterized module is a callable that evaluates with different literal statements."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " if __name__ == '__main__': \n",
+ " assert callable(f)\n",
+ " f.__signature__\n",
+ "\n",
+ " assert f().foo == 42\n",
+ " assert f(foo='importnb').foo == 'importnb'"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Run Notebooks from the command line\n",
+ "\n",
+ "Run any notebook from the command line with importnb. Any parameterized expressions are available as parameters on the command line.\n",
+ "\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ " !ipython -m importnb -- index.ipynb --foo \"The new value\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Integrations\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### IPython\n",
+ "\n",
+ "#### [IPython Extension](src/notebooks/loader.ipynb#IPython-Extensions)\n",
+ "\n",
+ "Avoid the use of the context manager using loading importnb as IPython extension.\n",
+ "\n",
+ " %load_ext importnb\n",
+ " \n",
+ "`%unload_ext importnb` will unload the extension.\n",
+ "\n",
+ "#### Default Extension\n",
+ "\n",
+ "`importnb` may allow notebooks to import by default with "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ " !importnb-install\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "> If you'd like to play with source code on binder then you must execute the command above. Toggle the markdown cell to a code cell and run it."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "This extension will install a script into the default IPython profile startup that is called each time an IPython session is created. \n",
+ "\n",
+ "Uninstall the extension with `importnb-install`.\n",
+ "\n",
+ "##### Run a notebook as a module\n",
+ "\n",
+ "When the default extension is loaded any notebook can be run from the command line. After the `importnb` extension is created notebooks can be execute from the command line.\n",
+ "\n",
+ " ipython -m readme\n",
+ " \n",
+ "In the command line context, `__file__ == sys.arv[0] and __name__ == '__main__'` .\n",
+ " \n",
+ "> See the [deploy step in the travis build](https://github.com/deathbeds/importnb/blob/docs/.travis.yml#L19).\n",
+ "\n",
+ "##### Parameterizable IPython commands\n",
+ "\n",
+ "Installing the IPython extension allows notebooks to be computed from the command. The notebooks are parameterizable from the command line.\n",
+ "\n",
+ " ipython -m readme -- --help"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### py.test\n",
+ "\n",
+ "`importnb` installs a pytest plugin when it is setup. Any notebook obeying the py.test discovery conventions can be used in to pytest. _This is great because notebooks are generally your first test._\n",
+ "\n",
+ " !ipython -m pytest -- src \n",
+ " \n",
+ "Will find all the test notebooks and configurations as pytest would any Python file."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Setup\n",
+ "\n",
+ "To package notebooks add `recursive-include package_name *.ipynb`"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Developer"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "* [Source Notebooks](src/notebooks/)\n",
+ "* [Transpiled Python Source](src/importnb/)\n",
+ "* [Tests](src/importnb/tests)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Format and test the Source Code"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "/Users/tonyfast/anaconda3/lib/python3.7/site-packages/IPython/core/inputsplitter.py:22: DeprecationWarning: IPython.core.inputsplitter is deprecated since IPython 7 in favor of `IPython.core.inputtransformer2`\n",
+ " DeprecationWarning)\n",
+ "\u001b]0;IPython: tonyfast/importnb\u0007\u001b[1m============================= test session starts ==============================\u001b[0m\n",
+ "platform darwin -- Python 3.7.3, pytest-5.1.2, py-1.8.0, pluggy-0.13.0 -- /Users/tonyfast/anaconda3/bin/python\n",
+ "cachedir: .pytest_cache\n",
+ "hypothesis profile 'default' -> database=DirectoryBasedExampleDatabase('/Users/tonyfast/importnb/.hypothesis/examples')\n",
+ "rootdir: /Users/tonyfast/importnb, inifile: tox.ini\n",
+ "plugins: hypothesis-4.36.2, nbval-0.9.2, black-0.3.7, pylint-0.14.1, xonsh-0.9.11, importnb-0.5.5\n",
+ "collected 19 items \u001b[0m\n",
+ "\n",
+ "src/importnb/completer.py::importnb.completer \u001b[32mPASSED\u001b[0m\u001b[36m [ 5%]\u001b[0m\n",
+ "src/importnb/loader.py::importnb.loader.FinderContextManager \u001b[32mPASSED\u001b[0m\u001b[36m [ 10%]\u001b[0m\n",
+ "src/importnb/utils/export.py::importnb.utils.export \u001b[32mPASSED\u001b[0m\u001b[36m [ 15%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_basic \u001b[32mPASSED\u001b[0m\u001b[36m [ 21%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_package \u001b[32mPASSED\u001b[0m\u001b[36m [ 26%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_reload \u001b[32mPASSED\u001b[0m\u001b[36m [ 31%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_docstrings \u001b[32mPASSED\u001b[0m\u001b[36m [ 36%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_docstring_opts \u001b[32mPASSED\u001b[0m\u001b[36m [ 42%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_from_file \u001b[32mPASSED\u001b[0m\u001b[36m [ 47%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_lazy \u001b[32mPASSED\u001b[0m\u001b[36m [ 52%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_module_source \u001b[32mPASSED\u001b[0m\u001b[36m [ 57%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_main \u001b[32mPASSED\u001b[0m\u001b[36m [ 63%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_object_source \u001b[32mPASSED\u001b[0m\u001b[36m [ 68%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_python_file \u001b[32mPASSED\u001b[0m\u001b[36m [ 73%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_cli \u001b[32mPASSED\u001b[0m\u001b[36m [ 78%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_parameterize \u001b[32mPASSED\u001b[0m\u001b[36m [ 84%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_minified_json \u001b[32mPASSED\u001b[0m\u001b[36m [ 89%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_fuzzy_finder \u001b[32mPASSED\u001b[0m\u001b[36m [ 94%]\u001b[0m\n",
+ "tests/test_importnb.ipynb::test_remote \u001b[32mPASSED\u001b[0m\u001b[36m [100%]\u001b[0m\n",
+ "\n",
+ "\u001b[33m=============================== warnings summary ===============================\u001b[0m\n",
+ "src/importnb/completer.py::importnb.completer\n",
+ "src/importnb/completer.py::importnb.completer\n",
+ "src/importnb/completer.py::importnb.completer\n",
+ "src/importnb/completer.py::importnb.completer\n",
+ " /Users/tonyfast/anaconda3/lib/python3.7/site-packages/IPython/core/completer.py:1950: PendingDeprecationWarning: `Completer.complete` is pending deprecation since IPython 6.0 and will be replaced by `Completer.completions`.\n",
+ " PendingDeprecationWarning)\n",
+ "\n",
+ "-- Docs: https://docs.pytest.org/en/latest/warnings.html\n",
+ "\u001b[33m\u001b[1m======================== 19 passed, 4 warnings in 2.93s ========================\u001b[0m\n",
+ "[NbConvertApp] Converting notebook index.ipynb to markdown\n"
+ ]
+ }
+ ],
+ "source": [
+ " if __name__ == '__main__':\n",
+ " if globals().get('__file__', None) == __import__('sys').argv[0]:\n",
+ " print(foo, __import__('sys').argv)\n",
+ " else:\n",
+ " from subprocess import call\n",
+ " !ipython -m pytest\n",
+ " \"\"\"Formatting\"\"\"\n",
+ " from pathlib import Path\n",
+ " from importnb.utils.export import export\n",
+ " root = 'src/importnb/notebooks/'\n",
+ " for path in Path(root).rglob(\"\"\"*.ipynb\"\"\"): \n",
+ " if 'checkpoint' not in str(path):\n",
+ " export(path, Path('src/importnb') / path.with_suffix('.py').relative_to(root))\n",
+ " !jupyter nbconvert --to markdown --stdout index.ipynb > readme.md\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ " if __name__ == '__main__':\n",
+ " try:\n",
+ " from IPython.display import display, Image\n",
+ " from IPython.utils.capture import capture_output\n",
+ " from IPython import get_ipython\n",
+ " with capture_output(): \n",
+ " get_ipython().system(\"cd docs && pyreverse importnb -opng -pimportnb\")\n",
+ " display(Image(url='docs/classes_importnb.png', ))\n",
+ " except: ..."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python [conda env:root] *",
+ "language": "python",
+ "name": "conda-root-py"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/readme.md b/readme.md
new file mode 100644
index 0000000..5dca8c9
--- /dev/null
+++ b/readme.md
@@ -0,0 +1,290 @@
+
+__importnb__ imports notebooks as modules. Notebooks are reusable as tests, source code, importable modules, and command line utilities.
+
+[![Binder](https://mybinder.org/badge.svg)](https://mybinder.org/v2/gh/deathbeds/importnb/master?urlpath=lab/tree/readme.ipynb)[![Documentation Status](https://readthedocs.org/projects/importnb/badge/?version=latest)](https://importnb.readthedocs.io/en/latest/?badge=latest)
+[![Build Status](https://travis-ci.org/deathbeds/importnb.svg?branch=master)](https://travis-ci.org/deathbeds/importnb)[![PyPI version](https://badge.fury.io/py/importnb.svg)](https://badge.fury.io/py/importnb)![PyPI - Python Version](https://img.shields.io/pypi/pyversions/importnb.svg)![PyPI - Format](https://img.shields.io/pypi/format/importnb.svg)![PyPI - Format](https://img.shields.io/pypi/l/importnb.svg)[
+![Conda](https://img.shields.io/conda/pn/conda-forge/importnb.svg)](https://anaconda.org/conda-forge/importnb)[
+![GitHub tag](https://img.shields.io/github/tag/deathbeds/importnb.svg)](https://github.com/deathbeds/importnb/tree/master/src/importnb)
+
+##### Installation
+
+ pip install importnb
+
+---
+
+ conda install -c conda-forge importnb
+
+---
+
+# `importnb` for testing
+
+After `importnb` is installed, [pytest](https://pytest.readthedocs.io/) will discover and import notebooks as tests.
+
+ pytest index.ipynb
+
+[`importnb`](https://github.com/deathbeds/importnb) imports notebooks as python modules, it does not compare outputs like [`nbval`](https://github.com/computationalmodelling/nbval).
+
+[`importnb`](https://github.com/deathbeds/importnb) now captures `doctest`s in every __Markdown__ cell & block string expression. The docstrings are tested with the [__--doctest-modules__ flag](https://doc.pytest.org/en/latest/doctest.html).
+
+ pytest index.ipynb --doctest-modules
+
+It is recommended to use `importnb` with [__--nbval__](https://github.com/computationalmodelling/nbval).
+
+ pytest index.ipynb --nbval
+
+---
+
+# `importnb` for the command line
+
+`importnb` can run notebooks as command line scripts. Any literal variable in the notebook may be applied as a parameter from the command line.
+
+ ipython -m importnb -- index.ipynb --foo "A new value"
+
+
+---
+
+# `importnb` for Python and IPython
+
+
+It is suggested to execute `importnb-install` to make sure that notebooks import for each IPython session.
+
+> Restart and run all or it didn't happen.
+
+`importnb` excels in an interactive environment and if a notebook will __Restart and Run All__ then it may be reused as python code. The `Notebook` context manager will allow notebooks _with valid names_ to import with Python.
+
+ >>> from importnb import Notebook
+
+### For brevity
+
+
+```python
+ with __import__('importnb').Notebook():
+ import readme
+```
+
+> [`importnb.loader`](src/notebooks/loader.ipynb) will find notebooks available anywhere along the [`sys.path`](https://docs.python.org/2/library/sys.html#sys.path).
+
+#### or explicitly
+
+
+```python
+ from importnb import Notebook
+ with Notebook():
+ import readme
+```
+
+
+```python
+ foo = 42
+ import readme
+ assert readme.foo is 42
+ assert readme.__file__.endswith('.ipynb')
+```
+
+[`importnb` readme](readme.ipynb)
+
+### Modules may be reloaded
+
+The context manager is required to `reload` a module.
+
+
+```python
+ from importlib import reload
+ with Notebook(): __name__ == '__main__' and reload(readme)
+```
+
+### Lazy imports
+
+The `lazy` option will delay the evaluation of a module until one of its attributes are accessed the first time.
+
+
+```python
+ with Notebook(lazy=True):
+ import readme
+```
+
+### Fuzzy File Names
+
+
+```python
+ if __name__ == '__main__':
+ with Notebook():
+ import __a_me
+
+ assert __a_me.__file__ == readme.__file__
+```
+
+Python does not provide a way to import file names starting with numbers or containing special characters. `importnb` installs a fuzzy import logic to import files containing these edge cases.
+
+ import __2018__6_01_A_Blog_Post
+
+will find the first file matching `*2018*6?01?A?Blog?Post`. Importing `Untitled314519.ipynb` could be supported with the query below.
+
+ import __314519
+
+### Docstring
+
+The first markdown cell will become the module docstring.
+
+
+```python
+ if __name__ == '__main__':
+ print(readme.__doc__.splitlines()[0])
+```
+
+ __importnb__ imports notebooks as modules. Notebooks are reusable as tests, source code, importable modules, and command line utilities.
+
+
+Meaning non-code blocks can be executed by [doctest](https://docs.python.org/3/library/doctest.html).
+
+
+```python
+ if __name__ == '__main__':
+ __import__('doctest').testmod(readme)
+```
+
+# Import notebooks from files
+
+Notebook names may not be valid Python paths. In this case, use `Notebook.from_filename`.
+
+ Notebook.from_filename('index.ipynb')
+
+Import under the `__main__` context.
+
+ Notebook('__main__').from_filename('index.ipynb')
+
+# Parameterize Notebooks
+
+Literal ast statements are converted to notebooks parameters.
+
+In `readme`, `foo` is a parameter because it may be evaluated with `ast.literal_eval`.
+
+
+```python
+ if __name__ == '__main__':
+ from importnb import Parameterize
+ f = Parameterize.load(readme.__file__)
+```
+
+The parameterized module is a callable that evaluates with different literal statements.
+
+
+```python
+ if __name__ == '__main__':
+ assert callable(f)
+ f.__signature__
+```
+
+ assert f().foo == 42
+ assert f(foo='importnb').foo == 'importnb'
+
+# Run Notebooks from the command line
+
+Run any notebook from the command line with importnb. Any parameterized expressions are available as parameters on the command line.
+
+
+
+ !ipython -m importnb -- index.ipynb --foo "The new value"
+
+## Integrations
+
+
+### IPython
+
+#### [IPython Extension](src/notebooks/loader.ipynb#IPython-Extensions)
+
+Avoid the use of the context manager by loading `importnb` as an IPython extension.
+
+ %load_ext importnb
+
+`%unload_ext importnb` will unload the extension.
+
+#### Default Extension
+
+`importnb` may allow notebooks to import by default with
+
+ !importnb-install
+
+
+> If you'd like to play with source code on binder then you must execute the command above. Toggle the markdown cell to a code cell and run it.
+
+This extension will install a script into the default IPython profile startup that is called each time an IPython session is created.
+
+Uninstall the extension with `importnb-uninstall`.
+
+##### Run a notebook as a module
+
+When the default extension is loaded any notebook can be run from the command line. After the `importnb` extension is created notebooks can be execute from the command line.
+
+ ipython -m readme
+
+In the command line context, `__file__ == sys.argv[0] and __name__ == '__main__'`.
+
+> See the [deploy step in the travis build](https://github.com/deathbeds/importnb/blob/docs/.travis.yml#L19).
+
+##### Parameterizable IPython commands
+
+Installing the IPython extension allows notebooks to be executed from the command line. The notebooks are parameterizable from the command line.
+
+ ipython -m readme -- --help
+
+### py.test
+
+`importnb` installs a pytest plugin when it is set up. Any notebook obeying the py.test discovery conventions can be used with pytest. _This is great because notebooks are generally your first test._
+
+ !ipython -m pytest -- src
+
+Will find all the test notebooks and configurations as pytest would any Python file.
+
+### Setup
+
+To package notebooks add `recursive-include package_name *.ipynb`
+
+## Developer
+
+* [Source Notebooks](src/notebooks/)
+* [Transpiled Python Source](src/importnb/)
+* [Tests](src/importnb/tests)
+
+### Format and test the Source Code
+
+
+```python
+ if __name__ == '__main__':
+ if globals().get('__file__', None) == __import__('sys').argv[0]:
+ print(foo, __import__('sys').argv)
+ else:
+ from subprocess import call
+ !ipython -m pytest
+ """Formatting"""
+ from pathlib import Path
+ from importnb.utils.export import export
+ root = 'src/importnb/notebooks/'
+ for path in Path(root).rglob("""*.ipynb"""):
+ if 'checkpoint' not in str(path):
+ export(path, Path('src/importnb') / path.with_suffix('.py').relative_to(root))
+ !jupyter nbconvert --to markdown --stdout index.ipynb > readme.md
+
+```
+
+
+```python
+ if __name__ == '__main__':
+ try:
+ from IPython.display import display, Image
+ from IPython.utils.capture import capture_output
+ from IPython import get_ipython
+ with capture_output():
+ get_ipython().system("cd docs && pyreverse importnb -opng -pimportnb")
+ display(Image(url='docs/classes_importnb.png', ))
+ except: ...
+```
+
+
+
+
+
+
+```python
+
+```
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..766697f
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,6 @@
+pytest
+tox
+pylint
+nbsphinx
+graphviz
+notebook
diff --git a/setup.py b/setup.py
index a10b624..fff2f6f 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,84 @@
-from setuptools import setup, find_packages
-
-setup(
- name='ipynb',
- version='0.5.1',
- description='Package / Module importer for importing code from Jupyter Notebook files (.ipynb)',
- url='http://github.com/yuvipanda/ipynb',
- author='Yuvi Panda',
- author_email='yuvipanda@gmail.com',
- license='BSD',
- packages=find_packages(),
- python_requires='>=3.4'
+import json
+from pathlib import Path
+import setuptools
+
+name = "importnb"
+
+__version__ = None
+
+here = Path(__file__).parent
+
+# This should be replaced with proper pathlib business
+
+with (here/ 'src' / 'importnb'/ '_version.py').open('r') as file:
+ exec(file.read())
+
+with open(str(here/'readme.md'),'r') as f:
+ description = f.read()
+
+with open(str(here/'changelog.ipynb'), 'r') as f:
+ description += '\n'+ '\n'.join(
+ ''.join(cell['source']) for cell in json.load(f)['cells'] if cell['cell_type'] == 'markdown'
+ )
+
+import sys
+
+from setuptools.command.test import test as TestCommand
+class PyTest(TestCommand):
+ def run_tests(self): sys.exit(__import__('pytest').main([]))
+
+install_requires = []
+try:
+ from importlib import resources
+except:
+ install_requires += ['importlib_resources']
+
+setup_args = dict(
+ name=name,
+ version=__version__,
+ author="Tony Fast",
+ author_email="tony.fast@gmail.com",
+ description="Import Jupyter (ne IPython) notebooks into tests and scripts.",
+ long_description=description,
+ long_description_content_type='text/markdown',
+ url="https://github.com/deathbeds/importnb",
+ python_requires=">=3.4",
+ license="BSD-3-Clause",
+ setup_requires=[
+ 'pytest-runner',
+ ] + ([] if sys.version_info.minor == 4 else ['wheel>=0.31.0']),
+ tests_require=['pytest', 'nbformat'],
+ install_requires=install_requires,
+ include_package_data=True,
+ packages="importnb ipynb".split(),
+ package_dir={"importnb": "src/importnb"},
+ classifiers=[
+ "Development Status :: 4 - Beta",
+ "Framework :: IPython",
+ "Framework :: Jupyter",
+ "Intended Audience :: Developers",
+ "Natural Language :: English",
+ "License :: OSI Approved :: BSD License",
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3 :: Only',],
+ zip_safe=False,
+ cmdclass={'test': PyTest,},
+ entry_points = {
+ 'pytest11': [
+ 'importnb = importnb.utils.pytest_importnb',
+ ],
+ 'console_scripts': [
+ 'importnb-install = importnb.utils.ipython:install',
+ 'importnb-uninstall = importnb.utils.ipython:uninstall',
+ 'nbdoctest = importnb.utils.nbdoctest:_test',
+ ]
+ },
)
+
+if __name__ == "__main__":
+ setuptools.setup(**setup_args)
diff --git a/src/importnb/__init__.py b/src/importnb/__init__.py
new file mode 100644
index 0000000..7fd3b93
--- /dev/null
+++ b/src/importnb/__init__.py
@@ -0,0 +1,9 @@
+# coding: utf-8
+
+__all__ = "Notebook", "reload", "Parameterize", "Remote"
+
+from ._version import *
+from .ipython_extension import load_ipython_extension, unload_ipython_extension
+from .loader import Notebook, reload, unload_ipython_extension
+from .remote import Remote
+from . import utils
\ No newline at end of file
diff --git a/src/importnb/__main__.py b/src/importnb/__main__.py
new file mode 100644
index 0000000..b31ffe6
--- /dev/null
+++ b/src/importnb/__main__.py
@@ -0,0 +1,10 @@
+import sys
+
+from .parameterize import Parameterize
+
+file = sys.argv[1] if len(sys.argv) > 1 else None
+
+if file == __file__:
+ sys.argv = [sys.argv[0]] + [sys.argv[2:]]
+
+ file and Parameterize.load(file)
diff --git a/src/importnb/_version.py b/src/importnb/_version.py
new file mode 100644
index 0000000..906d362
--- /dev/null
+++ b/src/importnb/_version.py
@@ -0,0 +1 @@
+__version__ = "0.6.0"
diff --git a/src/importnb/completer.py b/src/importnb/completer.py
new file mode 100644
index 0000000..b811213
--- /dev/null
+++ b/src/importnb/completer.py
@@ -0,0 +1,117 @@
+# coding: utf-8
+"""# Fuzzy completion
+
+The fuzzy importer could be confusing and perhaps a completer could help.
+
+
+ >>> ip = __import__("IPython").get_ipython(); load_ipython_extension(ip)
+ >>> assert ip.complete('importnb.__pleter', 'import importnb.__pleter')[1]
+ >>> assert ip.complete('__find__', 'import __find__')[1]
+ >>> assert ip.complete('IPy', '\timport IPy')[1]
+ >>> assert ip.complete('_______________plet__', 'from importnb import _______________plet__')[1]
+"""
+
+import string
+from fnmatch import fnmatch
+from pathlib import Path
+
+from .finder import fuzzy_file_search
+
+
+"""To provide the most reliable fuzzy imports `fuzzify_string` replaces the imported with one that complies with the fuzzy finder.
+"""
+
+
+def fuzzify_string(str):
+ return (str[0] in string.ascii_letters + "_" and str[0] or "_") + "".join(
+ letter if letter in string.ascii_letters + "_" + string.digits else "_"
+ for letter in str[1:]
+ )
+
+
+"""`align_match` replaces the the begining of the match with a prefix that matches that completer query name.
+"""
+
+
+def align_match(match, prefix, *, i=0):
+ pattern = prefix.replace("__", "*").replace("_", "?").strip()
+ for i in range(len(match)):
+ if fnmatch(match[:i], pattern):
+ break
+ else:
+ i += 1
+ return prefix + match[i:]
+
+
+"""* `predict_fuzzy` will take a fully qualified fuzzy name completions. This is the main function for the completer.
+"""
+
+
+def predict_fuzzy(fullname):
+ package, paths, specs, extras = "", [], [], []
+ if "." in fullname:
+ package, fullname = fullname.rsplit(".", 1)
+ fullname = fullname.strip()
+ try:
+ module = __import__("importlib").import_module(package)
+ paths.append(Path(module.__file__).parent)
+ extras = [object for object in dir(module) if object.startswith("fullname")]
+ except:
+ ...
+ else:
+ paths = map(Path, __import__("sys").path)
+ query = fullname
+ while not query.endswith("__"):
+ query += "_"
+ for path in paths:
+ specs.extend(
+ str(object.relative_to(path).with_suffix(""))
+ for object in fuzzy_file_search(path, query)
+ )
+
+ return set(
+ (package and package + "." or "") + align_match(fuzzify_string(spec), fullname)
+ for spec in specs
+ ).union(set(extras))
+
+
+def fuzzy_complete_event(self, event):
+ event.line = event.line.lstrip()
+ symbol = event.symbol
+ if event.line.startswith("from"):
+ package = event.line.split(" import ", 1)[0].lstrip().lstrip("from").lstrip()
+ if " import" in event.line:
+ symbol = (package + "." + symbol).lstrip(".")
+ return [
+ object.lstrip(package).lstrip(".") for object in predict_fuzzy(symbol)
+ ]
+
+ return predict_fuzzy(symbol)
+
+
+"""* The extension adds the new fuzzy completer. Our completer has a higher priority than the default completers. Since we stripped the leading whitespace from the completion line event; the extension will permit completion on tabbed lines.
+"""
+
+
+def load_ipython_extension(ip):
+ ip.set_hook(
+ "complete_command", fuzzy_complete_event, str_key="aimport", priority=25
+ )
+ ip.set_hook("complete_command", fuzzy_complete_event, str_key="import", priority=25)
+ ip.set_hook(
+ "complete_command", fuzzy_complete_event, str_key="%reload_ext", priority=25
+ )
+ ip.set_hook(
+ "complete_command", fuzzy_complete_event, str_key="%load_ext", priority=25
+ )
+ ip.set_hook("complete_command", fuzzy_complete_event, str_key="from", priority=25)
+
+
+if __name__ == "__main__":
+ from .utils.export import export
+ from importnb import Notebook
+
+ export("completer.ipynb", "../completer.py")
+ ip = get_ipython()
+ m = Notebook.load("completer.ipynb")
+ print(__import__("doctest").testmod(m, verbose=2))
diff --git a/src/importnb/decoder.py b/src/importnb/decoder.py
new file mode 100644
index 0000000..dfebd92
--- /dev/null
+++ b/src/importnb/decoder.py
@@ -0,0 +1,106 @@
+# coding: utf-8
+"""# Decode `nbformat` with line numbers
+
+`importnb` decodes notebooks with the `nbformat` in valid source code.
+
+We consider three kinds of cells.
+"""
+
+import linecache
+import textwrap
+from functools import partial
+from json import load as _load
+from json import loads as _loads
+from json.decoder import (WHITESPACE, WHITESPACE_STR, JSONDecoder, JSONObject,
+ py_scanstring)
+from json.scanner import py_make_scanner
+
+
+"""Output the strings slice that the source came from.
+"""
+
+
+def scanstring(s, end, strict=True, **kwargs):
+ s, id = py_scanstring(s, end, strict, **kwargs)
+ return (slice(end, id), s), id
+
+
+def quote(object, *, quotes="'''"):
+ if quotes in object:
+ quotes = '"""'
+ return quotes + object + "\n" + quotes
+
+
+def object_pairs_hook(object) -> (slice, str):
+ object = dict(object)
+ if "cells" in object:
+ return object["cells"]
+
+ if "cell_type" in object:
+ _, object["cell_type"] = object["cell_type"]
+
+ for key in ["text", "source"]:
+ if key in object:
+ if object[key]:
+ return (
+ slice(object[key][0][0].start, object[key][-1][0].stop),
+ object,
+ "".join(_[1] for _ in object[key]),
+ )
+ return slice(None), None, None
+
+
+class LineCacheNotebookDecoder(JSONDecoder):
+ def __init__(
+ self,
+ markdown=quote,
+ code=textwrap.dedent,
+ raw=partial(textwrap.indent, prefix="# "),
+ **kwargs
+ ):
+ super().__init__(**kwargs)
+
+ for key in ("markdown", "code", "raw"):
+ setattr(self, "transform_" + key, locals().get(key))
+
+ self.parse_string = scanstring
+ self.object_pairs_hook = object_pairs_hook
+ self.scan_once = py_make_scanner(self)
+
+ def decode(self, object, filename):
+ lines = []
+
+ linecache.updatecache(filename)
+ if filename in linecache.cache:
+ linecache.cache[filename] = (
+ linecache.cache[filename][0],
+ linecache.cache[filename][1],
+ lines,
+ filename,
+ )
+ last, new, old = slice(0, 0), 0, 0
+ for current, cell, source in super().decode(object):
+ if cell:
+ lines += ["\n"] * (
+ object[last.stop : current.start].splitlines().__len__()
+ - 1
+ + (old - new)
+ )
+
+ source = getattr(self, "transform_" + cell["cell_type"])(source)
+
+ lines += list(map("{}\n".format, source.splitlines()))
+ new, old = map(len, map(str.splitlines, (source, object[current])))
+ if not lines[-1]:
+ lines.pop()
+ last = current
+
+ return "".join(lines)
+
+
+if __name__ == "__main__":
+ try:
+ from utils.export import export
+ except:
+ from .utils.export import export
+ export("decoder.ipynb", "../decoder.py")
diff --git a/src/importnb/docstrings.py b/src/importnb/docstrings.py
new file mode 100644
index 0000000..0bd788b
--- /dev/null
+++ b/src/importnb/docstrings.py
@@ -0,0 +1,125 @@
+# coding: utf-8
+"""# Special handling of markdown cells as docstrings.
+
+Modify the Python `ast` to assign docstrings to functions when they are preceded by a Markdown cell.
+"""
+
+import ast
+
+"""# Modifying the `ast`
+
+ >>> assert isinstance(create_test, ast.Assign)
+ >>> assert isinstance(test_update, ast.Attribute)
+"""
+
+create_test = ast.parse(
+ """__test__ = globals().get('__test__', {})""", mode="single"
+).body[0]
+test_update = ast.parse("""__test__.update""", mode="single").body[0].value
+str_nodes = (ast.Str,)
+
+"""`TestStrings` is an `ast.NodeTransformer` that captures `str_nodes` in the `TestStrings.strings` object.
+
+```ipython
+>>> assert isinstance(ast.parse(TestStrings().visit(ast.parse('"Test me"'))), ast.Module)
+
+```
+"""
+
+
+class TestStrings(ast.NodeTransformer):
+
+ strings = None
+
+ def visit_Module(self, module):
+ """`TestStrings.visit_Module` initializes the capture. After all the nodes are visit we append `create_test and test_update`
+ to populate the `"__test__"` attribute.
+ """
+ self.strings = []
+ module = self.visit_body(module)
+ module.body += (
+ [create_test]
+ + [
+ ast.copy_location(
+ ast.Expr(
+ ast.Call(
+ func=test_update,
+ args=[
+ ast.Dict(
+ keys=[ast.Str("string-{}".format(node.lineno))],
+ values=[node],
+ )
+ ],
+ keywords=[],
+ )
+ ),
+ node,
+ )
+ for node in self.strings
+ ]
+ if self.strings
+ else []
+ )
+ return module
+
+ def visit_body(self, node):
+ """`TestStrings.visit_body` visits nodes with a `"body"` attibute and extracts potential string tests."""
+
+ body = []
+ if (
+ node.body
+ and isinstance(node.body[0], ast.Expr)
+ and isinstance(node.body[0].value, str_nodes)
+ ):
+ body.append(node.body.pop(0))
+ node.body = body + [
+ (self.visit_body if hasattr(object, "body") else self.visit)(object)
+ for object in node.body
+ ]
+ return node
+
+ def visit_Expr(self, node):
+ """`TestStrings.visit_Expr` append the `str_nodes` to `TestStrings.strings` to append to the `ast.Module`."""
+
+ if isinstance(node.value, str_nodes):
+ self.strings.append(
+ ast.copy_location(ast.Str(node.value.s.replace("\n```", "\n")), node)
+ )
+ return node
+
+
+def update_docstring(module):
+ from functools import reduce
+
+ module.body = reduce(markdown_docstring, module.body, [])
+ return TestStrings().visit(module)
+
+
+docstring_ast_types = ast.ClassDef, ast.FunctionDef
+try:
+ docstring_ast_types += (ast.AsyncFunctionDef,)
+except:
+ ...
+
+
+def markdown_docstring(nodes, node):
+ if (
+ len(nodes) > 1
+ and str_expr(nodes[-1])
+ and isinstance(node, docstring_ast_types)
+ and not str_expr(node.body[0])
+ ):
+ node.body.insert(0, nodes.pop())
+ return nodes.append(node) or nodes
+
+
+def str_expr(node):
+ return isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)
+
+
+if __name__ == "__main__":
+ try:
+ from utils.export import export
+ except:
+ from .utils.export import export
+ export("docstrings.ipynb", "../docstrings.py")
diff --git a/src/importnb/finder.py b/src/importnb/finder.py
new file mode 100644
index 0000000..770d478
--- /dev/null
+++ b/src/importnb/finder.py
@@ -0,0 +1,126 @@
+# coding: utf-8
+"""# `sys.path_hook` modifiers
+
+Many suggestions for importing notebooks use `sys.meta_paths`, but `importnb` relies on the `sys.path_hooks` to load any notebook in the path. `PathHooksContext` is a base class for the `importnb.Notebook` `SourceFileLoader`.
+"""
+
+import ast
+import inspect
+import os
+import sys
+from contextlib import ExitStack, contextmanager
+from importlib.machinery import ModuleSpec, SourceFileLoader
+from itertools import chain
+from pathlib import Path
+
+try:
+ from importlib._bootstrap_external import FileFinder
+except:
+ # python 3.4
+ from importlib.machinery import FileFinder
+
+
+
+
+
+class FileModuleSpec(ModuleSpec):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._set_fileattr = True
+
+
+class FuzzySpec(FileModuleSpec):
+ def __init__(
+ self,
+ name,
+ loader,
+ *,
+ alias=None,
+ origin=None,
+ loader_state=None,
+ is_package=None
+ ):
+ super().__init__(
+ name,
+ loader,
+ origin=origin,
+ loader_state=loader_state,
+ is_package=is_package,
+ )
+ self.alias = alias
+
+
+def fuzzy_query(str):
+ new = ""
+ for chr in str:
+ new += (not new.endswith("__") or chr != "_") and chr or ""
+ return new.replace("__", "*").replace("_", "?")
+
+
+def fuzzy_file_search(path, fullname):
+ results = []
+ id, details = get_loader_details()
+ for ext in sum((list(object[1]) for object in details), []):
+ results.extend(Path(path).glob(fullname + ext))
+ "_" in fullname and results.extend(Path(path).glob(fuzzy_query(fullname) + ext))
+ return results
+
+
+class FuzzyFinder(FileFinder):
+ """Adds the ability to open file names with special characters using underscores."""
+
+ def find_spec(self, fullname, target=None):
+ """Try to finder the spec and if it cannot be found, use the underscore starring syntax
+ to identify potential matches.
+ """
+ spec = super().find_spec(fullname, target=target)
+
+ if spec is None:
+ original = fullname
+
+ if "." in fullname:
+ original, fullname = fullname.rsplit(".", 1)
+ else:
+ original, fullname = "", original
+
+ if "_" in fullname:
+ files = fuzzy_file_search(self.path, fullname)
+ if files:
+ file = Path(sorted(files)[0])
+ spec = super().find_spec(
+ (original + "." + file.stem.split(".", 1)[0]).lstrip("."),
+ target=target,
+ )
+ fullname = (original + "." + fullname).lstrip(".")
+ if spec and fullname != spec.name:
+ spec = FuzzySpec(
+ spec.name,
+ spec.loader,
+ origin=spec.origin,
+ loader_state=spec.loader_state,
+ alias=fullname,
+ is_package=bool(spec.submodule_search_locations),
+ )
+ return spec
+
+
+def get_loader_details():
+ for id, path_hook in enumerate(sys.path_hooks):
+ try:
+ return (
+ id,
+ list(inspect.getclosurevars(path_hook).nonlocals["loader_details"]),
+ )
+ except:
+ continue
+
+
+"""# Developer
+"""
+
+if __name__ == "__main__":
+ try:
+ from utils.export import export
+ except:
+ from .utils.export import export
+ export("finder.ipynb", "../finder.py")
diff --git a/src/importnb/ipython_extension.py b/src/importnb/ipython_extension.py
new file mode 100644
index 0000000..9eec34d
--- /dev/null
+++ b/src/importnb/ipython_extension.py
@@ -0,0 +1,142 @@
+# coding: utf-8
+"""# `importnb` Jupyter magic extensions
+"""
+
+""" %importnb --stdout --stderr --display --shell
+"""
+
+import argparse
+import inspect
+from functools import partial
+from importlib import import_module
+
+import pkg_resources
+
+
+def get_module_object(str):
+ module, object = str.split(":", 1)
+ return getattr(import_module(module), object)
+
+
+parser = argparse.ArgumentParser(
+ description="""Define the importnb loader properties."""
+)
+parser.add_argument("--cls", type=get_module_object, default="importnb:Notebook")
+parser.add_argument("--fuzzy", action="store_true")
+
+try:
+ from IPython.core import magic_arguments
+ from IPython.core.magic import (
+ Magics,
+ magics_class,
+ line_magic,
+ cell_magic,
+ line_cell_magic,
+ )
+
+ __IPYTHON__ = True
+except:
+ __IPYTHON__ = False
+
+
+class ImportNbExtensionBase:
+ loaders = None
+
+ def __init__(self, shell, loader=None):
+ self.loaders = []
+ # A default loader to install
+ if loader:
+ self.loaders.append(loader(position=-1).__enter__())
+
+
+if __IPYTHON__:
+
+ @magics_class
+ class ImportNbExtension(Magics, ImportNbExtensionBase):
+ loaders = None
+
+ def __init__(self, shell, loader=None):
+ Magics.__init__(self, shell)
+ ImportNbExtensionBase.__init__(self, shell, loader)
+
+ @line_cell_magic
+ def importnb(self, line, cell=None):
+ if line.strip() == "pop":
+ return self.pop()
+ module.__package__
+
+ details = vars(parser.parse_args(line.split()))
+ self.loaders.append(details.pop("cls")(**details))
+
+ if cell is None:
+ self.loaders[-1].__enter__()
+ return
+
+ with self.loaders.pop(-1):
+ self.parent.run_cell(cell)
+
+ def unload(self):
+ while self.loaders:
+ self.pop()
+
+ def pop(self):
+ self.loaders.pop().__exit__(None, None, None)
+
+
+else:
+
+ class ImportNbExtension(ImportNbExtensionBase):
+ ...
+
+
+manager = None
+
+
+def IPYTHON_MAIN():
+ """Decide if the Ipython command line is running code."""
+ import pkg_resources
+
+ runner_frame = inspect.getouterframes(inspect.currentframe())[-2]
+ return (
+ getattr(runner_frame, "function", None)
+ == pkg_resources.load_entry_point(
+ "ipython", "console_scripts", "ipython"
+ ).__name__
+ )
+
+
+def load_ipython_extension(ip=None):
+ global manager, module
+ if IPYTHON_MAIN():
+ from .parameterize import Parameterize as Notebook
+ else:
+ from .loader import Notebook
+
+ # Auto loading only works in IPython and
+ # we only read need it when there are parameters.
+ manager = ImportNbExtension(ip, Notebook)
+
+ if ip:
+ ip.register_magics(manager)
+ ip.user_ns.update(__path__=[str(__import__("pathlib").Path().absolute())])
+ from .completer import load_ipython_extension
+
+ load_ipython_extension(ip)
+
+ ip.user_ns["reload"] = __import__("importlib").reload
+
+
+def unload_ipython_extension(ip=None):
+ global manager
+ ip and manager and manager.unload()
+
+
+"""# Developer
+"""
+
+if __name__ == "__main__":
+ from importnb.utils.export import export
+
+ export("ipython_extension.ipynb", "../ipython_extension.py")
+ # m = Notebook(shell=True).from_filename('extensions.ipynb')
+ # print(__import__('doctest').testmod(m, verbose=2))
diff --git a/src/importnb/loader.py b/src/importnb/loader.py
new file mode 100644
index 0000000..5585650
--- /dev/null
+++ b/src/importnb/loader.py
@@ -0,0 +1,337 @@
+# coding: utf-8
+"""# `loader`
+
+Combine the __import__ finder with the loader.
+"""
+
+try:
+ from .finder import get_loader_details, FuzzySpec, FuzzyFinder
+ from .ipython_extension import load_ipython_extension, unload_ipython_extension
+ from .decoder import LineCacheNotebookDecoder, quote
+ from .docstrings import update_docstring
+except:
+ from finder import get_loader_details, FuzzySpec, FuzzyFinder
+ from ipython_extension import load_ipython_extension, unload_ipython_extension
+ from decoder import LineCacheNotebookDecoder, quote
+ from docstrings import update_docstring
+
+import ast
+import importlib
+import inspect
+import json
+import os
+import sys
+import textwrap
+import types
+from contextlib import ExitStack, contextmanager
+from functools import partial, partialmethod
+from importlib import reload
+from importlib.machinery import ModuleSpec, SourceFileLoader
+from importlib.util import spec_from_loader
+from inspect import signature
+from pathlib import Path
+
+_38 = sys.version_info.major == 3 and sys.version_info.minor == 8
+
+if _38:
+ from importlib._bootstrap import _load_unlocked, _requires_builtin
+else:
+ from importlib._bootstrap import _installed_safely, _requires_builtin
+
+
+
+try:
+ from importlib._bootstrap_external import decode_source, FileFinder
+ from importlib.util import module_from_spec
+ from importlib._bootstrap import _init_module_attrs
+ from importlib.util import LazyLoader
+except:
+ # python 3.4
+ from importlib._bootstrap import _SpecMethods
+ from importlib.util import decode_source
+ from importlib.machinery import FileFinder
+
+ def module_from_spec(spec):
+ return _SpecMethods(spec).create()
+
+ def _init_module_attrs(spec, module):
+ return _SpecMethods(spec).init_module_attrs(module)
+
+
+
+try:
+ import IPython
+ from IPython.core.inputsplitter import IPythonInputSplitter
+
+ dedent = IPythonInputSplitter(
+ line_input_checker=False,
+ physical_line_transforms=[
+ IPython.core.inputsplitter.leading_indent(),
+ IPython.core.inputsplitter.ipy_prompt(),
+ IPython.core.inputsplitter.cellmagic(end_on_blank_line=False),
+ ],
+ ).transform_cell
+except:
+ from textwrap import dedent
+
+__all__ = "Notebook", "reload"
+
+
+
+class FinderContextManager:
+ """
+ FinderContextManager is the base class for the notebook loader. It provides
+ a context manager that replaces `FileFinder` in the `sys.path_hooks` to include
+ an instance of the class in the python findering system.
+
+ >>> with FinderContextManager() as f:
+ ... id, ((loader_cls, _), *_) = get_loader_details()
+ ... assert issubclass(loader_cls, FinderContextManager)
+ >>> id, ((loader_cls, _), *_) = get_loader_details()
+ >>> loader_cls = inspect.unwrap(loader_cls)
+ >>> assert not (isinstance(loader_cls, type) and issubclass(loader_cls, FinderContextManager))
+ """
+
+ extensions = tuple()
+ _position = 0
+
+ finder = FileFinder
+
+ @property
+ def loader(self):
+ return type(self)
+
+ def __enter__(self):
+ id, details = get_loader_details()
+ details.insert(self._position, (self.loader, self.extensions))
+ sys.path_hooks[id] = self.finder.path_hook(*details)
+ sys.path_importer_cache.clear()
+ return self
+
+ def __exit__(self, *excepts):
+ id, details = get_loader_details()
+ details.pop(self._position)
+ sys.path_hooks[id] = self.finder.path_hook(*details)
+ sys.path_importer_cache.clear()
+
+
+"""## The basic loader
+
+The loader uses the import systems `get_source`, `get_data`, and `create_module` methods to import notebook files.
+"""
+
+
+class ModuleType(types.ModuleType, getattr(os, "PathLike", object)):
+ """ModuleType combines a module with a PathLike access to simplify access."""
+
+ def __fspath__(self):
+ return self.__file__
+
+
+class ImportLibMixin(SourceFileLoader):
+ """ImportLibMixin is a SourceFileLoader for loading source code from JSON (e.g. notebooks).
+
+ `get_data` assures consistent line numbers between the file s representatio and source."""
+
+ def create_module(self, spec):
+ module = ModuleType(spec.name)
+ _init_module_attrs(spec, module)
+ if isinstance(spec, FuzzySpec):
+ sys.modules[spec.alias] = module
+ if self.name:
+ module.__name__ = self.name
+ return module
+
+ def decode(self):
+ return decode_source(super().get_data(self.path))
+
+ def get_data(self, path):
+ """Needs to return the string source for the module."""
+ return LineCacheNotebookDecoder(
+ code=self.code, raw=self.raw, markdown=self.markdown
+ ).decode(self.decode(), self.path)
+
+ @classmethod
+ @_requires_builtin
+ def is_package(cls, fullname):
+ """Return False as built-in modules are never packages."""
+ if "." not in fullname:
+ return True
+ return super().is_package(fullname)
+
+ get_source = get_data
+
+
+class NotebookBaseLoader(ImportLibMixin, FinderContextManager):
+ """The simplest implementation of a Notebook Source File Loader.
+ """
+
+ extensions = (".ipynb",)
+ __slots__ = "_lazy", "_fuzzy", "_markdown_docstring", "_position"
+
+ def __init__(
+ self,
+ fullname=None,
+ path=None,
+ *,
+ lazy=False,
+ fuzzy=True,
+ position=0,
+ markdown_docstring=True
+ ):
+ super().__init__(fullname, path)
+ self._lazy = lazy
+ self._fuzzy = fuzzy
+ self._markdown_docstring = markdown_docstring
+ self._position = position
+
+ @property
+ def loader(self):
+ """Create a lazy loader source file loader."""
+ loader = super().loader
+ if self._lazy and (sys.version_info.major, sys.version_info.minor) != (3, 4):
+ loader = LazyLoader.factory(loader)
+ # Strip the leading underscore from slots
+ return partial(
+ loader,
+ **{object.lstrip("_"): getattr(self, object) for object in self.__slots__}
+ )
+
+ @property
+ def finder(self):
+ """Permit fuzzy finding of files with special characters."""
+ return self._fuzzy and FuzzyFinder or super().finder
+
+
+class FileModuleSpec(ModuleSpec):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._set_fileattr = True
+
+
+"""## notebooks most be loadable from files.
+"""
+
+
+class FromFileMixin:
+ """FromFileMixin adds a classmethod to load a notebooks from files.
+ """
+
+ @classmethod
+ def load(cls, filename, dir=None, main=False, **kwargs):
+ """Import a notebook as a module from a filename.
+
+ dir: The directory to load the file from.
+ main: Load the module in the __main__ context.
+
+ > assert Notebook.load('loader.ipynb')
+ """
+ name = main and "__main__" or Path(filename).stem
+ loader = cls(name, str(filename), **kwargs)
+ spec = FileModuleSpec(name, loader, origin=loader.path)
+ module = module_from_spec(spec)
+ cwd = str(Path(loader.path).parent)
+ try:
+
+ if _38:
+ sys.path.append(cwd)
+ module = _load_unlocked(spec)
+ else:
+ with ExitStack() as stack:
+ loader.name != "__main__" and stack.enter_context(
+ _installed_safely(module)
+ )
+ loader.exec_module(module)
+ finally:
+ sys.path.pop()
+
+ return module
+
+
+"""* Sometimes folks may want to use the current IPython shell to manage the code and input transformations.
+"""
+
+"""Use the `IPythonInputSplitter` to dedent and process magic functions.
+"""
+
+
+class TransformerMixin:
+ def code(self, str):
+ return dedent(str)
+
+ def markdown(self, str):
+ return quote(str)
+
+ def raw(self, str):
+ return textwrap.indent(str, "# ")
+
+ def visit(self, node):
+ return node
+
+
+"""## The `Notebook` finder & loader
+"""
+
+
+class Notebook(TransformerMixin, FromFileMixin, NotebookBaseLoader):
+ """Notebook is a user friendly file finder and module loader for notebook source code.
+
+ > Remember, restart and run all or it didn't happen.
+
+ Notebook provides several useful options.
+
+ * Lazy module loading. A module is executed the first time it is used in a script.
+ """
+
+ __slots__ = NotebookBaseLoader.__slots__ + ("_main",)
+
+ def __init__(
+ self,
+ fullname=None,
+ path=None,
+ lazy=False,
+ position=0,
+ fuzzy=True,
+ markdown_docstring=True,
+ main=False,
+ ):
+ self._main = bool(main) or fullname == "__main__"
+ super().__init__(
+ self._main and "__main__" or fullname,
+ path,
+ lazy=lazy,
+ fuzzy=fuzzy,
+ position=position,
+ markdown_docstring=markdown_docstring,
+ )
+
+ def parse(self, nodes):
+ return ast.parse(nodes, self.path)
+
+ def source_to_code(self, nodes, path, *, _optimize=-1):
+ """* Convert the current source to ast
+ * Apply ast transformers.
+ * Compile the code."""
+ if not isinstance(nodes, ast.Module):
+ nodes = self.parse(nodes)
+ if self._markdown_docstring:
+ nodes = update_docstring(nodes)
+ return super().source_to_code(
+ ast.fix_missing_locations(self.visit(nodes)), path, _optimize=_optimize
+ )
+
+
+"""# Developer
+"""
+
+""" Notebook.load('loader.ipynb')
+
+"""
+
+if __name__ == "__main__":
+ try:
+ from utils.export import export
+ except:
+ from .utils.export import export
+ export("loader.ipynb", "../loader.py")
+ print(__import__("doctest").testmod(Notebook.load("loader.ipynb"), verbose=2))
diff --git a/src/importnb/parameterize.py b/src/importnb/parameterize.py
new file mode 100644
index 0000000..027df14
--- /dev/null
+++ b/src/importnb/parameterize.py
@@ -0,0 +1,173 @@
+# coding: utf-8
+"""# Parameterize
+
+The parameterize loader allows notebooks to be used as functions and command line tools. A `Parameterize` loader will convert an literal ast assigments to keyword arguments for the module.
+"""
+
+try:
+ from .loader import Notebook, module_from_spec
+except:
+ from loader import Notebook, module_from_spec
+import argparse
+import ast
+import inspect
+import sys
+from copy import deepcopy
+from functools import partial, partialmethod
+from importlib.util import find_spec, spec_from_loader
+from inspect import Parameter, Signature, signature
+from pathlib import Path
+
+_38 = sys.version_info.major == 3 and sys.version_info.minor == 8
+
+if _38:
+ from importlib._bootstrap import _load_unlocked
+else:
+ from importlib._bootstrap import _installed_safely
+
+
+class FindReplace(ast.NodeTransformer):
+ def __init__(self, globals, parser):
+ self.globals = globals
+ self.parser = parser
+ self.argv = sys.argv[1:]
+ self.parameters = []
+
+ def visit_Assign(self, node):
+ if len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
+ target, parameter = node.targets[0].id, node.value
+ try:
+ parameter = ast.literal_eval(parameter)
+ except:
+ return node
+
+ if target[0].lower():
+ extras = {}
+ if isinstance(parameter, bool):
+ extras.update(
+ action="store_" + ["true", "false"][parameter],
+ help="{} = {}".format(target, not parameter),
+ )
+ else:
+ extras.update(
+ help="{} : {} = {}".format(
+ target, type(parameter).__name__, parameter
+ )
+ )
+ try:
+ self.parser.add_argument(
+ "--%s" % target, default=parameter, **extras
+ )
+ except argparse.ArgumentError:
+ ...
+ self.parameters.append(
+ Parameter(target, Parameter.KEYWORD_ONLY, default=parameter)
+ )
+ if ("-h" not in self.argv) and ("--help" not in self.argv):
+ ns, self.argv = self.parser.parse_known_args(self.argv)
+ if target in self.globals:
+ node = ast.Expr(ast.Str("Skipped"))
+ elif getattr(ns, target) != parameter:
+ node.value = ast.parse(str(getattr(ns, target))).body[0].value
+ return node
+
+ @property
+ def signature(self):
+ return Signature(self.parameters)
+
+ def visit_Module(self, node):
+ node.body = list(map(self.visit, node.body))
+ self.parser.description = ast.get_docstring(node)
+ self.parser.parse_known_args(self.argv) # run in case there is a help arugment
+ return node
+
+ def generic_visit(self, node):
+ return node
+
+
+def copy_(module):
+ new = type(module)(module.__name__)
+ return new.__dict__.update(**vars(module)) or new
+
+
+class Parameterize(Notebook):
+ __slots__ = Notebook.__slots__ + ("globals",)
+
+ def __init__(
+ self,
+ fullname=None,
+ path=None,
+ *,
+ lazy=False,
+ fuzzy=True,
+ markdown_docstring=True,
+ position=0,
+ globals=None,
+ main=False,
+ **_globals
+ ):
+ super().__init__(
+ fullname, path, lazy=lazy, fuzzy=fuzzy, position=position, main=main
+ )
+ self.globals = globals or {}
+ self.globals.update(**_globals)
+ self._visitor = FindReplace(
+ self.globals, argparse.ArgumentParser(prog=self.name)
+ )
+
+ def exec_module(self, module):
+ self._visitor = FindReplace(self.globals, self._visitor.parser)
+ module.__dict__.update(**self.globals)
+ return super().exec_module(module)
+
+ def visit(self, node):
+ return super().visit(self._visitor.visit(node))
+
+ @classmethod
+ def load(cls, object, **globals):
+ return parameterize(super().load(object), **globals)
+
+
+""" with Parameterize():
+ reload(foo)
+
+ with Parameterize(a=1234123):
+ reload(foo)
+
+ with Parameterize(a="🤘"):
+ reload(foo)
+"""
+
+""" import foo
+"""
+
+
+def parameterize(object, **globals):
+ with Parameterize(**globals):
+ if isinstance(object, str):
+ object = module_from_spec(find_spec(object))
+
+ object.__loader__ = Parameterize(
+ object.__loader__.name, object.__loader__.path, **globals
+ )
+
+ def call(**parameters):
+ nonlocal object, globals
+ object = copy_(object)
+ keywords = {}
+ keywords.update(**globals), keywords.update(**parameters)
+ if _38:
+ Parameterize(object.__name__, object.__file__, **keywords).exec_module(
+ object
+ )
+ else:
+ with _installed_safely(object):
+ Parameterize(object.__name__, object.__file__, **keywords).exec_module(
+ object
+ )
+ return object
+
+ object.__loader__.get_code(object.__name__)
+ call.__doc__ = object.__doc__ or object.__loader__._visitor.parser.format_help()
+ call.__signature__ = object.__loader__._visitor.signature
+ return call
diff --git a/src/importnb/remote.py b/src/importnb/remote.py
new file mode 100644
index 0000000..0296ba6
--- /dev/null
+++ b/src/importnb/remote.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+"""# A reloadable remote notebook importer
+"""
+
+""" # Run this cell to use on binder then re run the notebook
+ !importnb-install
+"""
+
+""" >>> with remote("https://gist.githubusercontent.com/tonyfast/e7fb55934168744926961f02f6171c6a/raw/*.ipynb"):
+ ... import black_formatter #doctest: +ELLIPSIS
+
+ >>> with black_formatter.__loader__:
+ ... importlib.reload(black_formatter) #doctest: +ELLIPSIS
+
+ >>> black_formatter2 = Remote().from_filename(black_formatter.__file__)
+
+ >>> with black_formatter.__loader__:
+ ... importlib.reload(black_formatter2) #doctest: +ELLIPSIS
+"""
+
+''' >>> with remote("""https://raw.githubusercontent.com/deathbeds/importnb/master/src/importnb/tests/*.ipynb"""):
+ ... import test_importnb
+'''
+
+"""The dependencies don't exist on binder for the examples below.
+"""
+
+""" >>> Remote(exceptions=BaseException).from_filename(
+ "https://raw.githubusercontent.com/jakevdp/PythonDataScienceHandbook/master/notebooks/06.00-Figure-Code.ipynb")
+
+ >>> with rempte("https://raw.githubusercontent.com/bloomberg/bqplot/master/examples/Marks/Object%20Model/*.ipynb"):
+ ... import Hist
+
+ >>> Hist.Figure(marks=[Hist.hist], axes=[Hist.ax_x, Hist.ax_y], padding_y=0)
+
+"""
+
+import importlib.machinery
+import importlib.util
+import inspect
+import sys
+import types
+import urllib.error
+import urllib.request
+from importlib.util import decode_source
+
+from .decoder import LineCacheNotebookDecoder
+from .loader import FileModuleSpec, Notebook
+
+cache = {}
+
+
+def urlopen(path):
+ try:
+ return urllib.request.urlopen(path)
+ except urllib.error.HTTPError as Exception:
+ ...
+
+
+class RemoteMixin:
+ def decode(self):
+ global cache
+ return decode_source(cache.pop(self.path, urlopen(self.path)).read())
+
+ def __enter__(self):
+ super().__enter__()
+ sys.meta_path.append(self)
+ return self
+
+ def __exit__(self, *args):
+ sys.meta_path.pop(sys.meta_path.index(self))
+ super().__exit__(*args)
+
+ def find_spec(self, fullname, path=None, *args, **kwargs):
+ global cache
+ # if '.' in fullname and fullname.split('.', 1)[0] in sys.modules:
+ # return
+
+ url = self.path.replace("*", fullname)
+ if fullname in sys.modules:
+ return sys.modules[fullname].__spec__
+ if url not in cache:
+ cache[url] = urlopen(url)
+ if cache[url]:
+ spec = FileModuleSpec(fullname, type(self)(fullname, url), origin=url)
+ spec._set_fileattr = True
+ return spec
+
+
+class RemoteBase(RemoteMixin, Notebook):
+ ...
+
+
+def Remote(path=None, loader=Notebook, **globals):
+ """A remote notebook finder. Place a `*` into a url
+ to generalize the finder. It returns a context manager
+ """
+
+ class Remote(RemoteMixin, loader):
+ ...
+
+ return Remote(path=path, **globals)
+
+
+if __name__ == "__main__":
+ try:
+ from utils.export import export
+ except:
+ from .utils.export import export
+ export("remote.ipynb", "../remote.py")
diff --git a/src/importnb/utils/__init__.py b/src/importnb/utils/__init__.py
new file mode 100644
index 0000000..b7c410a
--- /dev/null
+++ b/src/importnb/utils/__init__.py
@@ -0,0 +1 @@
+from .export import export
diff --git a/src/importnb/utils/export.py b/src/importnb/utils/export.py
new file mode 100644
index 0000000..e3d3f46
--- /dev/null
+++ b/src/importnb/utils/export.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+"""# The `export` module
+
+...provides compatibility for Python and IPython through [`compile_python`](compile_python.ipynb) and [`compile_ipython`](compile_ipython.ipynb), respectively.
+
+ >>> from importnb.utils.export import export
+"""
+
+try:
+ from ..loader import dedent
+except:
+ from importnb.loader import dedent
+from pathlib import Path
+
+try:
+ from black import format_str
+except:
+ format_str = lambda x, i: x
+from json import loads
+
+
+def block_str(str):
+ quotes = '"""'
+ if quotes in str:
+ quotes = "'''"
+ return "{quotes}{str}\n{quotes}\n".format(quotes=quotes, str=str)
+
+
+"""The export function
+"""
+
+
+def export(file, to=None):
+ code = """# coding: utf-8"""
+ with open(str(file), "r") as f:
+ for cell in loads(f.read())["cells"]:
+ if cell["cell_type"] == "markdown":
+ code += "\n" + block_str("".join(cell["source"]))
+ elif cell["cell_type"] == "code":
+ code += "\n" + dedent("".join(cell["source"]))
+ to and Path(to).with_suffix(".py").write_text(format_str(code, 100))
+ return code
+
+
+if __name__ == "__main__":
+ export("export.ipynb", "../../utils/export.py")
+ try:
+ import export as this
+ except:
+ from . import export as this
+ __import__("doctest").testmod(this, verbose=2)
diff --git a/src/importnb/utils/ipython.py b/src/importnb/utils/ipython.py
new file mode 100644
index 0000000..74f0961
--- /dev/null
+++ b/src/importnb/utils/ipython.py
@@ -0,0 +1,75 @@
+# coding: utf-8
+from IPython import paths, get_ipython
+from IPython.core import profiledir
+from pathlib import Path
+import json, ast
+import os
+
+
+def get_config(profile="default"):
+ profile_dir = profiledir.ProfileDir()
+ try:
+ profile = profile_dir.find_profile_dir_by_name(paths.get_ipython_dir(), profile)
+ except profiledir.ProfileDirError:
+ os.makedirs(paths.get_ipython_dir(), exist_ok=True)
+ profile = profile_dir.create_profile_dir_by_name(
+ paths.get_ipython_dir(), profile
+ )
+ return Path(profile.location, "ipython_config.json")
+
+
+def load_config():
+ location = get_config()
+ try:
+ with location.open() as file:
+ config = json.load(file)
+ except (FileNotFoundError, getattr(json, "JSONDecodeError", ValueError)):
+ config = {}
+
+ if "InteractiveShellApp" not in config:
+ config["InteractiveShellApp"] = {}
+
+ if "extensions" not in config["InteractiveShellApp"]:
+ config["InteractiveShellApp"]["extensions"] = []
+
+ return config, location
+
+
+import sys
+
+
+def install(project="importnb"):
+ config, location = load_config()
+ projects = sys.argv[1:] or [project]
+ if not installed(project):
+ config["InteractiveShellApp"]["extensions"].extend(projects)
+
+ with location.open("w") as file:
+ json.dump(config, file)
+
+ print("""<3 {}""".format(projects))
+
+
+def installed(project):
+ config, location = load_config()
+ return project in config.get("InteractiveShellApp", {}).get("extensions", [])
+
+
+def uninstall(project="importnb"):
+ config, location = load_config()
+ projects = sys.argv[1:] or [project]
+ config["InteractiveShellApp"]["extensions"] = [
+ ext
+ for ext in config["InteractiveShellApp"]["extensions"]
+ if ext not in projects
+ ]
+
+ with location.open("w") as file:
+ json.dump(config, file)
+ print("""3 {}.""".format(projects))
+
+
+if __name__ == "__main__":
+ from importnb.utils.export import export
+
+ export("ipython.ipynb", "../../utils/ipython.py")
diff --git a/src/importnb/utils/nbdoctest.py b/src/importnb/utils/nbdoctest.py
new file mode 100644
index 0000000..962505d
--- /dev/null
+++ b/src/importnb/utils/nbdoctest.py
@@ -0,0 +1,76 @@
+# coding: utf-8
+from doctest import OPTIONFLAGS_BY_NAME, testfile, testmod, FAIL_FAST
+import os, argparse
+
+try:
+ from ..loader import Notebook
+except:
+ from importnb import Notebook
+
+
+def _test():
+ parser = argparse.ArgumentParser(description="doctest runner")
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="store_true",
+ default=False,
+ help="print very verbose output for all tests",
+ )
+ parser.add_argument(
+ "-o",
+ "--option",
+ action="append",
+ choices=OPTIONFLAGS_BY_NAME.keys(),
+ default=[],
+ help=(
+ "specify a doctest option flag to apply"
+ " to the test run; may be specified more"
+ " than once to apply multiple options"
+ ),
+ )
+ parser.add_argument(
+ "-f",
+ "--fail-fast",
+ action="store_true",
+ help=(
+ "stop running tests after first failure (this"
+ " is a shorthand for -o FAIL_FAST, and is"
+ " in addition to any other -o options)"
+ ),
+ )
+ parser.add_argument("file", nargs="+", help="file containing the tests to run")
+ args = parser.parse_args()
+ testfiles = args.file
+ # Verbose used to be handled by the "inspect argv" magic in DocTestRunner,
+ # but since we are using argparse we are passing it manually now.
+ verbose = args.verbose
+ options = 0
+ for option in args.option:
+ options |= OPTIONFLAGS_BY_NAME[option]
+ if args.fail_fast:
+ options |= FAIL_FAST
+ for filename in testfiles:
+ if any(map(filename.endswith, (".py", ".ipynb"))):
+ # It is a module -- insert its dir into sys.path and try to
+ # import it. If it is part of a package, that possibly
+ # won't work because of package imports.
+ failures, _ = testmod(
+ Notebook.load(filename), verbose=verbose, optionflags=options
+ )
+ else:
+ failures, _ = testfile(
+ filename, module_relative=False, verbose=verbose, optionflags=options
+ )
+ if failures:
+ return 1
+ return 0
+
+
+if __name__ == "__main__":
+ _test()
+
+if __name__ == "__main__":
+ from .export import export
+
+ export("nbdoctest.ipynb", "../../utils/nbdoctest.py")
diff --git a/src/importnb/utils/pytest_importnb.py b/src/importnb/utils/pytest_importnb.py
new file mode 100644
index 0000000..0dceb92
--- /dev/null
+++ b/src/importnb/utils/pytest_importnb.py
@@ -0,0 +1,66 @@
+# coding: utf-8
+"""A `pytest` plugin for importing notebooks as modules and using standard test discovered.
+
+The `AlternativeModule` is reusable. See `pidgin` for an example.
+"""
+
+from importnb import Notebook
+
+import importlib, pytest, abc, pathlib, _pytest, functools
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption("--main", action="store_true", help="Run in the main context.")
+
+
+"""`AlternativeModule` is an alternative `pytest.Module` loader that can enable `pytest.Doctest`.
+"""
+
+
+class AlternativeModule(pytest.Module):
+ def _getobj(self):
+ return self.loader(
+ getattr(self.parent.config.option, "main", None) and "__main__" or None
+ ).load(str(self.fspath))
+
+ def collect(self):
+ yield from super().collect()
+ if self.parent.config.option.doctestmodules:
+ self.fspath.pyimport = functools.partial(
+ self.fspath.pyimport, modname=self._obj.__name__
+ )
+ yield from _pytest.doctest.DoctestModule.collect(self)
+
+
+"""`NotebookModule` is an `AlternativeModule` to load `Notebook`s.
+"""
+
+
+class NotebookModule(AlternativeModule):
+ loader = Notebook
+
+
+class AlternativeSourceText(abc.ABCMeta):
+ def __call__(self, parent, path):
+ for module in self.modules:
+ if "".join(pathlib.Path(str(path)).suffixes) in module.loader.extensions:
+ if not parent.session.isinitpath(path):
+ for pat in parent.config.getini("python_files"):
+ if path.fnmatch(pat.rstrip(".py") + path.ext):
+ break
+ else:
+ return
+ return module(path, parent)
+
+
+class NotebookTests(metaclass=AlternativeSourceText):
+ modules = (NotebookModule,)
+
+
+pytest_collect_file = NotebookTests.__call__
+
+if __name__ == "__main__":
+ from importnb.utils.export import export
+
+ export("pytest_importnb.ipynb", "../../utils/pytest_importnb.py")
diff --git a/src/importnb/utils/setup.py b/src/importnb/utils/setup.py
new file mode 100644
index 0000000..3fafd14
--- /dev/null
+++ b/src/importnb/utils/setup.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+"""It is important to distribute notebooks in packages during the initial stages of code development. This notebook creates a setuptools command class that allows for both python and notebook imports. This was specifically created to allow notebooks as py_module imports, but could serve a greater purpose.
+"""
+
+""" class BuildWithNotebooks(setuptools.command.build_py.build_py):
+ def __new__(cls, distribution):
+ from importnb.utils.setup import build_ipynb
+ return build_ipynb(distribution)
+ setup_args.update(cmdclass=dict(build_py=BuildWithNotebooks))
+
+"""
+
+from setuptools.command.build_py import build_py
+import sys, os
+from pathlib import Path
+import importlib
+
+
+class build_ipynb(build_py):
+ """Should really use manifest.in
+
+ Lazy import build_ipynb in your setup.
+
+ class BuildWithNotebooks(setuptools.command.build_py.build_py):
+ def __new__(cls, distribution):
+ from importnb.utils.setup import build_ipynb
+ return build_ipynb(distribution)
+ setup_args.update(cmdclass=dict(build_py=BuildWithNotebooks))
+ """
+
+ def get_module_outfile(self, build_dir, package, module):
+ module_mapper = {module[1]: module[2] for module in self.find_all_modules()}
+ outfile_path = [build_dir] + list(package) + [module_mapper[module]]
+ return os.path.join(*outfile_path)
+
+ def find_package_modules(self, package, package_dir):
+ from glob import glob
+
+ self.check_package(package, package_dir)
+ module_files = glob(os.path.join(package_dir, "*.py"))
+ modules = []
+ setup_script = os.path.abspath(self.distribution.script_name)
+
+ for f in module_files + glob(os.path.join(package_dir, "*.ipynb")):
+ abs_f = os.path.abspath(f)
+ if abs_f != setup_script:
+ module = os.path.splitext(os.path.basename(f))[0]
+ modules.append((package, module, f))
+ else:
+ self.debug_print("excluding %s" % setup_script)
+ return modules
+
+ def find_modules(self):
+ packages, modules = {}, []
+
+ for module in self.py_modules:
+ path = module.split(".")
+ package = ".".join(path[0:-1])
+ module_base = path[-1]
+
+ try:
+ (package_dir, checked) = packages[package]
+ except KeyError:
+ package_dir = self.get_package_dir(package)
+ checked = 0
+
+ if not checked:
+ init_py = self.check_package(package, package_dir)
+ packages[package] = (package_dir, 1)
+ if init_py:
+ modules.append((package, "__init__", init_py))
+
+ module_file = os.path.join(package_dir, module_base + ".ipynb")
+
+ if Path(module_file).exists():
+ modules.append((package, module_base, str(module_file)))
+ else:
+ module_file = str(Path(module_file).with_suffix(".py"))
+ if self.check_module(module, module_file):
+ modules.append((package, module_base, str(module_file)))
+
+ return modules
+
+
+if __name__ == "__main__":
+ try:
+ from ..loader import export
+ except:
+ from importnb.loader import export
+ export("setup.ipynb", "../../utils/setup.py")
diff --git a/tests/foobar.ipynb b/tests/foobar.ipynb
new file mode 100644
index 0000000..9e2b2a9
--- /dev/null
+++ b/tests/foobar.ipynb
@@ -0,0 +1,100 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# `foobar` is a test notebook \n",
+ "\n",
+ "This notebook is loaded in as a module. A convention held by `importnb` is that the first cell, if it is markdown with become the docstring of the module."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "All other markdown cells are converted to string expressions in the python module. Except when the Markdown cell preceeds a `def or class` if and only if no docstring is supplied."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " a_parameter = 'foo'\n",
+ " not_a_parameter = __name__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def function_with_python_docstring():\n",
+ " \"\"\"This docstring will not be replaced.\"\"\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "`function_with_markdown_string` will have this Markdown cell as its docstring. What is cool is that we can include `doctest`s.\n",
+ "\n",
+ " >>> assert True"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def function_with_markdown_docstring(): ..."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " _repr_markdown_ = lambda: \"\"\"# A Custom Markdown Representaiton\n",
+ " \n",
+ " This module is named {__name__} and it original repr was:\n",
+ " \n",
+ " {original}\n",
+ " \"\"\".format(original=repr(__import__(__name__)), **globals())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "This cell is not a docstring, but will docstring, but is captured in tests."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/tests/foobar.py b/tests/foobar.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/foobar2.ipynb b/tests/foobar2.ipynb
new file mode 100644
index 0000000..1969abd
--- /dev/null
+++ b/tests/foobar2.ipynb
@@ -0,0 +1,93 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# `foobar` is a test notebook \n",
+ "\n",
+ "This notebook is loaded in as a module. A convention held by `importnb` is that the first cell, if it is markdown with become the docstring of the module."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "All other markdown cells are converted to string expressions in the python module. Except when the Markdown cell preceeds a `def or class` if and only if no docstring is supplied."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " a_parameter = 'foo'\n",
+ " not_a_parameter = __name__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def function_with_python_docstring():\n",
+ " \"\"\"This docstring will not be replaced.\"\"\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "`function_with_markdown_string` will have this Markdown cell as its docstring. What is cool is that we can include `doctest`s.\n",
+ "\n",
+ " >>> assert True"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def function_with_markdown_docstring(): ..."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " _repr_markdown_ = lambda: \"\"\"# A Custom Markdown Representaiton\n",
+ " \n",
+ " This module is named {__name__} and it original repr was:\n",
+ " \n",
+ " {original}\n",
+ " \"\"\".format(original=repr(__import__(__name__)), **globals())"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/tests/foobaz/__init__.py b/tests/foobaz/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/foobaz/foobar.ipynb b/tests/foobaz/foobar.ipynb
new file mode 100644
index 0000000..a733ab3
--- /dev/null
+++ b/tests/foobaz/foobar.ipynb
@@ -0,0 +1,53 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "This is the docstring.\n",
+ " \n",
+ " >>> assert True\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "foo = 42\n",
+ "assert foo\n",
+ "bar= 100"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "print(foo)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Markdown paragraph"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "_repr_markdown_ = lambda: 'a custom repr {foo}'.format(foo=foo)"
+ ]
+ }
+ ],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/tests/lazy_test.ipynb b/tests/lazy_test.ipynb
new file mode 100644
index 0000000..6898632
--- /dev/null
+++ b/tests/lazy_test.ipynb
@@ -0,0 +1,58 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Use the stdout to test the lazy import"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " foo = 42"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "42\n"
+ ]
+ }
+ ],
+ "source": [
+ " print(foo)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/tests/test_importnb.ipynb b/tests/test_importnb.ipynb
new file mode 100644
index 0000000..8e2e73c
--- /dev/null
+++ b/tests/test_importnb.ipynb
@@ -0,0 +1,346 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "__tests__ for `importnb`"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " from importnb import Notebook, reload\n",
+ " from importnb.parameterize import parameterize, Parameterize\n",
+ " from importnb.remote import Remote\n",
+ " from pytest import fixture, raises, mark\n",
+ " import json, linecache, inspect, ast, sys, io\n",
+ " from pathlib import Path\n",
+ " import contextlib\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " try: from IPython import get_ipython\n",
+ " except: get_ipython = lambda: None\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Marks"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " ipy = mark.skipif(not get_ipython(), reason=\"\"\"Not IPython.\"\"\")\n",
+ " py34 = sys.version_info.major == 3 and sys.version_info.minor == 4"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_basic():\n",
+ " with Notebook(): \n",
+ " import foobar\n",
+ " reload(foobar)\n",
+ " assert isinstance(foobar.__loader__, Notebook)\n",
+ " assert foobar.__test__\n",
+ " del foobar"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " @fixture\n",
+ " def module():\n",
+ " sys.path_importer_cache.clear()\n",
+ " with Notebook(): \n",
+ " import foobar\n",
+ " yield reload(foobar)\n",
+ " del foobar"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " @fixture\n",
+ " def package():\n",
+ " with Notebook(): \n",
+ " import foobaz.foobar\n",
+ " yield foobaz.foobar\n",
+ " del foobaz"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_package(package, module):\n",
+ " assert not module.__package__\n",
+ " assert package.__package__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_reload(module): \n",
+ " \"\"\"delete a method from the module and see if it is recovered\"\"\"\n",
+ " del module._repr_markdown_\n",
+ " \n",
+ " \"\"\"The contextmanager is required.\"\"\"\n",
+ " with Notebook():\n",
+ " reload(module)\n",
+ " assert hasattr(module, '_repr_markdown_')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_docstrings(module):\n",
+ " assert module.__doc__\n",
+ " assert module.function_with_markdown_docstring.__doc__\n",
+ " assert module.function_with_python_docstring.__doc__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_docstring_opts(module):\n",
+ " with Notebook(markdown_docstring=False):\n",
+ " reload(module)\n",
+ " assert module.__doc__\n",
+ " assert not module.function_with_markdown_docstring.__doc__\n",
+ " assert module.function_with_python_docstring.__doc__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_from_file(module):\n",
+ " new = Notebook.load(module.__file__)\n",
+ " assert module is not new"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " @mark.skipif(py34, reason=\"There is no py34 lazyloader.\")\n",
+ " def test_lazy(capsys):\n",
+ " \"\"\"Use stdout to test this despite there probably being a better way\"\"\"\n",
+ " with Notebook(lazy=True): \n",
+ " module = reload(__import__('lazy_test'))\n",
+ " assert not capsys.readouterr()[0], capsys.readouterr()[0]\n",
+ " module.foo, \"The function gets executed here\"\n",
+ " assert capsys.readouterr()[0]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_module_source(module): \n",
+ " with raises(getattr(json, 'JSONDecodeError', ValueError)):\n",
+ " json.loads(''.join(linecache.cache[module.__file__][2]))\n",
+ " assert inspect.getsource(module).strip() == ''.join(linecache.cache[module.__file__][2]).strip()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " import sys"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " @mark.skipif(py34, reason=\"I don't know why this fails on 3.4.\")\n",
+ " def test_main():\n",
+ " with Notebook('__main__'):\n",
+ " try: del sys.modules['foobar']\n",
+ " finally: import foobar\n",
+ " \n",
+ " assert foobar.__name__ == '__main__'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_object_source(module): \n",
+ " assert ast.parse(inspect.getsource(module.function_with_markdown_docstring)), \"\"\"The source is invalid\"\"\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_python_file():\n",
+ " import foobar as module\n",
+ " assert reload(module).__file__.endswith('.py'), \"\"\"Python didn't take precedent.\"\"\"\n",
+ " with Notebook(): assert reload(module).__file__.endswith('.ipynb')\n",
+ " assert reload(module).__file__.endswith('.py')\n",
+ " with Notebook(): assert reload(module).__file__.endswith('.ipynb')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " @ipy\n",
+ " def test_cli(module): \n",
+ " __import__('subprocess').check_call(\n",
+ " 'ipython -m {}'.format(module.__name__).split(), cwd=str(Path(module.__file__).parent))\n",
+ " __import__('subprocess').check_call(\n",
+ " 'ipython -m importnb -- {}'.format(module.__file__).split(), cwd=str(Path(module.__file__).parent))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_parameterize(module):\n",
+ " f = parameterize(module)\n",
+ " assert 'a_parameter' in f.__signature__.parameters\n",
+ " assert 'not_a_parameter' not in f.__signature__.parameters\n",
+ " assert isinstance(f(), type(module))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_minified_json(module): \n",
+ " with open(module.__file__) as f, open('foobarmin.ipynb', 'w') as o: \n",
+ " json.dump(json.load(f), o, indent=None)\n",
+ " \n",
+ " with Notebook():\n",
+ " import foobarmin \n",
+ " \n",
+ " assert inspect.getsource(foobarmin.function_with_markdown_docstring)\n",
+ " \n",
+ " with open(foobarmin.__file__) as file:\n",
+ " assert json.load(file)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_fuzzy_finder(): \n",
+ " import __bar\n",
+ " assert __bar.__name__ == 'foobar'\n",
+ " del __bar"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ " def test_remote(monkeypatch):\n",
+ " def mocked(url):\n",
+ " class DummyNotebook(object):\n",
+ " def read(self):\n",
+ " with open('tests/foobar.ipynb', 'rb') as file: return file.read()\n",
+ " return DummyNotebook()\n",
+ " monkeypatch.setattr(__import__('urllib').request, \"urlopen\", mocked)\n",
+ " with Remote('http://0.0.0.0:8888/*.ipynb'): \n",
+ " module = __import__('tests/foobar')\n",
+ " assert module.__file__.startswith('http')\n",
+ " assert module.function_with_markdown_docstring"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "`importnb` should be able to transform expressions beginning with magics.\n",
+ "\n",
+ " def test_magic_syntax():..."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..e500093
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,14 @@
+[tox]
+envlist = ipython, python
+
+[pytest]
+addopts = --verbose --doctest-modules
+
+[testenv]
+deps=
+ pytest
+ ipython: ipython
+ ipython: nbconvert
+commands=
+ python: python setup.py test
+ ipython: ipython setup.py test