diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000000..15e202241a --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,103 @@ +name: packages +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+a[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+b[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+' + +jobs: + conda_build: + name: Build Conda Packages + runs-on: 'ubuntu-latest' + defaults: + run: + shell: bash -l {0} + env: + CHANS_DEV: "-c pyviz/label/dev -c bokeh" + PKG_TEST_PYTHON: "--test-python=py37" + PYTHON_VERSION: "3.7" + CHANS: "-c pyviz" + MPLBACKEND: "Agg" + CONDA_UPLOAD_TOKEN: ${{ secrets.CONDA_UPLOAD_TOKEN }} + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: "100" + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: conda-incubator/setup-miniconda@v2 + with: + miniconda-version: "latest" + python-version: 3.8 + - name: Fetch unshallow + run: git fetch --prune --tags --unshallow -f + - name: Set output + id: vars + run: echo ::set-output name=tag::${GITHUB_REF#refs/*/} + - name: conda setup + run: | + conda config --set always_yes True + conda install -c pyviz "pyctdev>=0.5" + doit ecosystem_setup + - name: conda build + run: doit package_build $CHANS_DEV $PKG_TEST_PYTHON --test-group=unit + - name: conda dev upload + if: (contains(steps.vars.outputs.tag, 'a') || contains(steps.vars.outputs.tag, 'b') || contains(steps.vars.outputs.tag, 'rc')) + run: doit package_upload --token=$CONDA_UPLOAD_TOKEN --label=dev + - name: conda main upload + if: (!(contains(steps.vars.outputs.tag, 'a') || contains(steps.vars.outputs.tag, 'b') || contains(steps.vars.outputs.tag, 'rc'))) + run: doit package_upload --token=$CONDA_UPLOAD_TOKEN --label=dev --label=main + pip_build: + name: Build PyPI Packages + runs-on: 'ubuntu-latest' + defaults: + run: + shell: bash -l {0} + env: + CHANS_DEV: "-c pyviz/label/dev -c bokeh" + PKG_TEST_PYTHON: "--test-python=py37" + 
PYTHON_VERSION: "3.7" + CHANS: "-c pyviz" + MPLBACKEND: "Agg" + PPU: ${{ secrets.PPU }} + PPP: ${{ secrets.PPP }} + PYPI: "https://upload.pypi.org/legacy/" + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: "100" + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: conda-incubator/setup-miniconda@v2 + with: + miniconda-version: "latest" + python-version: 3.8 + - name: Fetch unshallow + run: git fetch --prune --tags --unshallow -f + - name: conda setup + run: | + conda config --set always_yes True + conda install -c pyviz "pyctdev>=0.5" + doit ecosystem_setup + doit env_create $CHANS_DEV --python=$PYTHON_VERSION + - name: env setup + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + doit develop_install $CHANS_DEV -o unit_tests + pip uninstall -y holoviews + doit pip_on_conda + - name: pip build + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + doit ecosystem=pip package_build + - name: pip upload + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + doit ecosystem=pip package_upload -u $PPU -p $PPP -r $PYPI diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000000..980113b366 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,93 @@ +name: docs +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+a[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+b[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+' + +jobs: + test_suite: + name: Documentation + runs-on: 'ubuntu-latest' + strategy: + fail-fast: false + timeout-minutes: 120 + defaults: + run: + shell: bash -l {0} + env: + DESC: "Documentation build" + HV_REQUIREMENTS: "doc" + CHANS_DEV: "-c pyviz/label/dev -c bokeh -c conda-forge" + CHANS: "-c pyviz" + MPLBACKEND: "Agg" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + 
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + MOZ_HEADLESS: 1 + PANEL_EMBED: "true" + PANEL_EMBED_JSON: "true" + PANEL_EMBED_JSON_PREFIX: "json" + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: "100" + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: conda-incubator/setup-miniconda@v2 + with: + miniconda-version: "latest" + - name: Fetch unshallow + run: git fetch --prune --tags --unshallow -f + - name: Set output + id: vars + run: echo ::set-output name=tag::${GITHUB_REF#refs/*/} + - name: conda setup + run: | + conda config --set always_yes True + conda install -c pyviz "pyctdev>=0.5" + doit ecosystem_setup + doit env_create ${{ env.CHANS_DEV}} --python=3.7 + - name: doit develop_install + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + conda list + doit develop_install ${{ env.CHANS_DEV}} -o doc + conda install -c conda-forge geckodriver selenium + - name: bokeh sampledata + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + bokeh sampledata + - name: generate rst + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + nbsite generate-rst --org holoviz --project-name holoviews + - name: refmanual + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + python ./doc/generate_modules.py holoviews -d ./doc/reference_manual -n holoviews -e tests + - name: build docs + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + nbsite build --what=html --output=builtdocs --org holoviz --project-name holoviews + - name: upload dev + if: (contains(steps.vars.outputs.tag, 'a') || contains(steps.vars.outputs.tag, 'b') || contains(steps.vars.outputs.tag, 'rc')) + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + aws s3 sync --quiet ./builtdocs s3://dev.holoviews.org/ + - name: upload main + if: (!(contains(steps.vars.outputs.tag, 'a') || 
contains(steps.vars.outputs.tag, 'b') || contains(steps.vars.outputs.tag, 'rc'))) + run: | + eval "$(conda shell.bash hook)" + conda activate test-environment + aws s3 sync --quiet ./builtdocs s3://holoviews.org/ diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f39843f275..147cb372c5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,12 +1,11 @@ -# things not included -# language -# notifications - no email notifications set up - name: pytest on: + push: + branches: + - master pull_request: branches: - - '*' + - '*' jobs: test_suite: @@ -16,22 +15,17 @@ jobs: fail-fast: false matrix: os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] - python-version: [2.7, 3.6, 3.7] - exclude: - - os: windows-latest - python-version: 2.7 - - os: macos-latest - python-version: 3.7 - timeout-minutes: 30 + python-version: [3.6, 3.7, 3.8] + timeout-minutes: 60 defaults: run: - shell: bash -l {0} + shell: bash -l {0} env: DESC: "Python ${{ matrix.python-version }} tests" HV_REQUIREMENTS: "unit_tests" PYTHON_VERSION: ${{ matrix.python-version }} - CHANS_DEV: "-c pyviz/label/dev" - CHANS: "-c pyviz" + CHANS_DEV: "-c pyviz/label/dev -c bokeh -c conda-forge" + CHANS: "-c pyviz -c conda-forge" MPLBACKEND: "Agg" GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: @@ -44,6 +38,7 @@ jobs: - uses: conda-incubator/setup-miniconda@v2 with: miniconda-version: "latest" + channels: conda-forge,defaults - name: Fetch unshallow run: git fetch --prune --tags --unshallow - name: conda setup @@ -56,6 +51,7 @@ jobs: run: | eval "$(conda shell.bash hook)" conda activate test-environment + conda install ${{ env.CHANS_DEV }} "pip<21.2.1" conda list doit develop_install ${{ env.CHANS_DEV}} -o ${{ env.HV_REQUIREMENTS }} python -c "from param import version; print(version.Version.setup_version('.', 'holoviews', archive_commit='$Format:%h$'))" @@ -63,20 +59,6 @@ jobs: git describe echo "======" conda list - - name: bokeh update - if: 
startsWith(matrix.python-version, 3.) - run: | - eval "$(conda shell.bash hook)" - conda activate test-environment - conda install "bokeh>=2.2" - - name: matplotlib patch - if: startsWith(matrix.python-version, 3.) - run: | - eval "$(conda shell.bash hook)" - conda activate test-environment - conda uninstall matplotlib matplotlib-base --force - conda install matplotlib=3.0.3 --no-deps - python -c "import matplotlib; print(matplotlib.__version__);" - name: doit env_capture run: | eval "$(conda shell.bash hook)" @@ -100,8 +82,7 @@ jobs: - name: run coveralls env: github-token: ${{ secrets.GITHUB_TOKEN }} - if: startsWith(matrix.python-version, 3.) run: | eval "$(conda shell.bash hook)" conda activate test-environment - coveralls + coveralls --service=github diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index ff56160f5f..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,173 +0,0 @@ -# We deliberately don't use travis's language=python option because -# we install miniconda and use conda to get python. Additionally, -# Travis's auto-install of python doesn't work on osx images (see -# https://github.com/travis-ci/travis-ci/issues/4729). 
-git: - depth: 100 - -language: python - -os: - - linux - -dist: xenial - -addons: - firefox: latest - -notifications: - email: - on_failure: change # [always|never|change] default: always - -env: - global: - - PKG_TEST_PYTHON="--test-python=py37 --test-python=py27" - - CHANS_DEV="-c pyviz/label/dev -c bokeh -c conda-forge" - - CHANS="-c pyviz" - - MPLBACKEND="Agg" - - PYTHON_VERSION=3.7 - - MOZ_HEADLESS=1 - -stages: - - name: tests - if: tag =~ ^v(\d+|\.)+([a-z]|rc)?\d?$ - - name: extra_tests - if: type = cron - - name: conda_dev_package - if: tag =~ ^v(\d+|\.)+([a-z]|rc)\d+$ - - name: pip_dev_package - if: tag =~ ^v(\d+|\.)+([a-z]|rc)\d+$ - - name: conda_package - if: tag =~ ^v(\d+|\.)*[^a-z]\d*$ - - name: pip_package - if: tag =~ ^v(\d+|\.)*[^a-z]\d*$ - - name: docs - if: tag =~ ^v(\d+|\.)*[^a-z]\d*$ - - name: gallery - if: tag =~ ^v(\d+|\.)*[^a-z]\d*$ - - name: docs_dev - if: tag =~ ^v(\d+|\.)+([a-z]|rc)\d+$ - - name: gallery_dev - if: tag =~ ^v(\d+|\.)+([a-z]|rc)\d+$ - - name: docs_daily - if: ((type = cron) OR ((commit_message =~ /\[doc-build\]/) and branch = master)) - - name: gallery_daily - if: ((type = cron) OR ((commit_message =~ /\[doc-build\]/) and branch = master)) - -jobs: - include: - ########## Test Stage ########## - - - &default - stage: tests - env: DESC="Python 3.6 tests" HV_REQUIREMENTS="unit_tests" PYTHON_VERSION=3.6 - before_install: - - pip install pyctdev && doit miniconda_install && pip uninstall -y doit pyctdev - - export PATH="$HOME/miniconda/bin:$PATH" && hash -r - - conda config --set always_yes True - - conda install -c pyviz "pyctdev>=0.5" && doit ecosystem_setup - install: - - doit env_create $CHANS_DEV --python=$PYTHON_VERSION - - source activate test-environment - - travis_wait 45 doit develop_install $CHANS_DEV -o $HV_REQUIREMENTS - - if [ "$PYTHON_VERSION" == "3.6" ]; then conda uninstall matplotlib matplotlib-base --force; conda install $CHANS_DEV matplotlib=3.0.3 --no-deps; fi; - - doit env_capture - - hash -r - script: - - 
doit test_all_recommended - after_success: coveralls - after_failure: sleep 10 - - - &basic_deps - <<: *default - stage: extra_tests - env: DESC="Basic dependencies" HV_REQUIREMENTS="basic_tests" - script: - - doit test_unit - after_success: echo "Success" - - ########## DOCS ########## - - - &doc_build - <<: *default - stage: docs_dev - env: DESC="docs" HV_DOC_GALLERY="false" HV_DOC_REF_GALLERY="true" HV_REQUIREMENTS="doc" PANEL_EMBED="true" PANEL_EMBED_JSON="true" PANEL_EMBED_JSON_PREFIX="json" CHANS_DEV="-c pyviz/label/dev -c bokeh -c conda-forge" - before-script: - - conda install -c conda-forge geckodriver selenium - script: - - bokeh sampledata - - nbsite generate-rst --org holoviz --project-name holoviews --skip ^reference - - python ./doc/generate_modules.py holoviews -d ./doc/reference_manual -n holoviews -e tests - - nbsite build --what=html --output=builtdocs - after_success: - - aws s3 sync --quiet ./ s3://holoviews-doc-builds/$TRAVIS_BUILD_NUMBER --exclude=".git/*" --exclude="doc/nbpublisher/*" - - - &gallery_build - <<: *doc_build - stage: gallery_dev - env: DESC="gallery" HV_DOC_GALLERY="true" HV_DOC_REF_GALLERY="false" BUCKET="dev." HV_REQUIREMENTS="doc" PANEL_EMBED="true" PANEL_EMBED_JSON="true" PANEL_EMBED_JSON_PREFIX="json" CHANS_DEV="-c pyviz/label/dev -c bokeh -c conda-forge" - script: - - bokeh sampledata - - aws s3 sync --quiet s3://holoviews-doc-builds/$TRAVIS_BUILD_NUMBER ./ - - git reset --hard --recurse-submodule - - nbsite build --what=html --output=builtdocs - after_success: - - aws s3 sync --quiet ./builtdocs s3://${BUCKET}holoviews.org/ - - aws s3 rm --recursive --quiet s3://holoviews-doc-builds/$TRAVIS_BUILD_NUMBER - - - <<: *doc_build - stage: docs_daily - - - <<: *gallery_build - stage: gallery_daily - env: DESC="gallery" HV_DOC_GALLERY="true" HV_DOC_REF_GALLERY="false" BUCKET="build." 
HV_REQUIREMENTS="doc" PANEL_EMBED="true" PANEL_EMBED_JSON="true" PANEL_EMBED_JSON_PREFIX="json" CHANS_DEV="-c pyviz/label/dev -c bokeh -c conda-forge" - - - <<: *doc_build - stage: docs - - - <<: *gallery_build - stage: gallery - env: DESC="gallery" HV_DOC_GALLERY="true" HV_DOC_REF_GALLERY="false" HV_REQUIREMENTS="doc" PANEL_EMBED="true" PANEL_EMBED_JSON="true" PANEL_EMBED_JSON_PREFIX="json" CHANS_DEV="-c pyviz/label/dev -c bokeh -c conda-forge" - after_success: - - aws s3 sync ./builtdocs s3://holoviews.org/ - - ########## END-USER PACKAGES ########## - - ### CONDA #### - - - &conda_pkg - <<: *default - stage: conda_dev_package - env: DESC="" TRAVIS_NOCACHE=$TRAVIS_JOB_ID CHANS_DEV="-c pyviz/label/dev -c bokeh" - install: - - doit package_build $CHANS_DEV $PKG_TEST_PYTHON --test-group=unit - script: - - doit package_upload --token=$CONDA_UPLOAD_TOKEN --label=dev - - - <<: *conda_pkg - stage: conda_package - env: DESC="" TRAVIS_NOCACHE=$TRAVIS_JOB_ID CHANS="-c pyviz -c bokeh" - install: - - doit package_build $CHANS $PKG_TEST_PYTHON --test-group=unit - script: - - doit package_upload --token=$CONDA_UPLOAD_TOKEN --label=dev --label=main - - ### PyPI ### - - - &pip_pkg - <<: *default - stage: pip_dev_package - env: PYPI="https://test.pypi.org/legacy/" TRAVIS_NOCACHE=$TRAVIS_JOB_ID - install: - - doit env_create $CHANS_DEV --python=$PYTHON_VERSION - - source activate test-environment - - travis_wait 30 doit develop_install $CHANS_DEV -o unit_tests && pip uninstall -y holoviews - - doit pip_on_conda - - if [ "$PYTHON_VERSION" == "3.7" ]; then conda uninstall matplotlib matplotlib-base --force; conda install $CHANS_DEV matplotlib=3.0.3 --no-deps; fi; - - doit ecosystem=pip package_build - script: doit ecosystem=pip package_upload -u $TPPU -p $TPPP -r ${PYPI} - - - <<: *pip_pkg - stage: pip_package - env: PYPI="https://upload.pypi.org/legacy/" TRAVIS_NOCACHE=$TRAVIS_JOB_ID - script: doit ecosystem=pip package_upload -u $PPU -p $PPP -r ${PYPI} diff --git a/CHANGELOG.md 
b/CHANGELOG.md index d86f3c06ef..23274cd041 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,331 @@ +Version 1.14.5 +============== +**July 16, 2021** + + +This is a hotfix release with a number of important bug fixes. Most +importantly, this version adds support for the recent pandas 1.3.0 +release. Many thanks to @kgullikson88, @philippjfr and @jlstevens for +contributing the fixes in this release. + +Bug fixes: + +- Support for pandas>=1.3 + ([#5013](https://github.com/holoviz/holoviews/pull/5013)) +- Various bug fixes relating to dim transforms including the use of + parameters in slices and the use of getattribute + ([#4993](https://github.com/holoviz/holoviews/pull/4993), + [#5001](https://github.com/holoviz/holoviews/pull/5001), + [#5005](https://github.com/holoviz/holoviews/pull/5005)) + + +Version 1.14.4 +============== +**May 18, 2021** + +This release primarily focuses on a number of bug fixes. Many thanks to +@Hoxbro, @nitrocalcite, @brl0, @hyamanieu, @rafiyr, @jbednar, @jlstevens +and @philippjfr for contributing. 
+ +Enhancements: + +- Reenable `SaveTool` for plots with `Tiles` + ([#4922](https://github.com/holoviz/holoviews/pull/4922)) +- Enable dask `TriMesh` rasterization using datashader + ([#4935](https://github.com/holoviz/holoviews/pull/4935)) +- Use dataframe index for `TriMesh` node indices + ([#4936](https://github.com/holoviz/holoviews/pull/4936)) + +Bug fixes: + +- Fix hover for stacked `Bars` + ([#4892](https://github.com/holoviz/holoviews/pull/4892)) +- Check before dereferencing Bokeh colormappers + ([#4902](https://github.com/holoviz/holoviews/pull/4902)) +- Fix multiple parameterized inputs to `dim` + ([#4903](https://github.com/holoviz/holoviews/pull/4903)) +- Fix floating point error when generating bokeh Palettes + ([#4911](https://github.com/holoviz/holoviews/pull/4911)) +- Fix bug using dimensions with label on `Bars` + ([#4929](https://github.com/holoviz/holoviews/pull/4929)) +- Do not reverse colormaps with '_r' suffix a second time + ([#4931](https://github.com/holoviz/holoviews/pull/4931)) +- Fix remapping of `Params` stream parameter names + ([#4932](https://github.com/holoviz/holoviews/pull/4932)) +- Ensure `Area.stack` keeps labels + ([#4937](https://github.com/holoviz/holoviews/pull/4937)) + +Documentation: + +- Updated Dashboards user guide to show `pn.bind` first + ([#4907](https://github.com/holoviz/holoviews/pull/4907)) +- Updated docs to correctly declare Scatter kdims + ([#4914](https://github.com/holoviz/holoviews/pull/4914)) + +Compatibility: + +Unfortunately a number of tile sources are no longer publicly +available. Attempting to use these tile sources will now issue warnings +unless `hv.config.raise_deprecated_tilesource_exception` is set to +`True` in which case exceptions will be raised instead. + +- The `Wikipedia` tile source is no longer available as it is no longer + being served outside the wikimedia domain. 
As one of the most + frequently used tile sources, HoloViews now issues a warning and + switches to the OpenStreetMap (OSM) tile source instead. +- The `CartoMidnight` and `CartoEco` tile sources are no longer publicly + available. Attempting to use these tile sources will result in a + deprecation warning. + +Version 1.14.3 +============== +**April 8, 2021** + +This release contains a small number of bug fixes, enhancements and +compatibility for the latest release of matplotlib. Many thanks to +@stonebig, @Hoxbro, @jlstevens, @jbednar and @philippjfr. + +Enhancements: + +- Allow applying linked selections to chained `DynamicMap` + ([#4870](https://github.com/holoviz/holoviews/pull/4870)) +- Issuing improved error message when `__radd__` called with an + integer ([#4868](https://github.com/holoviz/holoviews/pull/4868)) +- Implement `MultiInterface.assign` + ([#4880](https://github.com/holoviz/holoviews/pull/4880)) +- Handle tuple unit on xarray attribute + ([#4881](https://github.com/holoviz/holoviews/pull/4881)) +- Support selection masks and expressions on gridded data + ([#4882](https://github.com/holoviz/holoviews/pull/4882)) + +Bug fixes: + +- Handle empty renderers when merging `HoverTool.renderers` + ([#4856](https://github.com/holoviz/holoviews/pull/4856)) + +Compatibility: + +- Support matplotlib versions >=3.4 + ([#4878](https://github.com/holoviz/holoviews/pull/4878)) + +Version 1.14.2 +============== +**March 2, 2021** + +This release adds support for Bokeh 2.3, introduces a number of minor +enhancements, miscellaneous documentation improvements and a good number +of bug fixes. + +Many thanks to the many contributors to this release, whether directly +by submitting PRs or by reporting issues and making +suggestions. Specifically, we would like to thank @philippjfr for the +Bokeh 2.3 compatibility updates, @kcpevey, @timgates42, and @scottstanie +for documentation improvements as well as @Hoxbro and @LunarLanding for +various bug fixes. 
In addition, thanks to the maintainers @jbednar, +@jlstevens and @philippjfr for contributing to this release. + +Enhancements: + +- Bokeh 2.3 compatibility + ([#4805](https://github.com/holoviz/holoviews/pull/4805), + [#4809](https://github.com/holoviz/holoviews/pull/4809)) +- Supporting dictionary streams parameter in DynamicMaps and operations + ([#4787](https://github.com/holoviz/holoviews/pull/4787), + [#4818](https://github.com/holoviz/holoviews/pull/4818), + [#4822](https://github.com/holoviz/holoviews/pull/4822)) +- Support spatialpandas DaskGeoDataFrame + ([#4792](https://github.com/holoviz/holoviews/pull/4792)) +- Disable zoom on axis for geographic plots + ([#4812](https://github.com/holoviz/holoviews/pull/4812)) +- Add support for non-aligned data in Area stack classmethod + ([#4836](https://github.com/holoviz/holoviews/pull/4836)) +- Handle arrays and datetime ticks + ([#4831](https://github.com/holoviz/holoviews/pull/4831)) +- Support single-value numpy array as input to HLine and VLine + ([#4798](https://github.com/holoviz/holoviews/pull/4798)) + +Bug fixes: + +- Ensure link_inputs parameter on operations is passed to apply + ([#4795](https://github.com/holoviz/holoviews/pull/4795)) +- Fix for muted option on overlaid Bokeh plots + ([#4830](https://github.com/holoviz/holoviews/pull/4830)) +- Check for nested dim dependencies + ([#4785](https://github.com/holoviz/holoviews/pull/4785)) +- Fixed np.nanmax call when computing ranges + ([#4847](https://github.com/holoviz/holoviews/pull/4847)) +- Fix for Dimension pickling + ([#4843](https://github.com/holoviz/holoviews/pull/4843)) +- Fixes for dask backed elements in plotting + ([#4813](https://github.com/holoviz/holoviews/pull/4813)) +- Handle isfinite for NumPy and Pandas masked arrays + ([#4817](https://github.com/holoviz/holoviews/pull/4817)) +- Fix plotting Graph on top of Tiles/Annotation + ([#4828](https://github.com/holoviz/holoviews/pull/4828)) +- Miscellaneous fixes for the Bokeh plotting 
extension + ([#4814](https://github.com/holoviz/holoviews/pull/4814), + [#4839](https://github.com/holoviz/holoviews/pull/4839)) +- Miscellaneous fixes for index based linked selections + ([#4776](https://github.com/holoviz/holoviews/pull/4776)) + +Documentation: + +- Expanded on Tap Stream example in Reference Gallery + [#4782](https://github.com/holoviz/holoviews/pull/4782) +- Miscellaneous typo and broken link fixes + ([#4783](https://github.com/holoviz/holoviews/pull/4783), + [#4827](https://github.com/holoviz/holoviews/pull/4827), + [#4844](https://github.com/holoviz/holoviews/pull/4844), + [#4811](https://github.com/holoviz/holoviews/pull/4811)) + +Version 1.14.1 +============== +**December 28, 2020** + +This release contains a small number of bug fixes addressing +regressions. Many thanks to the contributors to this release including +@csachs, @GilShoshan94 and the maintainers @jlstevens, @jbednar and +@philippjfr. + +Bug fixes: + +- Fix issues with linked selections on tables + ([#4758](https://github.com/holoviz/holoviews/pull/4758)) +- Fix Heatmap alpha dimension transform + ([#4757](https://github.com/holoviz/holoviews/pull/4757)) +- Do not drop tools in linked selections + ([#4756](https://github.com/holoviz/holoviews/pull/4756)) +- Fixed access to possibly non-existent key + ([#4742](https://github.com/holoviz/holoviews/pull/4742)) + +Documentation: + +- Warn about disabled interactive features on website + ([#4762](https://github.com/holoviz/holoviews/pull/4762)) + +Version 1.14.0 +============== +**December 1, 2020** + +This release brings a number of major features including a new +IbisInterface, new Plotly Dash support and greatly improved Plotly +support, and greatly improved interaction and integration with +Datashader. Many thanks to the many contributors to this release, +whether directly by submitting PRs or by reporting issues and making +suggestions. 
Specifically, we would like to thank @philippjfr, +@jonmmease, and @tonyfast for their work on the IbisInterface and +@jonmmease for improving Plotly support, as well as @kcpevey, @Hoxbro, +@marckassay, @mcepl, and @ceball for various other enhancements, +improvements to documentation and testing infrastructure. In +addition, thanks to the maintainers @jbednar, @jlstevens and +@philippjfr for contributing to this release. This version includes a +large number of new features, enhancements, and bug fixes. + +It is important to note that version 1.14 will be the last HoloViews +release supporting Python 2. + +Major features: + +- New Plotly Dash support + ([#4605](https://github.com/holoviz/holoviews/pull/4605)) +- New Plotly support for Tiles element + ([#4686](https://github.com/holoviz/holoviews/pull/4686)) +- New IbisInterface + ([#4517](https://github.com/holoviz/holoviews/pull/4517)) +- Greatly improved Datashader `rasterize()` + ([#4567](https://github.com/holoviz/holoviews/pull/4567)). + Previously, many of the features of Datashader were available only + through `datashade`, which rendered data all the way to RGB pixels + and thus prevented many client-side Bokeh features like hover, + colorbars, dynamic colormaps, etc. `rasterize` now supports all + these Bokeh features along with nearly all the Datashader features + previously only available through `datashade`, including (now + client-side) histogram equalization with `cnorm='eq_hist'` and easy + control of transparency via a new `Dimension.nodata` parameter. See + the [Large Data User + Guide](https://holoviews.org/user_guide/Large_Data.html) for more + information. 
+ +Enhancements: + +- Implemented datashader aggregation of Rectangles + ([#4701](https://github.com/holoviz/holoviews/pull/4701)) +- New support for robust color limits (`clim_percentile`) + ([#4712](https://github.com/holoviz/holoviews/pull/4712)) +- Support for dynamic overlays in link_selections + ([#4683](https://github.com/holoviz/holoviews/pull/4683)) +- Allow clashing Param stream contents + ([#4677](https://github.com/holoviz/holoviews/pull/4677)) +- Ensured pandas does not convert times to UTC + ([#4711](https://github.com/holoviz/holoviews/pull/4711)) +- Removed all use of cyordereddict + ([#4620](https://github.com/holoviz/holoviews/pull/4620)) +- Testing infrastructure moved to GH Actions + ([#4592](https://github.com/holoviz/holoviews/pull/4592)) + +Bug fixes: + +- Ensure RangeXY returns x/y ranges in correct order (#4665) + ([#4665](https://github.com/holoviz/holoviews/pull/4665)) +- Fix datashader instability with Plotly by disabling padding for RGB elements + ([#4705](https://github.com/holoviz/holoviews/pull/4705)) +- Various Dask and cuDF histogram fixes + ([#4691](https://github.com/holoviz/holoviews/pull/4691)) +- Fix handling of custom matplotlib and bokeh colormaps + ([#4693](https://github.com/holoviz/holoviews/pull/4693)) +- Fix cuDF values implementation + ([#4687](https://github.com/holoviz/holoviews/pull/4687)) +- Fixed range calculation on HexTiles + ([#4689](https://github.com/holoviz/holoviews/pull/4689)) +- Use PIL for RGB.load_image + ([#4639](https://github.com/holoviz/holoviews/pull/4639)) + +Documentation: + +- Clarified data types accepted by Points + ([#4430](https://github.com/holoviz/holoviews/pull/4430)) +- Updated Introduction notebook + ([#4682](https://github.com/holoviz/holoviews/pull/4682)) +- Fixed releases urls + ([#4672](https://github.com/holoviz/holoviews/pull/4672)) + +Compatibility: + +- Warning when there are multiple kdims on Chart elements + ([#4710](https://github.com/holoviz/holoviews/pull/4710)) +- Set 
histogram `normed` option to False by default + ([#4258](https://github.com/holoviz/holoviews/pull/4258)) +- The default colormap in holoviews is now 'kbc_r' instead of + 'fire'; see issue + [#3500](https://github.com/holoviz/holoviews/issues/3500) for details. + This change was made mainly because the highest value of the fire colormap + is white, which meant data was often not visible against a white + background. To restore the old behavior you can set + `hv.config.default_cmap='fire'`, which you can do via the extension e.g. + `hv.extension('bokeh', config=dict(default_cmap='fire'))`. There is + also `hv.config.default_gridded_cmap` which you can set to 'fire' if + you wish to use the old colormap for the `Raster`, `Image` and + `QuadMesh` element types. The default `HeatMap` colormap has also been + set to 'kbc_r' for consistency and can be set back to the old value of + 'RdYlBu_r' via `hv.config.default_heatmap_cmap`. + Version 1.13.5 ============== +**October 23, 2020** This version contains numerous bug fixes and a number of enhancements. Many thanks for contribution by @bryevdv, @jbednar, @jlstevens, @jonmmease, @kcpevey and @philippjfr. 
+Enhancements: + +- Refactor of link selections streams + ([#4572](https://github.com/holoviz/holoviews/pull/4572)) +- Add ability to listen to dataset linked_selection + ([#4547](https://github.com/holoviz/holoviews/pull/4547)) +- Added `selected` parameter to Bokeh PathPlot + ([#4641](https://github.com/holoviz/holoviews/pull/4641)) + Bug fixes: - Improvements to iteration over Series in CuDF data backend @@ -24,15 +345,6 @@ Bug fixes: - Fixed deserialization of polygon/multi_line CDS data in bokeh backend ([#4631](https://github.com/holoviz/holoviews/pull/4631)) -Enhancements: - -- Refactor of link selections streams - ([#4572](https://github.com/holoviz/holoviews/pull/4572)) -- Add ability to listen to dataset linked_selection - ([#4547](https://github.com/holoviz/holoviews/pull/4547)) -- Added `selected` parameter to Bokeh PathPlot - ([#4641](https://github.com/holoviz/holoviews/pull/4641)) - Documentation: - Improved `Bars` reference example, demonstrating the dataframe constructor @@ -42,6 +354,7 @@ Documentation: Version 1.13.4 ============== +**September 8, 2020** This version fixes a large number of bugs particularly relating to linked selections. Additionally it introduces some enhancements laying @@ -113,6 +426,7 @@ Enhancements: Version 1.13.3 ============== +**June 23, 2020** This version introduces a number of enhancements of existing functionality, particularly for features introduced in 1.13.0, @@ -182,6 +496,7 @@ Bug fixes: Version 1.13.2 ============== +**April 2, 2020** This is a minor patch release fixing a number of regressions introduced as part of the 1.13.x releases. Many thanks to the @@ -208,6 +523,7 @@ Bug fixes: Version 1.13.1 ============== +**March 25, 2020** This is a minor patch release to fix issues compatibility with the about to be released Bokeh 2.0.1 release. 
Additionally this release @@ -253,6 +569,7 @@ Documentation: Version 1.13.0 ============== +**March 20, 2020** This release is packed full of features and includes a general refactoring of how HoloViews renders widgets now built on top of the @@ -404,6 +721,7 @@ Migration notes: Version 1.12.7 ============== +**November 22, 2019** This a very minor hotfix release fixing an important bug related to axiswise normalization between plots. Many thanks to @srp3003 and @@ -423,6 +741,7 @@ Bug fixes: Version 1.12.6 ============== +**October 8, 2019** This is a minor release containing a large number of bug fixes thanks to the contributions from @joelostblom, @ahuang11, @chbrandt, @@ -488,6 +807,7 @@ Compatibility: Version 1.12.5 ============== +**August 14, 2019** This is a very minor bug fix release ensuring compatibility with recent releases of dask. @@ -502,6 +822,7 @@ Compatibility: Version 1.12.4 ============== +**August 4, 2019** This is a minor release with a number of bug and compatibility fixes as well as a number of enhancements. @@ -570,6 +891,7 @@ Backwards incompatible changes: Version 1.12.3 ============== +**May 20, 2019** This is a minor release primarily focused on a number of important bug fixes. Thanks to our users for reporting issues, and special thanks to @@ -604,6 +926,7 @@ Bug fixes: Version 1.12.2 ============== +**May 1, 2019** This is a minor release with a number of important bug fixes and a small number of enhancements. Many thanks to our users for reporting @@ -650,6 +973,7 @@ Backward compatibility: Version 1.12.1 ============== +**April 10, 2019** This is a minor release that pins to the newly released Bokeh 1.1 and adds support for parameter instances as streams: @@ -661,6 +985,7 @@ Enhancements: Version 1.12.0 ============== +**April 2, 2019** This release provides a number of exciting new features as well as a set of important bug fixes. 
Many thanks to our users for reporting these @@ -713,6 +1038,7 @@ Bug fixes: Version 1.11.3 ============== +**February 25, 2019** This is the last micro-release in the 1.11 series providing a number of important fixes. Many thanks to our users for reporting these @@ -757,6 +1083,7 @@ Enhancements: Version 1.11.2 ============== +**January 28, 2019** This is a minor bug fix release with a number of small but important bug fixes. Special thanks to @darynwhite for his contributions. @@ -782,6 +1109,7 @@ Enhancements: Version 1.11.1 ============== +**January 17, 2019** This is a minor bug fix release with a number of important bug fixes, enhancements and updates to the documentation. Special thanks to @@ -824,6 +1152,7 @@ Documentation: Version 1.11.0 ============== +**December 24, 2018** This is a major release containing a large number of features and API improvements. Specifically this release was devoted to improving the @@ -926,6 +1255,8 @@ Deprecations: Version 1.10.8 ============== +**October 29, 2018** + This a likely the last hotfix release in the 1.10.x series containing fixes for compatibility with bokeh 1.0 and matplotlib 3.0. It also @@ -1000,6 +1331,7 @@ Documentation: Version 1.10.7 ============== +**July 8, 2018** This a very minor hotfix release mostly containing fixes for datashader aggregation of empty datasets: @@ -1019,6 +1351,7 @@ Fixes: Version 1.10.6 ============== +**June 29, 2018** This another minor bug fix release in the 1.10 series and likely the last one before the upcoming 1.11 release. In addition to some important @@ -1051,6 +1384,7 @@ Fixes: Version 1.10.5 ============== +**June 5, 2018** This is a minor bug fix release containing a mixture of small enhancements, a number of important fixes and improved compatibility @@ -1096,6 +1430,7 @@ Compatibility: Version 1.10.4 ============== +**May 14, 2018** This is a minor bug fix release including a number of crucial fixes for issues reported by our users. 
@@ -1116,6 +1451,7 @@ Fixes: Version 1.10.3 ============== +**May 8, 2018** This is a minor bug fix release including a number of crucial fixes for issues reported by our users. @@ -1150,6 +1486,7 @@ API: Version 1.10.2 ============== +**April 30, 2018** This is a minor bug fix release with a number of small fixes for features and regressions introduced in 1.10: @@ -1183,6 +1520,7 @@ Deprecations: Version 1.10.1 ============== +**April 20, 2018** This is a minor bug fix release with a number of fixes for regressions and minor bugs introduced in the 1.10.0 release: @@ -1201,6 +1539,7 @@ Fixes: Version 1.10.0 ============== +**April 17, 2018** This is a major release with a large number of new features and bug fixes, as well as a small number of API changes. Many thanks to the @@ -1342,6 +1681,7 @@ Changes affecting backwards compatibility: Version 1.9.5 ============= +**March 2, 2018** This release includes a very small number of minor bugfixes and a new feature to simplify setting options in python: @@ -1363,6 +1703,7 @@ Fixes: Version 1.9.4 ============= +**February 16, 2018** This release contains a small number of important bug fixes: @@ -1378,6 +1719,7 @@ This release contains a small number of important bug fixes: Version 1.9.3 ============= +**February 11, 2018** This release contains a number of important bug fixes and minor enhancements. @@ -1434,6 +1776,7 @@ API Changes: Version 1.9.2 ============= +**December 11, 2017** This release is a minor bug fix release patching various issues which were found in the 1.9.1 release. @@ -1468,6 +1811,7 @@ Fixes: Version 1.9.1 ============= +**November 13, 2017** This release is a minor bug fix release patching various issues which were found in the 1.9.0 release. 
@@ -1493,6 +1837,7 @@ Fixes: Version 1.9.0 ============= +**November 3, 2017** This release includes a large number of long awaited features, improvements and bug fixes, including streaming and graph support, @@ -1579,6 +1924,7 @@ Changes affecting backwards compatibility: Version 1.8.4 ============= +**September 13, 2017** This bugfix release includes a number of critical fixes for compatiblity with Bokeh 0.12.9 along with various other bug fixes. Many thanks to our @@ -1605,6 +1951,7 @@ Fixes: Version 1.8.3 ============= +**August 21, 2017** This bugfix release fixes a number of minor issues identified since the last release: @@ -1632,6 +1979,7 @@ Fixes: Version 1.8.2 ============= +**August 4, 2017** This bugfix release addresses a number of minor issues identified since the 1.8.1 release: @@ -1658,6 +2006,7 @@ Fixes: Version 1.8.1 ============= +**July 7, 2017** This bugfix release addresses a number of minor issues identified since the 1.8 release: @@ -1698,6 +2047,7 @@ Fixes: Version 1.8.0 ============= +**June 29, 2017** This release includes a complete and long awaited overhaul of the HoloViews documentation and website, with a new gallery, getting-started @@ -1818,6 +2168,7 @@ Changes affecting backwards compatibility: Version 1.7.0 ============= +**April 25, 2017** This version is a major new release incorporating seven months of work involving several hundred PRs and over 1700 commits. Highlights include @@ -2192,6 +2543,7 @@ Changes affecting backwards compatibility: Version 1.6.2 ============= +**August 23, 2016** Bug fix release with various fixes for gridded data backends and optimizations for Bokeh. @@ -2226,6 +2578,7 @@ LayoutPlot.vspace = 0.3 Version 1.6.1 ============= +**July 27, 2016** Bug fix release following the 1.6 major release with major bug fixes for the grid data interfaces and improvements to the options system. @@ -2245,6 +2598,7 @@ the grid data interfaces and improvements to the options system. 
Version 1.6 =========== +**July 14, 2016** A major release with an optional new data interface based on xarray, support for batching Bokeh plots for huge increases in performance, @@ -2276,6 +2630,7 @@ Features and improvements: Version 1.5 =========== +**May 12, 2016** A major release with a large number of new features including new data interfaces for grid based data, major improvements for DynamicMaps and a @@ -2330,6 +2685,7 @@ Backwards compatibility: Version 1.4.3 ============= +**February 11, 2016** A minor bugfix release to patch a number of small but important issues. @@ -2369,6 +2725,7 @@ Backwards compatibility: Version 1.4.2 ============= +**February 7, 2016** Over the past month since the 1.4.1 release, we have improved our infrastructure for building documentation, updated the main website and @@ -2414,6 +2771,7 @@ Fixes and improvements: Version 1.4.1 ============= +**December 22, 2015** Over the past two weeks since the 1.4 release, we have implemented several important bug fixes and have made several usability @@ -2461,6 +2819,7 @@ Notable bug fixes: Version 1.4.0 ============= +**December 4, 2015** Over the past few months we have added several major new features and with the help of our users have been able to address a number of bugs @@ -2534,6 +2893,7 @@ API Changes: Version 1.3.2 ============= +**July 6, 2015** Minor bugfix release to address a small number of issues: @@ -2551,6 +2911,7 @@ Bug fixes: Version 1.3.1 ============= +**July 1, 2015** Minor bugfix release to address a number of issues that weren't caught in time for the 1.3.0 release with the addition of a small number of @@ -2573,6 +2934,7 @@ Bug fixes: Version 1.3.0 ============= +**June 27, 2015** Since the last release we closed over 34 issues and have made 380 commits mostly focused on fixing bugs, cleaning up the API and working @@ -2637,6 +2999,7 @@ API Changes Version 1.2.0 ============= +**May 27, 2015** Since the last release we closed over 20 issues and have made 334 
commits, adding a ton of functionality and fixing a large range of bugs @@ -2697,6 +3060,7 @@ Important bug fixes: Version 1.1.0 ============= +**April 15, 2015** Highlights: @@ -2723,6 +3087,7 @@ made. Version 1.0.1 ============= +**March 26, 2015** Minor release addressing bugs and issues with 1.0.0. @@ -2744,5 +3109,6 @@ Highlights: Version 1.0.0 ============= +**March 16, 2015** First public release available on GitHub and PyPI. diff --git a/LICENSE.txt b/LICENSE.txt index 567d5a727a..ac66dbd6d5 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2005-2019, IOAM (ioam.github.com) +Copyright (c) 2005-2019, holoviz (https://holoviz.org) All rights reserved. Redistribution and use in source and binary forms, with or without @@ -13,7 +13,7 @@ met: documentation and/or other materials provided with the distribution. - * Neither the name of IOAM nor the names of its contributors + * Neither the name of HoloViz nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
diff --git a/README.md b/README.md index 3151cd35e2..f39bf32231 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,9 @@ [![PyPI](https://img.shields.io/pypi/v/holoviews.svg)](https://pypi.python.org/pypi/holoviews) [![Conda](https://anaconda.org/pyviz/holoviews/badges/installer/conda.svg)](https://anaconda.org/pyviz/holoviews) [![Downloads](https://s3.amazonaws.com/pubbadges/holoviews_current.svg)](https://anaconda.org/pyviz/holoviews) -[![BuildStatus](https://travis-ci.org/holoviz/holoviews.svg?branch=master)](https://travis-ci.org/holoviz/holoviews) -[![Coveralls](https://img.shields.io/coveralls/pyviz/holoviews.svg)](https://coveralls.io/r/pyviz/holoviews) +[![BuildStatus](https://github.com/holoviz/holoviews/workflows/pytest/badge.svg?query=branch%3Amaster)](https://github.com/holoviz/holoviews/actions?query=workflow%3Apytest+branch%3Amaster) +[![DocBuildStatus](https://github.com/holoviz/holoviews/workflows/docs/badge.svg?query=branch%3Amaster)](https://github.com/holoviz/holoviews/actions?query=workflow%3Adocs+branch%3Amaster) +[![Coveralls](https://coveralls.io/repos/github/holoviz/holoviews/badge.svg?branch=master)](https://coveralls.io/github/holoviz/holoviews?branch=master) [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/pyviz/pyviz?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 
[![Binder](https://img.shields.io/badge/Launch%20JupyterLab-v1.13.2-579ACA.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFkAAABZCAMAAABi1XidAAAB8lBMVEX///9XmsrmZYH1olJXmsr1olJXmsrmZYH1olJXmsr1olJXmsrmZYH1olL1olJXmsr1olJXmsrmZYH1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olJXmsrmZYH1olL1olL0nFf1olJXmsrmZYH1olJXmsq8dZb1olJXmsrmZYH1olJXmspXmspXmsr1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olLeaIVXmsrmZYH1olL1olL1olJXmsrmZYH1olLna31Xmsr1olJXmsr1olJXmsrmZYH1olLqoVr1olJXmsr1olJXmsrmZYH1olL1olKkfaPobXvviGabgadXmsqThKuofKHmZ4Dobnr1olJXmsr1olJXmspXmsr1olJXmsrfZ4TuhWn1olL1olJXmsqBi7X1olJXmspZmslbmMhbmsdemsVfl8ZgmsNim8Jpk8F0m7R4m7F5nLB6jbh7jbiDirOEibOGnKaMhq+PnaCVg6qWg6qegKaff6WhnpKofKGtnomxeZy3noG6dZi+n3vCcpPDcpPGn3bLb4/Mb47UbIrVa4rYoGjdaIbeaIXhoWHmZYHobXvpcHjqdHXreHLroVrsfG/uhGnuh2bwj2Hxk17yl1vzmljzm1j0nlX1olL3AJXWAAAAbXRSTlMAEBAQHx8gICAuLjAwMDw9PUBAQEpQUFBXV1hgYGBkcHBwcXl8gICAgoiIkJCQlJicnJ2goKCmqK+wsLC4usDAwMjP0NDQ1NbW3Nzg4ODi5+3v8PDw8/T09PX29vb39/f5+fr7+/z8/Pz9/v7+zczCxgAABC5JREFUeAHN1ul3k0UUBvCb1CTVpmpaitAGSLSpSuKCLWpbTKNJFGlcSMAFF63iUmRccNG6gLbuxkXU66JAUef/9LSpmXnyLr3T5AO/rzl5zj137p136BISy44fKJXuGN/d19PUfYeO67Znqtf2KH33Id1psXoFdW30sPZ1sMvs2D060AHqws4FHeJojLZqnw53cmfvg+XR8mC0OEjuxrXEkX5ydeVJLVIlV0e10PXk5k7dYeHu7Cj1j+49uKg7uLU61tGLw1lq27ugQYlclHC4bgv7VQ+TAyj5Zc/UjsPvs1sd5cWryWObtvWT2EPa4rtnWW3JkpjggEpbOsPr7F7EyNewtpBIslA7p43HCsnwooXTEc3UmPmCNn5lrqTJxy6nRmcavGZVt/3Da2pD5NHvsOHJCrdc1G2r3DITpU7yic7w/7Rxnjc0kt5GC4djiv2Sz3Fb2iEZg41/ddsFDoyuYrIkmFehz0HR2thPgQqMyQYb2OtB0WxsZ3BeG3+wpRb1vzl2UYBog8FfGhttFKjtAclnZYrRo9ryG9uG/FZQU4AEg8ZE9LjGMzTmqKXPLnlWVnIlQQTvxJf8ip7VgjZjyVPrjw1te5otM7RmP7xm+sK2Gv9I8Gi++BRbEkR9EBw8zRUcKxwp73xkaLiqQb+kGduJTNHG72zcW9LoJgqQxpP3/Tj//c3yB0tqzaml05/+orHLksVO+95kX7/7qgJvnjlrfr2Ggsyx0eoy9uPzN5SPd86aXggOsEKW2Prz7du3VID3/tzs/sSRs2w7ovVHKtjrX2pd7ZMlTxAYfBAL9jiDwfLkq55Tm7ifhMlTGPyCAs7RFRhn47JnlcB9RM5T97ASuZXIcVNuUDIndpDbdsfrqsOppeXl5Y+XVKdjFCTh+zGaVuj0d9zy05PPK3QzBamxdwtTCrzyg/2Rvf2EstUjordGwa/kx9mSJLr8mLLtCW8HHGJc2R5hS219IiF6PnTusOqcMl57gm0Z8kanKM
AQg0qSyuZfn7zItsbGyO9QlnxY0eCuD1XL2ys/MsrQhltE7Ug0uFOzufJFE2PxBo/YAx8XPPdDwWN0MrDRYIZF0mSMKCNHgaIVFoBbNoLJ7tEQDKxGF0kcLQimojCZopv0OkNOyWCCg9XMVAi7ARJzQdM2QUh0gmBozjc3Skg6dSBRqDGYSUOu66Zg+I2fNZs/M3/f/Grl/XnyF1Gw3VKCez0PN5IUfFLqvgUN4C0qNqYs5YhPL+aVZYDE4IpUk57oSFnJm4FyCqqOE0jhY2SMyLFoo56zyo6becOS5UVDdj7Vih0zp+tcMhwRpBeLyqtIjlJKAIZSbI8SGSF3k0pA3mR5tHuwPFoa7N7reoq2bqCsAk1HqCu5uvI1n6JuRXI+S1Mco54YmYTwcn6Aeic+kssXi8XpXC4V3t7/ADuTNKaQJdScAAAAAElFTkSuQmCC)](https://mybinder.org/v2/gh/holoviz/holoviews/v1.13.2?urlpath=lab/tree/examples) @@ -55,22 +56,22 @@ Installation ============ HoloViews works with -[Python 2.7 and Python 3](https://travis-ci.org/holoviz/holoviews) -on Linux, Windows, or Mac, and provides optional extensions for working with the -[Jupyter/IPython Notebook](http://jupyter.org). +[Python 2.7 and Python 3](https://github.com/holoviz/holoviews/actions?query=workflow%3Apytest) +on Linux, Windows, or Mac, and works seamlessly with +[Jupyter Notebook and JupyterLab](https://jupyter.org). The recommended way to install HoloViews is using the -[conda](http://conda.pydata.org/docs/) command provided by -[Anaconda](http://docs.continuum.io/anaconda/install) or -[Miniconda](http://conda.pydata.org/miniconda.html): +[conda](https://docs.conda.io/projects/conda/en/latest/index.html) command provided by +[Anaconda](https://docs.anaconda.com/anaconda/install/) or +[Miniconda](https://docs.conda.io/en/latest/miniconda.html): conda install -c pyviz holoviews bokeh This command will install the typical packages most useful with HoloViews, though HoloViews itself depends only on -[Numpy](http://numpy.org) and [Param](https://param.holoviz.org). +[Numpy](https://numpy.org) [Pandas](https://pandas.pydata.org) and [Param](https://param.holoviz.org). Additional installation and configuration options are described in the -[user guide](http://holoviews.org/user_guide/Installing_and_Configuring.html). +[user guide](https://holoviews.org/user_guide/Installing_and_Configuring.html). 
You can also clone holoviews directly from GitHub and install it with: @@ -81,29 +82,25 @@ You can also clone holoviews directly from GitHub and install it with: Usage ----- -Once you've installed HoloViews, you can get a copy of all the -examples shown on the website: +Once you've installed HoloViews, you can get a copy of all the examples shown on this website: holoviews --install-examples cd holoviews-examples -And then you can launch Jupyter Notebook to explore them: +Now you can launch Jupyter Notebook or JupyterLab to explore them: jupyter notebook -To work with JupyterLab you will also need the PyViz JupyterLab + jupyter lab + +If you are working with a JupyterLab version <2.0 you will also need the PyViz JupyterLab extension: - conda install -c conda-forge jupyterlab jupyter labextension install @pyviz/jupyterlab_pyviz -Once you have installed JupyterLab and the extension launch it with:: - - jupyter-lab - -For more details on setup and configuration see [our website](http://holoviews.org/user_guide/Installing_and_Configuring.html). +For more details on setup and configuration see [our website](https://holoviews.org/user_guide/Installing_and_Configuring.html). For general discussion, we have a [gitter channel](https://gitter.im/pyviz/pyviz). If you find any bugs or have any feature suggestions please file a GitHub -[issue](https://github.com/pyviz/holoviews/issues) +[issue](https://github.com/holoviz/holoviews/issues) or submit a [pull request](https://help.github.com/articles/about-pull-requests). diff --git a/doc/Homepage.ipynb b/doc/Homepage.ipynb index 4ff090abbb..00a6e55ddb 100644 --- a/doc/Homepage.ipynb +++ b/doc/Homepage.ipynb @@ -4,15 +4,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "HoloViews is a [Python](http://python.org) library that makes analyzing and visualizing scientific or engineering data much simpler, more intuitive, and more easily reproducible. 
Instead of specifying every step for each plot, HoloViews lets you store your data in an annotated format that is instantly visualizable, with immediate access to both the numeric data *and* its visualization. Examples of how HoloViews is used in Python scripts as well as in live [Jupyter Notebooks](http://jupyter.org) may be accessed directly from the [holoviews-contrib](http://github.com/ioam/holoviews-contrib) repository. Here is a quick example of HoloViews in action:" + "HoloViews is a [Python](http://python.org) library that makes analyzing and visualizing scientific or engineering data much simpler, more intuitive, and more easily reproducible. Instead of specifying every step for each plot, HoloViews lets you store your data in an annotated format that is instantly visualizable, with immediate access to both the numeric data *and* its visualization. Examples of how HoloViews is used in Python scripts as well as in live [Jupyter Notebooks](http://jupyter.org) may be accessed directly from the [holoviews gallery](http://holoviews.org/gallery/index.html) webpage. 
Here is a quick example of HoloViews in action:" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false - }, + "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", @@ -47,9 +45,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false - }, + "metadata": {}, "outputs": [], "source": [ "%%opts Points [scaling_factor=50] Contours (color='w')\n", @@ -77,22 +73,9 @@ } ], "metadata": { - "kernelspec": { - "display_name": "Python 2", - "language": "python", - "name": "python2" - }, "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.12" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/doc/about.rst b/doc/about.rst index fcd3f5bf38..4020a8cd14 100644 --- a/doc/about.rst +++ b/doc/about.rst @@ -20,9 +20,9 @@ Prior Funding :align: left :target: http://www.anc.ed.ac.uk -The original development of HoloViews was supported in part by Grants EP/F500385/1 and BB/F529254/1 -to the `University of Edinburgh -Doctoral Training Centre in Neuroinformatics and Computational Neuroscience `_ -from the UK Engineering and Physical Sciences Research Council, +The original development of HoloViews was supported in part by Grants EP/F500385/1 and BB/F529254/1 +to the `University of Edinburgh +Doctoral Training Centre in Neuroinformatics and Computational Neuroscience `_ +from the UK Engineering and Physical Sciences Research Council, Biotechnology and Biological Sciences Research Council, and Medical Research Council. diff --git a/doc/features.rst b/doc/features.rst index 5028957b38..4bcec417b7 100644 --- a/doc/features.rst +++ b/doc/features.rst @@ -14,7 +14,7 @@ Features * All features `available in vanilla Python 2 or 3 `_, with minimal dependencies. 
**Support for maintainable, reproducible research** - + * Supports a truly reproducible workflow by minimizing the code needed for analysis and visualization. * Already used in a variety of research projects, from conception to final publication. * All HoloViews objects can be pickled and unpickled. diff --git a/doc/index.rst b/doc/index.rst index 39ca66e703..8fc3db0003 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -74,14 +74,15 @@ ____________ |CondaPkg|_ |PyPI|_ |License|_ |Coveralls|_ -HoloViews works with `Python 2.7 and Python 3 `_ on Linux, Windows, or Mac, and provides optional extensions for working with the `Jupyter/IPython Notebook `_. +HoloViews works with `Python 2.7 and Python 3 `_ on Linux, Windows, or Mac, and works seamlessly with `Jupyter Notebook and JupyterLab `_. -The recommended way to install HoloViews is using the `conda `_ command provided by `Anaconda `_ or `Miniconda `_:: +The recommended way to install HoloViews is using the `conda `_ command provided by `Anaconda `_ or `Miniconda `_:: conda install -c pyviz holoviews bokeh This command will install the typical packages most useful with HoloViews, though HoloViews itself -directly depends only on `Numpy `_ and `Param `_. +directly depends only on `Numpy `_, `Pandas `_ and `Param `_. + Additional installation and configuration options are described in the `user guide `_. 
@@ -96,23 +97,20 @@ Once you've installed HoloViews, you can get a copy of all the examples shown on holoviews --install-examples cd holoviews-examples -And then you can launch Jupyter Notebook to explore them:: +Now you can launch Jupyter Notebook or JupyterLab to explore them:: jupyter notebook -To work with JupyterLab you will also need the PyViz JupyterLab + jupyter lab + +If you are working with a JupyterLab version <2.0 you will also need the PyViz JupyterLab extension:: - conda install -c conda-forge jupyterlab jupyter labextension install @pyviz/jupyterlab_pyviz -Once you have installed JupyterLab and the extension launch it with:: - - jupyter-lab - -For more details on installing and configuring HoloViews see `the installing and configuring guide `_. +For more details on installing and configuring HoloViews see `the installing and configuring guide `_. -After you have successfully installed and configured HoloViews, please see `Getting Started `_. +After you have successfully installed and configured HoloViews, please see `Getting Started `_. .. |PyPI| image:: https://img.shields.io/pypi/v/holoviews.svg @@ -124,8 +122,8 @@ After you have successfully installed and configured HoloViews, please see `Gett .. |License| image:: https://img.shields.io/pypi/l/holoviews.svg .. _License: https://github.com/holoviz/holoviews/blob/master/LICENSE.txt -.. |Coveralls| image:: https://img.shields.io/coveralls/pyviz/holoviews.svg -.. _Coveralls: https://coveralls.io/r/pyviz/holoviews +.. |Coveralls| image:: https://img.shields.io/coveralls/holoviz/holoviews.svg +.. _Coveralls: https://coveralls.io/r/holoviz/holoviews .. 
toctree:: :titlesonly: diff --git a/doc/install.rst b/doc/install.rst index b2c2f1eb41..2ebe625752 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -4,10 +4,10 @@ Installing HoloViews The quickest and easiest way to get the latest version of all the recommended packages for working with HoloViews on Linux, Windows, or Mac systems is via the -`conda `_ command provided by +`conda `_ command provided by the -`Anaconda `_ or -`Miniconda `_ scientific +`Anaconda `_ or +`Miniconda `_ scientific Python distributions:: conda install -c pyviz holoviews bokeh @@ -15,7 +15,7 @@ Python distributions:: This recommended installation includes the default `Matplotlib `_ plotting library backend, the more interactive `Bokeh `_ plotting library -backend, and the `Jupyter/IPython Notebook `_. +backend, and the `Jupyter Notebook `_. A similar set of packages can be installed using ``pip``, if that command is available on your system:: @@ -28,25 +28,20 @@ HoloViews objects without visualization:: pip install holoviews -This minimal install includes only the two required libraries `Param -`_ and `Numpy `_, -neither of which has any required dependencies, which makes it very -easy to integrate HoloViews into your workflow or as part of another -project. +This minimal install includes only three required libraries `Param +`_, `Numpy `_, and +`pandas `_, which makes it very easy to +integrate HoloViews into your workflow or as part of another project. Alternatively, you can ask ``pip`` to install a larger set of packages that provide additional functionality in HoloViews:: - pip install "holoviews[extras]" + pip install "holoviews[examples]" -This option installs all the required and recommended packages, -including the `pandas `_ and `Seaborn -`_ libraries. +This option installs all the required and recommended packages, in +addition to all libraries required for running all the examples.
-Lastly, to get *everything*, including `cyordereddict -`_ to enable optional -speed optimizations and `nose `_ -for running unit tests, you can use:: +Lastly, to get *everything* including the test dependencies, you can use:: pip install "holoviews[all]" @@ -57,16 +52,16 @@ can be installed using:: To get the very latest development version using ``pip``, you can use:: - pip install git+https://github.com/pyviz/holoviews.git + pip install git+https://github.com/holoviz/holoviews.git The alternative approach using git archive (e.g ``pip install -https://github.com/pyviz/holoviews/archive/master.zip``) is *not* +https://github.com/holoviz/holoviews/archive/master.zip``) is *not* recommended as you will have incomplete version strings. Anyone interested in following development can get the very latest version by cloning the git repository:: - git clone https://github.com/pyviz/holoviews.git + git clone https://github.com/holoviz/holoviews.git To make this code available for import you then need to run:: @@ -82,18 +77,19 @@ Jupyter Notebook:: jupyter notebook -To work with JupyterLab you will also need the PyViz JupyterLab +To work with JupyterLab>2.0 you won't need to install anything else, +however for older versions you should also install the PyViz extension:: - conda install -c conda-forge jupyterlab jupyter labextension install @pyviz/jupyterlab_pyviz Once you have installed JupyterLab and the extension launch it with:: - jupyter-lab + jupyter lab -Now you can download the `tutorial notebooks`_. unzip them somewhere -Jupyter Notebook can find them, and then open the Homepage.ipynb -tutorial or any of the others in the Notebook. Enjoy exploring your -data! 
+Now that you are set up you can get a copy of all the examples shown +on this website:: + + holoviews --install-examples + cd holoviews-examples diff --git a/doc/reference_manual/index.rst b/doc/reference_manual/index.rst index f6fee8d1b0..7cb924fb80 100644 --- a/doc/reference_manual/index.rst +++ b/doc/reference_manual/index.rst @@ -87,4 +87,4 @@ HoloViews subpackages .. _plotting.plotly: holoviews.plotting.plotly.html .. _selection: holoviews.selection.html .. _streams: holoviews.streams.html -.. _util: holoviews.util +.. _util: holoviews.util.html diff --git a/doc/releases.rst b/doc/releases.rst index 5e9c6fd695..9c7f6c22f8 100644 --- a/doc/releases.rst +++ b/doc/releases.rst @@ -1,15 +1,328 @@ Releases ======== -Announcements -------------- +Version 1.14 +~~~~~~~~~~~~ + +Version 1.14.5 +************** +**July 16, 2021** + +This is a hotfix release with a number of important bug fixes. Most +importantly, this version supports for the recent pandas 1.3.0 release. +Many thanks to @kgullikson88, @philippjfr and @jlstevens for +contributing the fixes in this release. + +Bug fixes: + +- Support for pandas>=1.3 + (`#5013 `__) +- Various bug fixes relating to dim transforms including the use of + parameters in slices and the use of getattribute + (`#4993 `__, + `#5001 `__, + `#5005 `__) + + +Version 1.14.4 +************** +**May 18, 2021** + +This release primarily focuses on a number of bug fixes. Many thanks to +@Hoxbro, @nitrocalcite, @brl0, @hyamanieu, @rafiyr, @jbednar, @jlstevens +and @philippjfr for contributing. 
+ +Enhancements: + +- Reenable ``SaveTool`` for plots with ``Tiles`` + (`#4922 `_) +- Enable dask ``TriMesh`` rasterization using datashader + (`#4935 `_) +- Use dataframe index for ``TriMesh`` node indices + (`#4936 `_) + +Bug fixes: + +- Fix hover for stacked ``Bars`` + (`#4892 `_) +- Check before dereferencing Bokeh colormappers + (`#4902 `_) +- Fix multiple parameterized inputs to ``dim`` + (`#4903 `_) +- Fix floating point error when generating bokeh Palettes + (`#4911 `_) +- Fix bug using dimensions with label on ``Bars`` + (`#4929 `_) +- Do not reverse colormaps with '_r' suffix a second time + (`#4931 `_) +- Fix remapping of ``Params`` stream parameter names + (`#4932 `_) +- Ensure ``Area.stack`` keeps labels + (`#4937 `_) + +Documentation: + +- Updated Dashboards user guide to show ``pn.bind`` first + (`#4907 `_) +- Updated docs to correctly declare Scatter kdims + (`#4914 `_) + +Compatibility: + +Unfortunately a number of tile sources are no longer publicly available. +Attempting to use these tile sources will now issue warnings unless +``hv.config.raise_deprecated_tilesource_exception`` is set to ``True`` +in which case exceptions will be raised instead. + +- The ``Wikipedia`` tile source is no longer available as it is no + longer being served outside the wikimedia domain. As one of the most + frequently used tile sources, HoloViews now issues a warning and + switches to the OpenStreetMap (OSM) tile source instead. +- The ``CartoMidnight`` and ``CartoEco`` tile sources are no longer + publicly available. Attempting to use these tile sources will result + in a deprecation warning. + + +Version 1.14.3 +************** +**April 8, 2021** + +This release contains a small number of bug fixes, enhancements and +compatibility for the latest release of matplotlib. Many thanks to +@stonebig, @Hoxbro, @jlstevens, @jbednar and @philippjfr. 
+ +Enhancements: + +- Allow applying linked selections to chained ``DynamicMap`` + (`#4870 `__) +- Issuing improved error message when ``__radd__`` called with an + integer (`#4868 `__) +- Implement ``MultiInterface.assign`` + (`#4880 `__) +- Handle tuple unit on xarray attribute + (`#4881 `__) +- Support selection masks and expressions on gridded data + (`#4882 `__) + +Bug fixes: + +- Handle empty renderers when merging ``HoverTool.renderers`` + (`#4856 `__) + +Compatibility: + +- Support matplotlib versions >=3.4 + (`#4878 `__) + + +Version 1.14.2 +************** + +**March 2, 2021** + + +This release adds support for Bokeh 2.3, introduces a number of minor +enhancements, miscellaneous documentation improvements and a good number +of bug fixes. + +Many thanks to the many contributors to this release, whether directly +by submitting PRs or by reporting issues and making suggestions. +Specifically, we would like to thank @philippjfr for the Bokeh 2.3 +compatibility updates, @kcpevey, @timgates42, and @scottstanie for +documentation improvements as well as @Hoxbro and @LunarLanding for +various bug fixes. In addition, thanks to the maintainers @jbednar, +@jlstevens and @philippjfr for contributing to this release. 
+ +Enhancements: + +- Bokeh 2.3 compatibility + (`#4805 `__, + `#4809 `__) +- Supporting dictionary streams parameter in DynamicMaps and operations + (`#4787 `__, + `#4818 `__, + `#4822 `__) +- Support spatialpandas DaskGeoDataFrame + (`#4792 `__) +- Disable zoom on axis for geographic plots + (`#4812 `__) +- Add support for non-aligned data in Area stack classmethod + (`#4836 `__) +- Handle arrays and datetime ticks + (`#4831 `__) +- Support single-value numpy array as input to HLine and VLine + (`#4798 `__) + +Bug fixes: + +- Ensure link_inputs parameter on operations is passed to apply + (`#4795 `__) +- Fix for muted option on overlaid Bokeh plots + (`#4830 `__) +- Check for nested dim dependencies + (`#4785 `__) +- Fixed np.nanmax call when computing ranges + (`#4847 `__) +- Fix for Dimension pickling + (`#4843 `__) +- Fixes for dask backed elements in plotting + (`#4813 `__) +- Handle isfinite for NumPy and Pandas masked arrays + (`#4817 `__) +- Fix plotting Graph on top of Tiles/Annotation + (`#4828 `__) +- Miscellaneous fixes for the Bokeh plotting extension + (`#4814 `__, + `#4839 `__) +- Miscellaneous fixes for index based linked selections + (`#4776 `__) + +Documentation: + +- Expanded on Tap Stream example in Reference Gallery + (`#4782 `__) +- Miscellaneous typo and broken link fixes + (`#4783 `__, + `#4827 `__, + `#4844 `__, + `#4811 `__) + +Version 1.14.1 +************** + +**December 28, 2020** + +This release contains a small number of bug fixes addressing +regressions. Many thanks to the contributors to this release including +@csachs, @GilShoshan94 and the maintainers @jlstevens, @jbednar and +@philippjfr.
+ +Bug fixes: + +- Fix issues with linked selections on tables + (`#4758 `__) +- Fix Heatmap alpha dimension transform + (`#4757 `__) +- Do not drop tools in linked selections + (`#4756 `__) +- Fixed access to possibly non-existent key + (`#4742 `__) + +Documentation: + +- Warn about disabled interactive features on website + (`#4762 `__) + + +Version 1.14.0 +************** + +**December 1, 2020** + + +This release brings a number of major features including a new +IbisInterface, new Plotly Dash support and greatly improved Plotly +support, and greatly improved interaction and integration with +Datashader. Many thanks to the many contributors to this release, +whether directly by submitting PRs or by reporting issues and making +suggestions. Specifically, we would like to thank @philippjfr, +@jonmmease, and @tonyfast for their work on the IbisInterface and +@jonmmease for improving Plotly support, as well as @kcpevey, @Hoxbro, +@marckassay, @mcepl, and @ceball for various other enhancements, +improvements to documentation and testing infrastructure. In addition, +thanks to the maintainers @jbednar, @jlstevens and @philippjfr for +contributing to this release. This version includes a large number of +new features, enhancements, and bug fixes. + +It is important to note that version 1.14 will be the last HoloViews +release supporting Python 2. + +Major features: -* `Version 1.13 `__ (June 23, 2020) -* `Version 1.10 `__ (April 11, 2018) +- New Plotly Dash support + (`#4605 `__) +- New Plotly support for Tiles element + (`#4686 `__) +- New IbisInterface + (`#4517 `__) +- Greatly improved Datashader ``rasterize()`` + (`#4567 `__). + Previously, many of the features of Datashader were available only + through ``datashade``, which rendered data all the way to RGB pixels + and thus prevented many client-side Bokeh features like hover, + colorbars, dynamic colormaps, etc.
``rasterize`` now supports all + these Bokeh features along with nearly all the Datashader features + previously only available through ``datashade``, including (now + client-side) histogram equalization with ``cnorm='eq_hist'`` and easy + control of transparency via a new ``Dimension.nodata`` parameter. + See the `Large Data User Guide + `__ for more + information. +Enhancements: + +- Implemented datashader aggregation of Rectangles + (`#4701 `__) +- New support for robust color limits (``clim_percentile``) + (`#4712 `__) +- Support for dynamic overlays in link_selections + (`#4683 `__) +- Allow clashing Param stream contents + (`#4677 `__) +- Ensured pandas does not convert times to UTC + (`#4711 `__) +- Removed all use of cyordereddict + (`#4620 `__) +- Testing infrastructure moved to GH Actions + (`#4592 `__) + +Bug fixes: + +- Ensure RangeXY returns x/y ranges in correct order (#4665) + (`#4665 `__) +- Fix datashader instability with Plotly by disabling padding for RGB + elements (`#4705 `__) +- Various Dask and cuDF histogram fixes + (`#4691 `__) +- Fix handling of custom matplotlib and bokeh colormaps + (`#4693 `__) +- Fix cuDF values implementation + (`#4687 `__) +- Fixed range calculation on HexTiles + (`#4689 `__) +- Use PIL for RGB.load_image + (`#4639 `__) + +Documentation: + +- Clarified data types accepted by Points + (`#4430 `__) +- Updated Introduction notebook + (`#4682 `__) +- Fixed releases urls + (`#4672 `__) + +Compatibility: + +- Warning when there are multiple kdims on Chart elements + (`#4710 `__) +- Set histogram ``normed`` option to False by default + (`#4258 `__) +- The default colormap in holoviews is now ‘kbc_r’ instead of ‘fire’; + see issue + `#3500 `__ for + details. This change was made mainly because the highest value of the + fire colormap is white, which meant data was often not visible + against a white background. 
To restore the old behavior you can set + ``hv.config.default_cmap='fire'``, which you can do via the extension + e.g. ``hv.extension('bokeh', config=dict(default_cmap='fire'))``. + There is also ``hv.config.default_gridded_cmap`` which you can set to + ‘fire’ if you wish to use the old colormap for the ``Raster``, + ``Image`` and ``QuadMesh`` element types. The default ``HeatMap`` + colormap has also been set to ‘kbc_r’ for consistency and can be set + back to the old value of ‘RdYlBu_r’ via + ``hv.config.default_heatmap_cmap``. -Release notes -------------- Version 1.13 ~~~~~~~~~~~~ @@ -17,6 +330,9 @@ Version 1.13 Version 1.13.5 ************** +**October 23, 2020** + + This version contains numerous bug fixes and a number of enhancements. Many thanks for contribution by @bryevdv, @jbednar, @jlstevens, @jonmmease, @kcpevey and @philippjfr. @@ -29,7 +345,7 @@ Bug fixes: (`#4646 `_) - Fixed bug resetting ranges (`#4654 `_) -- Fix bug matching elements to subplots in `DynamicMap` (#4649) +- Fix bug matching elements to subplots in `DynamicMap` (`#4649 `_) - Ensure consistent split `Violin` color assignment (`#4650 `_) @@ -59,6 +375,9 @@ Documentation: Version 1.13.4 ************** +**September 8, 2020** + + This version fixes a large number of bugs particularly relating to linked selections. Additionally it introduces some enhancements laying the groundwork for future functionality. Many thanks for contribution @@ -131,6 +450,9 @@ Enhancements: Version 1.13.3 ************** +**June 23, 2020** + + This version introduces a number of enhancements of existing functionality, particularly for features introduced in 1.13.0, e.g. cuDF support and linked selections. In addition it introduces a @@ -202,6 +524,8 @@ Bug fixes: Version 1.13.2 ************** +**April 2, 2020** + This is a minor patch release fixing a number of regressions introduced as part of the 1.13.x releases. 
Many thanks to the contributors including @eddienko, @poplarShift, @wuyuani135, @maximlt @@ -228,6 +552,8 @@ Bug fixes: Version 1.13.1 ************** +**March 25, 2020** + This is a minor patch release to fix issues compatibility with the about to be released Bokeh 2.0.1 release. Additionally this release makes Pandas a hard dependency, which was already implicitly the case @@ -273,6 +599,8 @@ Documentation: Version 1.13.0 ************** +**March 20, 2020** + This release is packed full of features and includes a general refactoring of how HoloViews renders widgets now built on top of the Panel library. Many thanks to the many contributors to this release @@ -284,6 +612,10 @@ suggestions. Specifically we would like to thank @poplarShift, @philippjfr for contributing to this release. This version includes the addition of a large number of features, enhancements and bug fixes: +`Read more about version 1.13 here +`__ (June 23, 2020) + + Major features: - Add ``link_selection`` to make custom linked brushing simple @@ -432,6 +764,9 @@ Version 1.12 Version 1.12.7 ************** +**November 22, 2019** + + This a very minor hotfix release fixing an important bug related to axiswise normalization between plots. Many thanks to @srp3003 and @philippjfr for contributing to this release. @@ -451,6 +786,8 @@ Bug fixes: Version 1.12.6 ************** +**October 8, 2019** + This is a minor release containing a large number of bug fixes thanks to the contributions from @joelostblom, @ahuang11, @chbrandt, @randomstuff, @jbednar and @philippjfr. It also contains a number of @@ -517,6 +854,7 @@ Compatibility: Version 1.12.5 ************** +**August 14, 2019** This is a very minor bug fix release ensuring compatibility with recent releases of dask. @@ -532,6 +870,8 @@ Compatibility: Version 1.12.4 ************** +**August 4, 2019** + This is a minor release with a number of bug and compatibility fixes as well as a number of enhancements. 
@@ -598,6 +938,8 @@ Backwards incompatible changes: Version 1.12.3 ************** +**May 20, 2019** + This is a minor release primarily focused on a number of important bug fixes. Thanks to our users for reporting issues, and special thanks to the internal developers @philippjfr and @jlstevens and external @@ -632,6 +974,8 @@ Bug fixes: Version 1.12.2 ************** +**May 1, 2019** + This is a minor release with a number of important bug fixes and a small number of enhancements. Many thanks to our users for reporting these issues, and special thanks to our internal developers @@ -679,6 +1023,8 @@ Backward compatibility: Version 1.12.1 ************** +**April 10, 2019** + This is a minor release that pins to the newly released Bokeh 1.1 and adds support for parameter instances as streams: @@ -691,6 +1037,8 @@ Enhancements: Version 1.12.0 ************** +**April 2, 2019** + This release provides a number of exciting new features as well as a set of important bug fixes. Many thanks to our users for reporting these issues, and special thanks to @ahuang11, @jonmmease, @poplarShift, @@ -748,6 +1096,8 @@ Version 1.11 Version 1.11.3 ************** +**February 25, 2019** + This is the last micro-release in the 1.11 series providing a number of important fixes. Many thanks to our users for reporting these issues and @poplarShift and @henriqueribeiro for contributing a number @@ -794,6 +1144,8 @@ Enhancements: Version 1.11.2 ************** +**January 28, 2019** + This is a minor bug fix release with a number of minor but important bug fixes. Special thanks to @darynwhite for his contributions. @@ -819,6 +1171,8 @@ Enhancements: Version 1.11.1 ************** +**January 17, 2019** + This is a minor bug fix release with a number of important bug fixes, enhancements and updates to the documentation. 
Special thanks to Andrew Huang (@ahuang11), @garibarba and @Safrone for their @@ -861,6 +1215,8 @@ Documentation: Version 1.11.0 ************** +**December 24, 2018** + This is a major release containing a large number of features and API improvements. Specifically this release was devoted to improving the general usability and accessibility of the HoloViews API and @@ -969,6 +1325,8 @@ Version 1.10 Version 1.10.8 ************** +**October 29, 2018** + This a likely the last hotfix release in the 1.10.x series containing fixes for compatibility with bokeh 1.0 and matplotlib 3.0. It also contains a wide array of fixes contributed and reported by users: @@ -1042,6 +1400,8 @@ Documentation: Version 1.10.7 ************** +**July 8, 2018** + This a very minor hotfix release mostly containing fixes for datashader aggregation of empty datasets: @@ -1060,6 +1420,8 @@ Fixes: Version 1.10.6 ************** +**June 29, 2018** + This another minor bug fix release in the 1.10 series and likely the last one before the upcoming 1.11 release. In addition to some important fixes relating to datashading and the handling of dask data, this @@ -1091,6 +1453,8 @@ Fixes: Version 1.10.5 ************** +**June 5, 2018** + This is a minor bug fix release containing a mixture of small enhancements, a number of important fixes and improved compatibility with pandas 0.23. @@ -1128,6 +1492,8 @@ Fixes: Version 1.10.4 ************** +**May 14, 2018** + This is a minor bug fix release including a number of crucial fixes for issues reported by our users. @@ -1149,6 +1515,8 @@ Fixes: Version 1.10.3 ************** +**May 8, 2018** + This is a minor bug fix release including a number of crucial fixes for issues reported by our users. 
@@ -1183,6 +1551,8 @@ API: Version 1.10.2 ************** +**April 30, 2018** + This is a minor bug fix release with a number of small fixes for features and regressions introduced in 1.10: @@ -1215,6 +1585,8 @@ Deprecations: Version 1.10.1 ************** +**April 20, 2018** + This is a minor bug fix release with a number of fixes for regressions and minor bugs introduced in the 1.10.0 release: @@ -1232,12 +1604,18 @@ Fixes: Version 1.10.0 ************** +**April 17, 2018** + This is a major release with a large number of new features and bug fixes, as well as a small number of API changes. Many thanks to the numerous users who filed bug reports, tested development versions, and contributed a number of new features and bug fixes, including special thanks to @mansenfranzen, @ea42gh, @drs251 and @jakirkham. +`Read more about version 1.10 here +`__ (April 11, 2018) + + JupyterLab support: - Full compatibility with JupyterLab when installing the @@ -1371,6 +1749,8 @@ Version 1.9 Version 1.9.5 ************* +**March 2, 2018** + This release includes a very small number of minor bugfixes and a new feature to simplify setting options in python: @@ -1391,6 +1771,8 @@ Fixes: Version 1.9.4 ************* +**February 16, 2018** + This release contains a small number of important bug fixes: - Compatibility with recent versions of Dask and pandas @@ -1406,6 +1788,8 @@ This release contains a small number of important bug fixes: Version 1.9.3 ************* +**February 11, 2018** + This release contains a number of important bug fixes and minor enhancements. @@ -1460,6 +1844,8 @@ API Changes: Version 1.9.2 ************* +**December 11, 2017** + This release is a minor bug fix release patching various issues which were found in the 1.9.1 release. @@ -1491,6 +1877,8 @@ Fixes: Version 1.9.1 ************* +**November 13, 2017** + This release is a minor bug fix release patching various issues which were found in the 1.9.0 release. 
@@ -1516,6 +1904,8 @@ Fixes: Version 1.9.0 ************* +**November 3, 2017** + This release includes a large number of long awaited features, improvements and bug fixes, including streaming and graph support, binary transfer of Bokeh data, fast Image/RGB regridding, first-class @@ -1602,6 +1992,8 @@ Version 1.8 Version 1.8.4 ************* +**September 13, 2017** + This bugfix release includes a number of critical fixes for compatiblity with Bokeh 0.12.9 along with various other bug fixes. Many thanks to our users for various detailed bug reports, feedback and contributions. @@ -1627,6 +2019,8 @@ Fixes: Version 1.8.3 ************* +**August 21, 2017** + This bugfix release fixes a number of minor issues identified since the last release: @@ -1653,6 +2047,8 @@ Fixes: Version 1.8.2 ************* +**August 4, 2017** + This bugfix release addresses a number of minor issues identified since the 1.8.1 release: @@ -1679,6 +2075,8 @@ Fixes: Version 1.8.1 ************* +**July 7, 2017** + This bugfix release addresses a number of minor issues identified since the 1.8 release: @@ -1719,6 +2117,8 @@ Fixes: Version 1.8.0 ************* +**June 29, 2017** + This release includes a complete and long awaited overhaul of the HoloViews documentation and website, with a new gallery, getting-started section, and logo. In the process, we have also improved and made small @@ -1842,6 +2242,8 @@ Version 1.7 Version 1.7.0 ************* +**April 25, 2017** + This version is a major new release incorporating seven months of work involving several hundred PRs and over 1700 commits. Highlights include extensive new support for easily building highly interactive @@ -2220,6 +2622,8 @@ Version 1.6 Version 1.6.2 ************* +**August 23, 2016** + Bug fix release with various fixes for gridded data backends and optimizations for Bokeh. @@ -2254,6 +2658,8 @@ optimizations for Bokeh. 
Version 1.6.1 ************* +**July 27, 2016** + Bug fix release following the 1.6 major release with major bug fixes for the grid data interfaces and improvements to the options system. @@ -2273,6 +2679,8 @@ the grid data interfaces and improvements to the options system. Version 1.6.0 ************* +**July 14, 2016** + A major release with an optional new data interface based on xarray, support for batching Bokeh plots for huge increases in performance, support for Bokeh 0.12 and various other fixes and improvements. @@ -2306,6 +2714,8 @@ Version 1.5 Version 1.5.0 ************* +**May 12, 2016** + A major release with a large number of new features including new data interfaces for grid based data, major improvements for DynamicMaps and a large number of bug fixes. @@ -2363,6 +2773,8 @@ Version 1.4 Version 1.4.3 ************* +**February 11, 2016** + A minor bugfix release to patch a number of small but important issues. Fixes and improvements: @@ -2401,6 +2813,8 @@ Backwards compatibility: Version 1.4.2 ************* +**February 7, 2016** + Over the past month since the 1.4.1 release, we have improved our infrastructure for building documentation, updated the main website and made several additional usability improvements. @@ -2446,6 +2860,8 @@ Fixes and improvements: Version 1.4.1 ************* +**December 22, 2015** + Over the past two weeks since the 1.4 release, we have implemented several important bug fixes and have made several usability improvements. @@ -2491,6 +2907,8 @@ Notable bug fixes: Version 1.4.0 ************* +**December 4, 2015** + Over the past few months we have added several major new features and with the help of our users have been able to address a number of bugs and inconsistencies. 
We have closed 57 issues and added over 1100 new @@ -2566,6 +2984,8 @@ Version 1.3 Version 1.3.2 ************* +**July 6, 2015** + Minor bugfix release to address a small number of issues: Features: @@ -2583,6 +3003,8 @@ Bug fixes: Version 1.3.1 ************* +**July 1, 2015** + Minor bugfix release to address a number of issues that weren't caught in time for the 1.3.0 release with the addition of a small number of features: @@ -2605,6 +3027,8 @@ Bug fixes: Version 1.3.0 ************* +**June 27, 2015** + Since the last release we closed over 34 issues and have made 380 commits mostly focused on fixing bugs, cleaning up the API and working extensively on the plotting and rendering system to ensure HoloViews is @@ -2674,6 +3098,8 @@ Version 1.2 Version 1.2.0 ************* +**May 27, 2015** + Since the last release we closed over 20 issues and have made 334 commits, adding a ton of functionality and fixing a large range of bugs in the process. @@ -2739,6 +3165,8 @@ Version 1.1 Version 1.1.0 ************* +**April 15, 2015** + Highlights: - Support for nbagg as a backend (09eab4f1) @@ -2768,6 +3196,8 @@ Version 1.0 Version 1.0.1 ************* +**March 26, 2015** + Minor release addressing bugs and issues with 1.0.0. Highlights: @@ -2789,6 +3219,8 @@ Highlights: Version 1.0.0 ************* +**March 16, 2015** + First public release available on GitHub and PyPI. .. Backticks and links don't play nicely together in RST diff --git a/doc/roadmap.rst b/doc/roadmap.rst index 16cecebb02..caa874de81 100644 --- a/doc/roadmap.rst +++ b/doc/roadmap.rst @@ -1,9 +1,9 @@ HoloViews Roadmap, as of 3/2019 =============================== -HoloViews is maintained by a core development team who coordinate contributions from many other different users/developers. The core-developer priorities depend on funding, usage in ongoing projects, and other factors. 
For 2019, the scheduled tasks are: +HoloViews is maintained by a core development team who coordinate contributions from many other different users/developers. The core-developer priorities depend on funding, usage in ongoing projects, and other factors. For 2019, the scheduled tasks are: -1. **Ongoing maintenance, improved documentation and examples**: +1. **Ongoing maintenance, improved documentation and examples**: As always, there are various bugs and usability issues reported on the issue tracker, and we will address these as time permits. 2. **More flexible and maintainable widgets and layouts using Panel** (`#805 `__): @@ -46,8 +46,8 @@ Other things we'd like to see in HoloViews but have not currently scheduled for 5. **Better 3D support**: There is some improvement planned to 3D support in 2019, but there would still be a long way to go after that, and so anyone who routinely goes from 2D to 3D plotting and back could consider improving the 3D functionality available in HoloViews to make that simpler. -If any of the functionality above is interesting to you (or you have ideas of your own!) and can offer help with implementation, please open an issue on this repository or on the specific subproject repository involved. And if you are lucky enough to be in a position to fund our developers to work on it, please contact ``jbednar@anaconda.com``. +If any of the functionality above is interesting to you (or you have ideas of your own!) and can offer help with implementation, please open an issue on this repository or on the specific subproject repository involved. And if you are lucky enough to be in a position to fund our developers to work on it, please contact ``jbednar@anaconda.com``. -And please note that many of the features that you might think should be part of HoloViews may already be available or planned for one of the other `PyViz tools `__ that are designed to work well with HoloViews, so please also check out the `PyViz Roadmap `__. 
+And please note that many of the features that you might think should be part of HoloViews may already be available or planned for one of the other `PyViz tools `__ that are designed to work well with HoloViews, so please also check out the `PyViz Roadmap `__. diff --git a/examples/README.md b/examples/README.md index 55ece0945d..3db37a2720 100644 --- a/examples/README.md +++ b/examples/README.md @@ -15,7 +15,7 @@ This directory contains all the notebooks built as part of the ## Contributing to examples If you have improvements to suggest regarding the existing set of -notebooks, please tell us on [gitter](https://gitter.im/ioam/holoviews) +notebooks, please tell us on [gitter](https://gitter.im/pyviz/pyviz) or submit a pull request. We are happy to consider user submissions for ``/topics`` so if you have an interesting notebook using HoloViews that isn't too long and about a domain not already in ``/topics``, please @@ -23,4 +23,4 @@ consider submitting a PR. Lastly, if you find a particular notebook that does not seem to be working, please file an -[issue](https://github.com/ioam/holoviews/issues). +[issue](https://github.com/holoviz/holoviews/issues). 
diff --git a/examples/gallery/apps/bokeh/crossfilter.py b/examples/gallery/apps/bokeh/crossfilter.py index 66449910db..6b284cf976 100644 --- a/examples/gallery/apps/bokeh/crossfilter.py +++ b/examples/gallery/apps/bokeh/crossfilter.py @@ -35,11 +35,11 @@ size = pnw.Select(name='Size', value='None', options=['None'] + quantileable) color = pnw.Select(name='Color', value='None', options=['None'] + quantileable) -@pn.depends(x.param.value, y.param.value, color.param.value, size.param.value) +@pn.depends(x.param.value, y.param.value, color.param.value, size.param.value) def create_figure(x, y, color, size): opts = dict(cmap='rainbow', width=800, height=600, line_color='black') if color != 'None': - opts['color'] = color + opts['color'] = color if size != 'None': opts['size'] = hv.dim(size).norm()*20 return hv.Points(df, [x, y], label="%s vs %s" % (x.title(), y.title())).opts(**opts) diff --git a/examples/gallery/apps/bokeh/gapminder.py b/examples/gallery/apps/bokeh/gapminder.py index afd5115b30..4377c0bc79 100644 --- a/examples/gallery/apps/bokeh/gapminder.py +++ b/examples/gallery/apps/bokeh/gapminder.py @@ -40,8 +40,8 @@ # Define options # Combine Points and Text hvgapminder = (gapminder_ds * text).opts( - opts.Points(alpha=0.6, color='Group', cmap='Set1', line_color='black', - size=np.sqrt(dim('Population'))*0.005, + opts.Points(alpha=0.6, color='Group', cmap='Set1', line_color='black', + size=np.sqrt(dim('Population'))*0.005, tools=['hover'], title='Gapminder Demo', responsive=True, show_grid=True), opts.Text(text_font_size='52pt', text_color='lightgray') diff --git a/examples/gallery/demos/bokeh/life_expectancy_split_violin.ipynb b/examples/gallery/demos/bokeh/life_expectancy_split_violin.ipynb index c179ad5be2..2a0425b923 100644 --- a/examples/gallery/demos/bokeh/life_expectancy_split_violin.ipynb +++ b/examples/gallery/demos/bokeh/life_expectancy_split_violin.ipynb @@ -57,27 +57,14 @@ "metadata": {}, "outputs": [], "source": [ - "violin.opts(split='Region', 
responsive=True, min_height=500, show_legend=True, violin_width=1.5, legend_position='bottom_right', title='Life Expectancy by Year for Asian subregions', fontscale=1.5)" + "violin.opts(split='Region', xrotation=90, responsive=True, min_height=500, show_legend=True, violin_width=1.5, legend_position='bottom_right', title='Life Expectancy by Year for Asian subregions', fontscale=1.5)" ] } ], "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.5" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/examples/getting_started/1-Introduction.ipynb b/examples/getting_started/1-Introduction.ipynb index 7f3d2f7435..284feca4f0 100644 --- a/examples/getting_started/1-Introduction.ipynb +++ b/examples/getting_started/1-Introduction.ipynb @@ -210,7 +210,7 @@ "metadata": {}, "outputs": [], "source": [ - "points = hv.Points(station_info, ['lon','lat'])\n", + "points = hv.Points(station_info, ['lon','lat']).opts(color=\"red\")\n", "image + image * points" ] }, @@ -218,9 +218,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n", "On the left, we have the visual representation of the ``image`` object we declared. Using ``+`` we put it into a ``Layout`` together with a new compositional object created with the ``*`` operator called an ``Overlay``. 
This particular overlay displays the station positions on top of our image, which works correctly because the data in both elements exists in the same space, namely New York City.\n", "\n", + "The `.opts()` method call for specifying the visual style is part of the HoloViews options system, which is described in the next ['Getting started' section](2-Customization.ipynb).\n", + "\n", "This overlay on the right lets us see the location of all the subway stations in relation to our midnight taxi dropoffs. Of course, HoloViews allows you to visually express more of the available information with our points. For instance, you could represent the ridership of each subway by point color or point size. For more information see [Applying Customizations](../user_guide/03-Applying_Customizations.ipynb)." ] }, @@ -306,15 +307,13 @@ "\n", "composition.opts(\n", " opts.Image(xrotation=90),\n", - " opts.Points(color='deepskyblue', marker='v', size=6))" + " opts.Points(color='red', marker='v', size=6))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The `.opts()` method call for specifying the visual style is part of the HoloViews options system, which is described in the next ['Getting started' section](2-Customization.ipynb).\n", - "\n", "In the cell above we created and styled a composite object with a few short lines of code. Furthermore, this composite object relates tabular and array data and is immediately presented in a way that can be explored interactively. This way of working enables highly productive exploration, allowing new insights to be gained easily. 
For instance, after exploring with the slider we notice a hotspot of taxi dropoffs at 7am, which we can select as follows:" ] }, diff --git a/examples/reference/containers/bokeh/DynamicMap.ipynb b/examples/reference/containers/bokeh/DynamicMap.ipynb index 3675c06e9d..c036cdff4c 100644 --- a/examples/reference/containers/bokeh/DynamicMap.ipynb +++ b/examples/reference/containers/bokeh/DynamicMap.ipynb @@ -56,8 +56,6 @@ "metadata": {}, "outputs": [], "source": [ - "frequencies = [0.5, 0.75, 1.0, 1.25]\n", - "\n", "def sine_curve(phase, freq):\n", " xvals = [0.1* i for i in range(100)]\n", " return hv.Curve((xvals, [np.sin(phase+freq*x) for x in xvals]))\n", diff --git a/examples/reference/containers/bokeh/GridSpace.ipynb b/examples/reference/containers/bokeh/GridSpace.ipynb index e09bf9c4b5..2aec528ce4 100644 --- a/examples/reference/containers/bokeh/GridSpace.ipynb +++ b/examples/reference/containers/bokeh/GridSpace.ipynb @@ -28,7 +28,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A ``GridSpace`` is a two-dimensional dictionary of HoloViews objects presented onscreen as a grid. In one sense, due to the restriction on it's dimensionality, a ``GridSpace`` may be considered a special-case of [``HoloMap``](./HoloMap.ipynb). In another sense, ``GridSpace`` may be seen as more general as a ``GridSpace`` can hold a ``HoloMap`` but the converse is not permitted; see the [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) user guide for details on how to compose containers." + "A ``GridSpace`` is a two-dimensional dictionary of HoloViews objects presented onscreen as a grid. In one sense, due to the restriction on its dimensionality, a ``GridSpace`` may be considered a special case of [``HoloMap``](./HoloMap.ipynb). 
In another sense, ``GridSpace`` may be seen as more general as a ``GridSpace`` can hold a ``HoloMap`` but the converse is not permitted; see the [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) user guide for details on how to compose containers." ] }, { diff --git a/examples/reference/containers/matplotlib/GridSpace.ipynb b/examples/reference/containers/matplotlib/GridSpace.ipynb index 0a13acfc92..902781a4aa 100644 --- a/examples/reference/containers/matplotlib/GridSpace.ipynb +++ b/examples/reference/containers/matplotlib/GridSpace.ipynb @@ -28,7 +28,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A ``GridSpace`` is a two-dimensional dictionary of HoloViews objects presented onscreen as a grid. In one sense, due to the restriction on it's dimensionality, a ``GridSpace`` may be considered a special-case of [``HoloMap``](./HoloMap.ipynb). In another sense, ``GridSpace`` may be seen as more general as a ``GridSpace`` can hold a ``HoloMap`` but the converse is not permitted; see the [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) user guide for details on how to compose containers." + "A ``GridSpace`` is a two-dimensional dictionary of HoloViews objects presented onscreen as a grid. In one sense, due to the restriction on its dimensionality, a ``GridSpace`` may be considered a special case of [``HoloMap``](./HoloMap.ipynb). In another sense, ``GridSpace`` may be seen as more general as a ``GridSpace`` can hold a ``HoloMap`` but the converse is not permitted; see the [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) user guide for details on how to compose containers." 
] }, { diff --git a/examples/reference/containers/plotly/GridSpace.ipynb b/examples/reference/containers/plotly/GridSpace.ipynb index 70ad4e424f..50e5071df5 100644 --- a/examples/reference/containers/plotly/GridSpace.ipynb +++ b/examples/reference/containers/plotly/GridSpace.ipynb @@ -28,7 +28,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A ``GridSpace`` is a two-dimensional dictionary of HoloViews objects presented onscreen as a grid. In one sense, due to the restriction on it's dimensionality, a ``GridSpace`` may be considered a special-case of [``HoloMap``](./HoloMap.ipynb). In another sense, ``GridSpace`` may be seen as more general as a ``GridSpace`` can hold a ``HoloMap`` but the converse is not permitted; see the [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) user guide for details on how to compose containers." + "A ``GridSpace`` is a two-dimensional dictionary of HoloViews objects presented onscreen as a grid. In one sense, due to the restriction on its dimensionality, a ``GridSpace`` may be considered a special case of [``HoloMap``](./HoloMap.ipynb). In another sense, ``GridSpace`` may be seen as more general as a ``GridSpace`` can hold a ``HoloMap`` but the converse is not permitted; see the [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) user guide for details on how to compose containers." ] }, { diff --git a/examples/reference/elements/bokeh/Image.ipynb b/examples/reference/elements/bokeh/Image.ipynb index 8550f73d6e..b9127c279a 100644 --- a/examples/reference/elements/bokeh/Image.ipynb +++ b/examples/reference/elements/bokeh/Image.ipynb @@ -131,7 +131,7 @@ "source": [ "The constructor of ``Image`` attempts to validate the input data by ensuring it is regularly sampled. 
In some cases, your data may not be regularly sampled to a sufficiently high precision in which case you will see an exception recommending the use of [``QuadMesh``](./QuadMesh.ipynb) instead. If you see this message and are sure that the ``Image`` element is appropriate, you can set the ``rtol`` value in the constructor to allow a higher deviation in sample spacing than the default of ``10e-6``. Alternatively, you can set this globally using ``hv.config.image_rtol`` as described in the [Installing and Configuring](../../../user_guide/Installing_and_Configuring.ipynb) user guide.\n", "\n", - "One additional way to create Image objects is via the separate [ImaGen](http://ioam.github.io/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", + "One additional way to create Image objects is via the separate [ImaGen](https://github.com/pyviz-topics/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Image).``" ] }, diff --git a/examples/reference/elements/bokeh/Points.ipynb b/examples/reference/elements/bokeh/Points.ipynb index ad291664c5..70b3e5bced 100644 --- a/examples/reference/elements/bokeh/Points.ipynb +++ b/examples/reference/elements/bokeh/Points.ipynb @@ -53,7 +53,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Here the random x values and random y values are both considered to be the 'data' with no dependency between them (compare this to how [``Scatter``](./Scatter.ipynb) elements are defined). 
You can think of ``Points`` as simply marking positions in some two-dimensional space that can be sliced by specifying a 2D region-of-interest:" + "Here the random ``x`` values and random ``y`` values are *both* considered to be the coordinates, with no dependency between them (compare this to the different way that [``Scatter``](./Scatter.ipynb) elements are defined). You can think of ``Points`` as simply marking positions in some two-dimensional space. Such positions can be sliced by specifying a 2D region of interest:" ] }, { @@ -70,7 +70,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Although the simplest ``Points`` element simply marks positions in a two-dimensional space without any associated value, other value dimensions (``vdims``) are also supported. Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" + "Although the simplest ``Points`` element simply marks positions in a two-dimensional space without any associated value, value dimensions (``vdims``) are also supported. 
Here is an example with two additional quantities for each point, declared as the ``vdims``s ``z`` and ``size`` (visualized as the color and size of the dots, respectively):" ] }, { @@ -81,10 +81,10 @@ "source": [ "np.random.seed(10)\n", "data = np.random.rand(100,4)\n", - "popts = opts.Points(color='z', size=dim('size')*20, cmap=\"inferno_r\")\n", + "popts = opts.Points(color='z', size=dim('size')*20)\n", "\n", "points = hv.Points(data, vdims=['z', 'size'])\n", - "(points + points[0.3:0.7, 0.3:0.5].hist('z')).opts(popts)" + "(points + points[0.3:0.7, 0.3:0.7].hist()).opts(popts)" ] }, { @@ -127,7 +127,7 @@ "metadata": {}, "outputs": [], "source": [ - "copts = opts.Points(color='z', size=dim('size')*20, cmap=\"inferno_r\", width=250, height=250)\n", + "copts = opts.Points(color='z', size=dim('size')*20, width=250, height=250)\n", "\n", "xs = [0.44183317, 0.37764192, 0.30021061, 0.54346504, 0.38412185]\n", "ys = [0.43401399, 0.42874733, 0.39644188, 0.33775465, 0.39611779]\n", @@ -147,10 +147,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different. The fundamental difference is that [Scatter](./Scatter.ipynb) is used to visualize data where the *y* variable is *dependent*. This semantic difference also explains why the histogram generated by ``hist`` call above visualizes the distribution of a different dimension than it does for [``Scatter``](./Scatter.ipynb).\n", - "\n", - "This difference means that ``Points`` naturally combine elements that express independent variables in two-dimensional space, for instance [``Raster``](./Raster.ipynb) types such as [``Image``](./Image.ipynb). 
Similarly, ``Scatter`` expresses a dependent relationship in two-dimensions and combine naturally with ``Chart`` types such as [``Curve``](./Curve.ipynb).\n", + "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different. The fundamental difference is that [Scatter](./Scatter.ipynb) is used to visualize data where the *y* variable is *dependent*, unlike ``Points``. This semantic difference also explains why the histogram generated by the ``hist`` call above visualizes the distribution of a different dimension than it does for [``Scatter``](./Scatter.ipynb) (because here *z*, not *y*, is the first ``vdim``).\n", "\n", + "This difference means that ``Points`` elements can most naturally overlay with other elements that express independent variables in two-dimensional space, such as [``Raster``](./Raster.ipynb) types like [``Image``](./Image.ipynb). 
Conversely, ``Scatter`` expresses a dependent relationship between *x* and *y* and thus most naturally overlays with ``Chart`` types such as [``Curve``](./Curve.ipynb).\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Points).``" ] diff --git a/examples/reference/elements/bokeh/RGB.ipynb b/examples/reference/elements/bokeh/RGB.ipynb index af375cfb01..f074eff1a4 100644 --- a/examples/reference/elements/bokeh/RGB.ipynb +++ b/examples/reference/elements/bokeh/RGB.ipynb @@ -140,7 +140,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "One additional way to create RGB objects is via the separate [ImaGen](http://ioam.github.io/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", + "One additional way to create RGB objects is via the separate [ImaGen](https://github.com/pyviz-topics/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.RGB).``" ] diff --git a/examples/reference/elements/bokeh/Scatter.ipynb b/examples/reference/elements/bokeh/Scatter.ipynb index 200b675241..3e3c9042ab 100644 --- a/examples/reference/elements/bokeh/Scatter.ipynb +++ b/examples/reference/elements/bokeh/Scatter.ipynb @@ -52,7 +52,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Here the random *y* values are considered to be the 'data' whereas the x positions express where those values are located (compare this to how [``Points``](./Points.ipynb) elements are defined). 
In this sense, ``Scatter`` can be thought of as a [``Curve``](./Curve.ipynb) without any lines connecting the samples and you can use slicing to view the *y* values corresponding to a chosen *x* range:" + "Here the random *y* values are considered to be the 'data' whereas the *x* positions express where those data values were measured (compare this to the different way that [``Points``](./Points.ipynb) elements are defined). In this sense, ``Scatter`` is equivalent to a [``Curve``](./Curve.ipynb) without any lines connecting the samples, and you can use slicing to view the *y* values corresponding to a chosen *x* range:" ] }, { @@ -68,7 +68,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A ``Scatter`` element must always have at least one value dimension but that doesn't mean additional value dimensions aren't supported. Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" + "A ``Scatter`` element must always have at least one value dimension (to give it a *y* location), but additional value dimensions are also supported. Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" ] }, { @@ -82,7 +82,7 @@ "\n", "scatter = hv.Scatter(data, vdims=['y', 'z', 'size'])\n", "scatter = scatter.opts(color='z', size=dim('size')*20)\n", - "scatter + scatter[0.3:0.7, 0.3:0.7].hist('z')" + "scatter + scatter[0.3:0.7, 0.3:0.7].hist()" ] }, { @@ -103,9 +103,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different: ``Points`` are used to visualize data where the *y* variable is *dependent*. 
This semantic difference also explains why the histogram generated by ``hist`` call above visualizes the distribution of a different dimension than it does for [``Points``](./Points.ipynb).\n", + "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different: Unlike ``Scatter``, ``Points`` are used to visualize data where the *y* variable is *independent*. This semantic difference also explains why the histogram generated by the ``hist`` call above visualizes the distribution of a different dimension than it does for [``Points``](./Points.ipynb) (because here *y*, not *z*, is the first ``vdim``).\n", "\n", - "This difference means that ``Scatter`` naturally combine elements that express dependent variables in two-dimensional space such as the ``Chart`` types, such as [``Curve``](./Curve.ipynb). Similarly, ``Points`` express a independent relationship in two-dimensions and combine naturally with [``Raster``](./Raster.ipynb) types such as [``Image``](./Image.ipynb).\n", + "This difference means that ``Scatter`` elements can most naturally overlay with other elements that express dependent relationships between the *x* and *y* axes in two-dimensional space, such as the ``Chart`` types like [``Curve``](./Curve.ipynb). 
Conversely, ``Points`` elements either capture *(x,y)* spatial locations or they express a dependent relationship between an *(x,y)* location and some other dimension (expressed as point size, color, etc.), and thus they can most naturally overlay with [``Raster``](./Raster.ipynb) types like [``Image``](./Image.ipynb).\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Scatter).``" ] diff --git a/examples/reference/elements/matplotlib/Image.ipynb b/examples/reference/elements/matplotlib/Image.ipynb index be72cbd4bd..51a40446ea 100644 --- a/examples/reference/elements/matplotlib/Image.ipynb +++ b/examples/reference/elements/matplotlib/Image.ipynb @@ -116,7 +116,7 @@ "The constructor of ``Image`` attempts to validate the input data by ensuring it is regularly sampled. In some cases, your data may be not be regularly sampled to a sufficiently high precision in which case you qill see an exception recommending the use of [``QuadMesh``](./QuadMesh.ipynb) instead. If you see this message and are sure that the ``Image`` element is appropriate, you can set the ``rtol`` value in the constructor to allow a higher deviation in sample spacing than the default of ``10e-6``. 
Alternatively, you can set this globally using ``hv.config.image_rtol`` as described in the [Installing and Configuring](../../../user_guide/Installing_and_Configuring.ipynb) user guide.\n", "\n", "\n", - "One additional way to create Image objects is via the separate [ImaGen](http://ioam.github.io/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", + "One additional way to create Image objects is via the separate [ImaGen](https://github.com/pyviz-topics/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Image).``" ] diff --git a/examples/reference/elements/matplotlib/Points.ipynb b/examples/reference/elements/matplotlib/Points.ipynb index e52cdd80aa..16f9cb715d 100644 --- a/examples/reference/elements/matplotlib/Points.ipynb +++ b/examples/reference/elements/matplotlib/Points.ipynb @@ -53,7 +53,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Here both the random ``x`` values and random ``y`` values are *both* considered to be the coordinates with no dependency between them (compare this to how [``Scatter``](./Scatter.ipynb) elements are defined). You can think of ``Points`` as simply marking positions in some two-dimensional space that can be sliced by specifying a 2D region-of-interest:" + "Here the random ``x`` values and random ``y`` values are *both* considered to be the coordinates, with no dependency between them (compare this to the different way that [``Scatter``](./Scatter.ipynb) elements are defined). You can think of ``Points`` as simply marking positions in some two-dimensional space. 
Such positions can be sliced by specifying a 2D region of interest:" ] }, { @@ -69,7 +69,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Although the simplest ``Points`` element simply mark positions in a two-dimensional space without any associated value this doesn't mean value dimensions aren't supported. Here is an example with two additional quantities for each point, declared as the ``vdims``s ``z`` and ``size`` visualized as the color and size of the dots, respectively:" + "Although the simplest ``Points`` element simply marks positions in a two-dimensional space without any associated value, value dimensions (``vdims``) are also supported. Here is an example with two additional quantities for each point, declared as the ``vdims``s ``z`` and ``size`` (visualized as the color and size of the dots, respectively):" ] }, { @@ -100,9 +100,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different. The fundamental difference is that [Scatter](./Scatter.ipynb) is used to visualize data where the *y* variable is *dependent*. This semantic difference also explains why the histogram generated by ``hist`` call above visualizes the distribution of a different dimension than it does for [``Scatter``](./Scatter.ipynb).\n", + "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different. The fundamental difference is that [Scatter](./Scatter.ipynb) is used to visualize data where the *y* variable is *dependent*, unlike ``Points``. 
This semantic difference also explains why the histogram generated by the ``hist`` call above visualizes the distribution of a different dimension than it does for [``Scatter``](./Scatter.ipynb) (because here *z*, not *y*, is the first ``vdim``).\n", "\n", - "This difference means that ``Points`` naturally combine elements that express independent variables in two-dimensional space, for instance [``Raster``](./Raster.ipynb) types such as [``Image``](./Image.ipynb). Similarly, ``Scatter`` expresses a dependent relationship in two-dimensions and combine naturally with ``Chart`` types such as [``Curve``](./Curve.ipynb).\n", + "This difference means that ``Points`` elements can most naturally overlay with other elements that express independent variables in two-dimensional space, such as [``Raster``](./Raster.ipynb) types like [``Image``](./Image.ipynb). Conversely, ``Scatter`` expresses a dependent relationship between *x* and *y* and thus most naturally overlays with ``Chart`` types such as [``Curve``](./Curve.ipynb).\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Points).``" ] diff --git a/examples/reference/elements/matplotlib/RGB.ipynb b/examples/reference/elements/matplotlib/RGB.ipynb index 5c51d9c1b9..2094b18a6e 100644 --- a/examples/reference/elements/matplotlib/RGB.ipynb +++ b/examples/reference/elements/matplotlib/RGB.ipynb @@ -127,7 +127,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "One additional way to create RGB objects is via the separate [ImaGen](http://ioam.github.io/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", + "One additional way to create RGB objects is via the separate [ImaGen](https://github.com/pyviz-topics/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", "\n", "For full documentation and the available style and plot 
options, use ``hv.help(hv.RGB).``" ] diff --git a/examples/reference/elements/matplotlib/Scatter.ipynb b/examples/reference/elements/matplotlib/Scatter.ipynb index 59ef5a97ae..4a1f99e489 100644 --- a/examples/reference/elements/matplotlib/Scatter.ipynb +++ b/examples/reference/elements/matplotlib/Scatter.ipynb @@ -53,7 +53,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Here the random *y* values are considered to be the 'data' whereas the x positions express where those values are located (compare this to how [``Points``](./Points.ipynb) elements are defined). In this sense, ``Scatter`` can be thought of as a [``Curve``](./Curve.ipynb) without any lines connecting the samples and you can use slicing to view the *y* values corresponding to a chosen *x* range:" + "Here the random *y* values are considered to be the 'data' whereas the *x* positions express where those data values were measured (compare this to the different way that [``Points``](./Points.ipynb) elements are defined). In this sense, ``Scatter`` is equivalent to a [``Curve``](./Curve.ipynb) without any lines connecting the samples, and you can use slicing to view the *y* values corresponding to a chosen *x* range:" ] }, { @@ -69,7 +69,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A ``Scatter`` element must always have at least one value dimension but that doesn't mean additional value dimensions aren't supported. Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" + "A ``Scatter`` element must always have at least one value dimension (to give it a *y* location), but additional value dimensions are also supported. 
Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" ] }, { @@ -82,8 +82,8 @@ "data = np.random.rand(100,4)\n", "\n", "scatter = hv.Scatter(data, vdims=['y', 'z', 'size'])\n", - "scatter = scatter.opts(color='z', s=dim('size')*20)\n", - "scatter + scatter[0.3:0.7, 0.3:0.7].hist('z')" + "scatter = scatter.opts(color='z', s=dim('size')*100)\n", + "scatter + scatter[0.3:0.7, 0.3:0.7].hist()" ] }, { @@ -94,9 +94,9 @@ "\n", "The marker shape specified above can be any supported by [matplotlib](http://matplotlib.org/api/markers_api.html), e.g. ``s``, ``d``, or ``o``; the other options select the color and size of the marker.\n", "\n", - "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different: ``Points`` are used to visualize data where the *y* variable is *dependent*. This semantic difference also explains why the histogram generated by ``hist`` call above visualizes the distribution of a different dimension than it does for [``Points``](./Points.ipynb).\n", + "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different: Unlike ``Scatter``, ``Points`` are used to visualize data where the *y* variable is *independent*. 
This semantic difference also explains why the histogram generated by the ``hist`` call above visualizes the distribution of a different dimension than it does for [``Points``](./Points.ipynb) (because here *y*, not *z*, is the first ``vdim``).\n", "\n", - "This difference means that ``Scatter`` naturally combine elements that express dependent variables in two-dimensional space such as the ``Chart`` types, such as [``Curve``](./Curve.ipynb). Similarly, ``Points`` express a independent relationship in two-dimensions and combine naturally with [``Raster``](./Raster.ipynb) types such as [``Image``](./Image.ipynb).\n", + "This difference means that ``Scatter`` elements most naturally overlay with other elements that express dependent relationships between the x and y axes in two-dimensional space, such as the ``Chart`` types like [``Curve``](./Curve.ipynb). Conversely, ``Points`` elements either capture (x,y) spatial locations or they express a dependent relationship between an (x,y) location and some other dimension (expressed as point size, color, etc.), and thus they most naturally overlay with [``Raster``](./Raster.ipynb) types like [``Image``](./Image.ipynb).\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Scatter).``" ] diff --git a/examples/reference/elements/plotly/Points.ipynb b/examples/reference/elements/plotly/Points.ipynb index 9cd837a39f..9eed7e9540 100644 --- a/examples/reference/elements/plotly/Points.ipynb +++ b/examples/reference/elements/plotly/Points.ipynb @@ -50,7 +50,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Here both the random ``x`` values and random ``y`` values are *both* considered to be the 'data' with no dependency between them (compare this to how [``Scatter``](./Scatter.ipynb) elements are defined). 
You can think of ``Points`` as simply marking positions in some two-dimensional space that can be sliced by specifying a 2D region-of-interest:" + "Here the random ``x`` values and random ``y`` values are *both* considered to be the coordinates, with no dependency between them (compare this to the different way that [``Scatter``](./Scatter.ipynb) elements are defined). You can think of ``Points`` as simply marking positions in some two-dimensional space. Such positions can be sliced by specifying a 2D region of interest:" ] }, { @@ -66,7 +66,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Although the simplest ``Points`` element simply mark positions in a two-dimensional space without any associated value this doesn't mean value dimensions aren't supported. Here is an example with two additional quantities for each point, declared as the ``value_dimension``s *z* and α visualized as the color and size of the dots, respectively:" + "Although the simplest ``Points`` element simply marks positions in a two-dimensional space without any associated value, value dimensions (``vdims``) are also supported. Here is an example with two additional quantities for each point, declared as the ``vdims``s ``z`` and ``size`` (visualized as the color and size of the dots, respectively):" ] }, { @@ -79,7 +79,7 @@ "data = np.random.rand(100,4)\n", "\n", "points = hv.Points(data, vdims=['z', 'size'])\n", - "(points + points[0.3:0.7, 0.3:0.7]).opts(\n", + "(points + points[0.3:0.7, 0.3:0.7].hist()).opts(\n", " opts.Points(color='z', size=dim('size')*20))" ] }, @@ -94,9 +94,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different. The fundamental difference is that [Scatter](./Scatter.ipynb) is used to visualize data where the *y* variable is *dependent*. 
This semantic difference also explains why the histogram generated by ``hist`` call above visualizes the distribution of a different dimension than it does for [``Scatter``](./Scatter.ipynb).\n", + "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different. The fundamental difference is that [Scatter](./Scatter.ipynb) is used to visualize data where the *y* variable is *dependent*, unlike ``Points``. This semantic difference also explains why the histogram generated by the ``hist`` call above visualizes the distribution of a different dimension than it does for [``Scatter``](./Scatter.ipynb) (because here *z*, not *y*, is the first ``vdim``).\n", "\n", - "This difference means that ``Points`` naturally combine elements that express independent variables in two-dimensional space, for instance [``Raster``](./Raster.ipynb) types such as [``Image``](./Image.ipynb). Similarly, ``Scatter`` expresses a dependent relationship in two-dimensions and combine naturally with ``Chart`` types such as [``Curve``](./Curve.ipynb).\n", + "This difference means that ``Points`` elements can most naturally overlay with other elements that express independent variables in two-dimensional space, such as [``Raster``](./Raster.ipynb) types like [``Image``](./Image.ipynb). 
Conversely, ``Scatter`` expresses a dependent relationship between *x* and *y* and thus most naturally overlays with ``Chart`` types such as [``Curve``](./Curve.ipynb).\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Points).``" ] diff --git a/examples/reference/elements/plotly/RGB.ipynb b/examples/reference/elements/plotly/RGB.ipynb index e2abd85510..a0f2ea6bff 100644 --- a/examples/reference/elements/plotly/RGB.ipynb +++ b/examples/reference/elements/plotly/RGB.ipynb @@ -140,7 +140,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "One additional way to create RGB objects is via the separate [ImaGen](http://ioam.github.io/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", + "One additional way to create RGB objects is via the separate [ImaGen](https://github.com/pyviz-topics/imagen) library, which creates parameterized streams of images for experiments, simulations, or machine-learning applications.\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.RGB).``" ] diff --git a/examples/reference/elements/plotly/Scatter.ipynb b/examples/reference/elements/plotly/Scatter.ipynb index 13f716004a..d5ae5407f9 100644 --- a/examples/reference/elements/plotly/Scatter.ipynb +++ b/examples/reference/elements/plotly/Scatter.ipynb @@ -49,7 +49,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Here the random ``y`` values are considered to be the 'data' whereas the ``x`` positions express where those values are located (compare this to how [``Points``](./Points.ipynb) elements are defined). 
In this sense, ``Scatter`` can be thought of as a [``Curve``](./Curve.ipynb) without any lines connecting the samples and you can use slicing to view the ```` values corresponding to a chosen ``x`` range:" + "Here the random *y* values are considered to be the 'data' whereas the *x* positions express where those data values were measured (compare this to the different way that [``Points``](./Points.ipynb) elements are defined). In this sense, ``Scatter`` is equivalent to a [``Curve``](./Curve.ipynb) without any lines connecting the samples, and you can use slicing to view the *y* values corresponding to a chosen *x* range:" ] }, { @@ -65,7 +65,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A ``Scatter`` element must always have at least one value dimension but that doesn't mean additional value dimensions aren't supported. Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" + "A ``Scatter`` element must always have at least one value dimension (to give it a *y* location), but additional value dimensions are also supported. 
Here is an example with two additional quantities for each point, declared as the ``vdims`` ``'z'`` and ``'size'`` visualized as the color and size of the dots, respectively:" ] }, { @@ -78,7 +78,7 @@ "data = np.random.rand(100,4)\n", "\n", "scatter = hv.Scatter(data, vdims=['y', 'z', 'size'])\n", - "(scatter + scatter[0.3:0.7, 0.3:0.7]).opts(\n", + "(scatter + scatter[0.3:0.7, 0.3:0.7].hist()).opts(\n", " opts.Scatter(color='z', size=dim('size')*10))" ] }, @@ -93,9 +93,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different: ``Points`` are used to visualize data where the *y* variable is *dependent*. This semantic difference also explains why the histogram generated by ``hist`` call above visualizes the distribution of a different dimension than it does for [``Points``](./Points.ipynb).\n", + "**Note**: Although the ``Scatter`` element is superficially similar to the [``Points``](./Points.ipynb) element (they can generate plots that look identical), the two element types are semantically quite different: Unlike ``Scatter``, ``Points`` are used to visualize data where the *y* variable is *independent*. This semantic difference also explains why the histogram generated by the ``hist`` call above visualizes the distribution of a different dimension than it does for [``Points``](./Points.ipynb) (because here *y*, not *z*, is the first ``vdim``).\n", "\n", - "This difference means that ``Scatter`` naturally combine elements that express dependent variables in two-dimensional space such as the ``Chart`` types, such as [``Curve``](./Curve.ipynb). 
Similarly, ``Points`` express a independent relationship in two-dimensions and combine naturally with [``Raster``](./Raster.ipynb) types such as [``Image``](./Image.ipynb).\n", + "This difference means that ``Scatter`` elements can most naturally overlay with other elements that express dependent relationships between the *x* and *y* axes in two-dimensional space, such as the ``Chart`` types like [``Curve``](./Curve.ipynb). Conversely, ``Points`` elements either capture *(x,y)* spatial locations or they express a dependent relationship between an *(x,y)* location and some other dimension (expressed as point size, color, etc.), and thus they can most naturally overlay with [``Raster``](./Raster.ipynb) types like [``Image``](./Image.ipynb).\n", "\n", "For full documentation and the available style and plot options, use ``hv.help(hv.Scatter).``" ] diff --git a/examples/reference/features/bokeh/table_hooks_example.ipynb b/examples/reference/features/bokeh/table_hooks_example.ipynb index 86981f9d6e..d925f114ba 100644 --- a/examples/reference/features/bokeh/table_hooks_example.ipynb +++ b/examples/reference/features/bokeh/table_hooks_example.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "name = ['homepage', 'github', 'chat']\n", - "link = ['http://holoviews.org', 'https://github.com/ioam/holoviews', 'https://gitter.im/ioam/holoviews']\n", + "link = ['http://holoviews.org', 'https://github.com/holoviz/holoviews', 'https://gitter.im/pyviz/pyviz']\n", "table = hv.Table({'Name':name, 'Link':link}, kdims=[], vdims=['Name', 'Link'])" ] }, diff --git a/examples/reference/streams/bokeh/Selection1D_tap.ipynb b/examples/reference/streams/bokeh/Selection1D_tap.ipynb index 6eea8f0ea3..9632d1759e 100644 --- a/examples/reference/streams/bokeh/Selection1D_tap.ipynb +++ b/examples/reference/streams/bokeh/Selection1D_tap.ipynb @@ -46,7 +46,7 @@ "data = [('Week %d' % (i%10), np.random.rand(), chr(65+np.random.randint(5)), i) for i in range(100)]\n", "sample_data = hv.NdOverlay({i: 
hv.Points(gen_samples(np.random.randint(1000, 5000), r2))\n", " for _, r2, _, i in data})\n", - "points = hv.Scatter(data, ['Date', 'r2'], ['block', 'id']).redim.range(r2=(0., 1))\n", + "points = hv.Scatter(data, 'Date', ['r2', 'block', 'id']).redim.range(r2=(0., 1))\n", "stream = Selection1D(source=points)\n", "empty = (hv.Points(np.random.rand(0, 2)) * hv.Slope(0, 0)).relabel('No selection')\n", "\n", diff --git a/examples/reference/streams/bokeh/Tap.ipynb b/examples/reference/streams/bokeh/Tap.ipynb index f50544964f..542afe6cd9 100644 --- a/examples/reference/streams/bokeh/Tap.ipynb +++ b/examples/reference/streams/bokeh/Tap.ipynb @@ -20,6 +20,7 @@ "outputs": [], "source": [ "import pandas as pd\n", + "import panel as pn\n", "import numpy as np\n", "import holoviews as hv\n", "from holoviews import opts\n", @@ -27,6 +28,78 @@ "hv.extension('bokeh', width=90)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, let's look at an extremely simple example. \n", + "\n", + "We will create an empty `hv.Points` element and set it as the source for the `Tap` stream. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create an empty Points element\n", + "points = hv.Points([])\n", + "# Create the Tap stream with the points element as the source\n", + "# We set the x and y here with starting values\n", + "stream = hv.streams.Tap(source=points, x=np.nan, y=np.nan)\n", + "\n", + "# Create a callback for a dynamic map\n", + "def location(x, y):\n", + " \"\"\"Create an empty plot with a changing label\"\"\"\n", + " return hv.Points([], label='x: %0.3f, y: %0.3f' % (x, y))\n", + "\n", + "\n", + "# Connect the Tap stream to the tap_histogram callback\n", + "tap_dmap = hv.DynamicMap(location, streams=[stream])\n", + "\n", + "# Overlay the Points element (which is linked to the tap stream) with the location plot\n", + "points * tap_dmap" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's see what it looks like if we used Panel to give us more control over layout and event triggering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# create an empty Points element\n", + "points = hv.Points([])\n", + "# Create the Tap stream with the points element as the source\n", + "# We set the x and y here with starting values\n", + "stream = hv.streams.Tap(source=points, x=np.nan, y=np.nan)\n", + "\n", + "# make a function that displays the location when called.\n", + "def location(x, y):\n", + " \"\"\"Display pane showing the x and y values\"\"\"\n", + " return pn.pane.Str('Click at %0.3f, %0.3f' % (x, y), width=200)\n", + "\n", + "# Display the points and the function output, updated\n", + "# whenever the stream values change\n", + "layout = pn.Row(points, pn.bind(location, x=stream.param.x, y=stream.param.y))\n", + "# display the container\n", + "layout" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we will now look at a more complex example." 
+ ] + }, { "cell_type": "code", "execution_count": null, @@ -39,7 +112,7 @@ "\n", "# Declare HeatMap\n", "heatmap = hv.HeatMap(dataset.aggregate(['Year', 'State'], np.mean),\n", - " label='Measles Incidence').select(Year=(1928, 2002))\n", + " label='Average Weekly Measles Incidence').select(Year=(1928, 2002))\n", "\n", "# Declare Tap stream with heatmap as source and initial values\n", "posxy = hv.streams.Tap(source=heatmap, x=1951, y='New York')\n", @@ -47,15 +120,24 @@ "# Define function to compute histogram based on tap location\n", "def tap_histogram(x, y):\n", " return hv.Curve(dataset.select(State=y, Year=int(x)), kdims='Week',\n", - " label='Year: %s, State: %s' % (x, y))\n", + " label=f'Year: {x}, State: {y}')\n", "\n", + "# Connect the Tap stream to the tap_histogram callback\n", "tap_dmap = hv.DynamicMap(tap_histogram, streams=[posxy])\n", "\n", + "# Get the range of the aggregated data we're using for plotting\n", + "cmin, cmax = dataset.aggregate(['Year', 'State'], np.mean).range(dim='measles')\n", + "# Adjust the min value since log color mapper lower bound must be >0.0\n", + "cmin += 0.0000001\n", + "\n", + "# Display the Heatmap and Curve side by side\n", "(heatmap + tap_dmap).opts(\n", " opts.Curve(framewise=True, height=500, line_color='black', width=375, yaxis='right'),\n", - " opts.HeatMap(cmap='RdBu_r', fontsize={'xticks': '6pt'}, height=500,\n", - " logz=True, tools=['hover'], width=700, xrotation=90)\n", - ")" + " opts.HeatMap(clim=(cmin, cmax), cmap='RdBu_r', \n", + " fontsize={'xticks': '6pt'}, height=500, logz=True,\n", + " tools=['hover'], width=700, xrotation=90,\n", + " )\n", + ")\n" ] }, { diff --git a/examples/user_guide/04-Style_Mapping.ipynb b/examples/user_guide/04-Style_Mapping.ipynb index b4280c0cdc..c3594edeac 100644 --- a/examples/user_guide/04-Style_Mapping.ipynb +++ b/examples/user_guide/04-Style_Mapping.ipynb @@ -386,7 +386,7 @@ "source": [ "### Explicit color mapping\n", "\n", - "Some elements work through implicit 
colormapping the prime example being the ``Image`` type, however other elements can be colormapped using style mapping instead, by setting the color to an existing dimension." + "Some elements work through implicit colormapping, the prime example being the ``Image`` type. However, other elements can be colormapped using style mapping instead, by setting the color to an existing dimension." ] }, { @@ -521,11 +521,66 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "By default (left plot above), the min and max values in the array map to the first color (white) and last color (dark blue) in the colormap, and NaNs are ``'transparent'`` (an RGBA tuple of (0, 0, 0, 0)), revealing the underlying plot background. When the specified `clipping_colors` are supplied (middle plot above), NaN values are now colored gray, but the plot is otherwise the same because the autoranging still ensures that no value is mapped outside the available color range. Finally, when the `z` range is reduced (right plot above), the color range is mapped from a different range of numerical `z` values, and some values now fall outside the range and are thus clipped to red or green as specified.\n", - " \n", - "#### Other options\n", + "By default (left plot above), the min and max values in the array map to the first color (white) and last color (dark blue) in the colormap, and NaNs are ``'transparent'`` (an RGBA tuple of (0, 0, 0, 0)), revealing the underlying plot background. When the specified `clipping_colors` are supplied (middle plot above), NaN values are now colored gray, but the plot is otherwise the same because the autoranging still ensures that no value is mapped outside the available color range. 
Finally, when the `z` range is reduced (right plot above), the color range is mapped from a different range of numerical `z` values, and some values now fall outside the range and are thus clipped to red or green as specified.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Normalization modes\n", + "\n", + "When using a colormap, there are three available color normalization or `cnorm` options to determine how numerical values are mapped to the range of colors in the colorbar:\n", + "\n", + "* `linear`: Simple linear mapping (used by default)\n", + "* `log`: Logarithmic mapping\n", + "* `eq_hist`: Histogram-equalized mapping\n", + "\n", + "The following cell defines an `Image` containing random samples drawn from a normal distribution (mean of 3) with a square of constant value 100 in the middle, shown with the three `cnorm` modes:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "np.random.seed(42)\n", + "data = np.random.normal(loc=3, scale=0.3, size=(100,100))\n", + "print(\"Mean value of random samples is {mean:.3f}, \".format(mean=np.mean(data))\n", + " + \"which is much lower\\nthan the black square in the center (value 100).\")\n", + "data[45:55,45:55] = 100\n", + "\n", + "imopts=dict(colorbar=True, xaxis='bare', yaxis='bare', height=160, width=200)\n", + "pattern = hv.Image(data)\n", + "\n", + "( pattern.options(cnorm='linear', title='linear', **imopts) \n", + " + pattern.options(cnorm='log', title='log', **imopts)\n", + " + pattern.options(cnorm='eq_hist', title='eq_hist', **imopts))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `'linear'` mode is very easy to interpret numerically, with colors mapped to numerical values linearly as indicated. However, as you can see in this case, high-value outliers like the square here can make it difficult to see any structure in the remaining values. 
The Gaussian noise values all map to the first few colors at the bottom of the colormap, resulting in a background that is almost uniformly yellow even though we know the data includes a variety of different values in the background area.\n", + "\n", + "In the `'log'` mode, the random values are a little easier to see but these samples still use a small portion of the colormap. Logarithmic colormaps are most useful when you know that you are plotting data with an approximately logarithmic distribution.\n", + "\n", + "In the `'eq_hist'` mode, colors are nonlinearly mapped according to the actual distribution of values in the plot, such that each color in the colormap represents an approximately equal number of values in the plot (here with few or no colors reserved for the nearly empty range between 10 and 100). In this mode both the outliers and the overall low-amplitude noise can be seen clearly, but the non-linear distortion can make the colors more difficult to interpret as numerical values.\n", + "\n", + "When working with unknown data distributions, it is often a good idea to try all three of these modes, using `eq_hist` to be sure that you are seeing all of the patterns in the data, then either `log` or `linear` (depending on which one is a better match to your distribution) with the values clipped to the range of values you want to show." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Other colormapping options\n", "\n", - "* ``logz``: Enable logarithmic color scale (e.g. ``logz=True``)\n", + "* ``clim_percentile``: Percentile value to compute colorscale robust to outliers. If `True`, uses 2nd and 98th percentile; otherwise uses the specified percentile value. \n", + "* ``cnorm``: Color normalization to be applied during colormapping. 
Allows switching between 'linear', 'log', and 'eq_hist'.\n", + "* ``logz``: Enable logarithmic color scale (same as `cnorm='log'`; to be deprecated at some point)\n", "* ``symmetric``: Ensures that the color scale is centered on zero (e.g. ``symmetric=True``)" ] }, diff --git a/examples/user_guide/09-Gridded_Datasets.ipynb b/examples/user_guide/09-Gridded_Datasets.ipynb index 959c143983..ecc7540162 100644 --- a/examples/user_guide/09-Gridded_Datasets.ipynb +++ b/examples/user_guide/09-Gridded_Datasets.ipynb @@ -474,8 +474,8 @@ "source": [ "\n", "\n", - "Additional examples of visualizing xarrays in the context of geographical data can be found in the GeoViews documentation: [Gridded Datasets I](http://geo.holoviews.org/Gridded_Datasets_I.html) and\n", - "[Gridded Datasets II](http://geo.holoviews.org/Gridded_Datasets_II.html). These guides also contain useful information on the interaction between xarray data structures and HoloViews Datasets in general." + "Additional examples of visualizing xarrays in the context of geographical data can be found in the GeoViews documentation: [Gridded Datasets I](http://geoviews.org/user_guide/Gridded_Datasets_I.html) and\n", + "[Gridded Datasets II](http://geoviews.org/user_guide/Gridded_Datasets_II.html). These guides also contain useful information on the interaction between xarray data structures and HoloViews Datasets in general." ] }, { diff --git a/examples/user_guide/11-Transforming_Elements.ipynb b/examples/user_guide/11-Transforming_Elements.ipynb index 21afee16df..1a8ced63a6 100644 --- a/examples/user_guide/11-Transforming_Elements.ipynb +++ b/examples/user_guide/11-Transforming_Elements.ipynb @@ -227,7 +227,7 @@ "source": [ "## Operations are parameterized\n", "\n", - "In cases a simple transform is not sufficient or you want to encapsulate some transformation in a more rigorous way an `Operation` allows encapsulating the parameters of a transform on a function-like object. 
Operations in HoloViews are subclasses of ``Operation``, which transform one Element or ``Overlay`` of Elements by returning a new Element that may be a transformation of the original. All operations are parameterized using the [param](https://ioam.github.io/param/) library which allows easy validation and documentation of the operation arguments. In particular, operations are instances of ``param.ParameterizedFunction`` which allows operations to be used in the same way as normal python functions.\n", + "In cases a simple transform is not sufficient or you want to encapsulate some transformation in a more rigorous way an `Operation` allows encapsulating the parameters of a transform on a function-like object. Operations in HoloViews are subclasses of ``Operation``, which transform one Element or ``Overlay`` of Elements by returning a new Element that may be a transformation of the original. All operations are parameterized using the [param](https://github.com/holoviz/param) library which allows easy validation and documentation of the operation arguments. In particular, operations are instances of ``param.ParameterizedFunction`` which allows operations to be used in the same way as normal python functions.\n", "\n", "This approach has several advantages, one of which is that we can manipulate the parameters of operations at several different levels: at the class-level, at the instance-level or when it is called. Another advantage is that using parameterizing operations allows them to be inspected just like any other HoloViews object using ``hv.help``. 
We will now do this for the ``histogram`` operation:" ] diff --git a/examples/user_guide/12-Responding_to_Events.ipynb b/examples/user_guide/12-Responding_to_Events.ipynb index 0ecd24a064..8bcac2aea5 100644 --- a/examples/user_guide/12-Responding_to_Events.ipynb +++ b/examples/user_guide/12-Responding_to_Events.ipynb @@ -107,7 +107,7 @@ "source": [ "The core concept behind a stream is simple: it defines one or more parameters that can change over time that automatically refreshes code depending on those parameter values. \n", "\n", - "Like all objects in HoloViews, these parameters are declared using [param](https://ioam.github.io/param) and streams are defined as a parameterized subclass of the ``holoviews.streams.Stream``. A more convenient way is to use the ``Stream.define`` classmethod:" + "Like all objects in HoloViews, these parameters are declared using [param](https://param.holoviz.org/) and streams are defined as a parameterized subclass of the ``holoviews.streams.Stream``. A more convenient way is to use the ``Stream.define`` classmethod:" ] }, { @@ -124,7 +124,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "This results in a ``Time`` class with a numeric ``t`` parameter that defaults to zero. As this object is parameterized, we can use ``hv.help`` to view it's parameters:" + "This results in a ``Time`` class with a numeric ``t`` parameter that defaults to zero. As this object is parameterized, we can use ``hv.help`` to view its parameters:" ] }, { @@ -335,7 +335,7 @@ "source": [ "## Using Parameterized classes as a stream\n", "\n", - "Creating a custom ``Stream`` class is one easy way to declare parameters, however in many cases you may have already expressed your domain knowledge on a ``Parameterized`` class. A ``DynamicMap`` can easily be linked to the parameters of the class using a so called ``Params`` stream, let's define a simple example which will let use dynamically alter the style applied to the ``Image`` from the previous example. 
We define a ``Style`` class with two parameters, one to control the colormap and another to vary the number of color levels and then use the `.apply` accessor to set those options:" + "Creating a custom ``Stream`` class is one easy way to declare parameters. However, there's no need to make a Stream if you have already expressed your domain knowledge on a ``Parameterized`` class. For instance, let's assume you have made a simple parameterized `BankAccount` class:\n" ] }, { @@ -344,26 +344,79 @@ "metadata": {}, "outputs": [], "source": [ - "from holoviews.streams import Params\n", + "class BankAccount(param.Parameterized):\n", + " balance = param.Number(default=0, doc=\"Bank balance in USD\")\n", + " overdraft = param.Number(default=200, doc=\"Overdraft limit\")\n", "\n", - "class Style(param.Parameterized):\n", - "\n", - " cmap = param.ObjectSelector(default='viridis', objects=['viridis', 'plasma', 'magma'])\n", + "account = BankAccount(name='Jane', balance=300)" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can link parameter changes straight to DynamicMap callable parameters by passing a keyword:param dictionary to the `streams` argument (for HoloViews version >= 1.14.2):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "streams = dict(total=account.param.balance, overdraft=account.param.overdraft, owner=account.param.name)\n", "\n", - " color_levels = param.Integer(default=255, bounds=(1, 255))\n", + "def table(owner, total, overdraft):\n", + " return hv.Table([(owner, overdraft, total)], ['Owner', 'Overdraft ($)', 'Total ($)'])\n", "\n", - "style = Style()\n", + "bank_dmap = hv.DynamicMap(table, streams=streams)\n", + "bank_dmap.opts(height=100)" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now as you set the `balance` parameter on the `account` instance, the DynamicMap above updates. 
Note that the dictionary specifies that the `balance` parameter is mapped to the `total` argument of the callable." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "account.balance=65.4\n", + "account.overdraft=350" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Use with `panel`\n", "\n", - "stream = Params(style)\n", + "This dictionary format is particularly useful when used with the [Panel](http://panel.pyviz.org/) library (a dependency of HoloViews that should always be available), because `panel` widgets always reflect their values on the `value` parameter. This means that if you declare two Panel widgets as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import panel as pn\n", "\n", - "image.apply.opts(streams=[stream]).opts(colorbar=True, width=400)" + "slider = pn.widgets.FloatSlider(start=0, end=500, name='Balance')\n", + "checkbox = pn.widgets.Select(options=['student','regular', 'savings'], name='Account Type')\n", + "pn.Row(slider, checkbox)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Using the `.apply` accessor in this automatically makes resulting `DynamicMap` depend on the streams. Unlike a regular streams class the plot will update whenever a parameter on the instance or class changes, e.g. 
we can update set the ``cmap`` and ``color_level`` parameters and watch the plot update in response:" + "You can map both widget values into a `DynamicMap` callback without having a name clash as follows:" ] }, { @@ -372,15 +425,27 @@ "metadata": {}, "outputs": [], "source": [ - "style.color_levels = 10\n", - "style.cmap = 'plasma'" + "overdraft_limits = {'student':300, 'regular':100, 'savings':0} # Overdraft limits for different account types\n", + "streams = dict(owner=account.param.name, total=slider.param.value, acc=checkbox.param.value)\n", + "\n", + "def account_info(owner, total, acc):\n", + " return hv.Table([(owner, acc, overdraft_limits[acc], total)], \n", + " ['Owner', 'Account Type', 'Overdraft ($)', 'Total ($)'])\n", + "\n", + "widget_dmap = hv.DynamicMap(account_info, streams=streams)\n", + "widget_dmap.opts(height=100)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "It is also possible to be even more explicit and directly map parameters to `.apply` keywords without manually constructing a stream, e.g. the example may also be written as:" + "\n", + "You can now update the plot above using the slider and dropdown widgets. Note that for all these examples, a `Params` stream is created internally. This type of stream can wrap Parameterized objects or sets of Parameters but (since HoloViews 1.10.8) it is rare that an explicit stream object like that needs to be used directly at the user level. To see more examples of how to use Panel with HoloViews, see the [Dashboards user guide](./16-Dashboards.ipynb).\n", + "\n", + "### Using `.apply.opts`\n", + "\n", + "You can supply Parameters in a similar manner to the `.apply.opts` method. In the following example, a `Style` class has Parameters that indicate the desired colormap and color levels for the `image` instance defined earlier. 
We can link these together as follows:" ] }, { @@ -389,14 +454,31 @@ "metadata": {}, "outputs": [], "source": [ - "image.apply.opts(cmap=style.param.cmap, color_levels=style.param.color_levels, colorbar=True, width=400)" + "class Style(param.Parameterized):\n", + "\n", + " colormap = param.ObjectSelector(default='viridis', objects=['viridis', 'plasma', 'magma'])\n", + "\n", + " color_levels = param.Integer(default=255, bounds=(1, 255))\n", + "\n", + "style = Style()\n", + "image.apply.opts(colorbar=True, width=400, cmap=style.param.colormap, color_levels=style.param.color_levels)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "This is a powerful pattern to link parameters to a plot, particularly when combined with the [Panel](http://panel.pyviz.org/) library, which makes it easy to generate a set of widgets from a Parameterized class. To see how this works in practice see the [Dashboards user guide](./16-Dashboards.ipynb)." + "Using the `.apply` accessor in this automatically makes the resulting `DynamicMap` depend on the streams specified by the Parameters. Unlike a regular streams class, the plot will update whenever a Parameter on the instance or class changes. For instance, we can update the ``cmap`` and ``color_level`` parameters and watch the plot update in response:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "style.color_levels = 10\n", + "style.colormap = 'plasma' # Note that this is mapped to the 'cmap' keyword in .apply.opts" ] }, { @@ -502,7 +584,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Occasionally, it is useful to suppress some of the stream parameters of a stream class, especially when using the *linked streams* described in [Custom_Interactivity](13-Custom_Interactivity.ipynb). To do this you can rename the stream parameter to ``None`` so that you no longer need to worry about it being passed as an argument to the callable. 
To re-enable a stream parameter, it is sufficient to either give the stream parameter it's original string name or a new string name." + "Occasionally, it is useful to suppress some of the stream parameters of a stream class, especially when using the *linked streams* described in [Custom_Interactivity](13-Custom_Interactivity.ipynb). To do this you can rename the stream parameter to ``None`` so that you no longer need to worry about it being passed as an argument to the callable. To re-enable a stream parameter, it is sufficient to either give the stream parameter its original string name or a new string name." ] }, { diff --git a/examples/user_guide/15-Large_Data.ipynb b/examples/user_guide/15-Large_Data.ipynb index 9422bc4e13..66fc418b4b 100644 --- a/examples/user_guide/15-Large_Data.ipynb +++ b/examples/user_guide/15-Large_Data.ipynb @@ -6,11 +6,11 @@ "source": [ "# Working with large data using Datashader\n", "\n", - "The various plotting-library backends supported by HoloViews, such as Matplotlib and Bokeh, each have a variety of limitations on the amount of data that is practical to work with. Bokeh in particular mirrors your data directly into an HTML page viewable in your browser, which can cause problems when data sizes approach the limited memory available for each web page in current browsers.\n", + "The various plotting-library backends supported by HoloViews, such as Matplotlib, Bokeh, and Plotly, each have limitations on the amount of data that is practical to work with. 
Bokeh and Plotly in particular mirror your data directly into an HTML page viewable in your browser, which can cause problems when data sizes approach the limited memory available for each web page in current browsers.\n", "\n", - "Luckily, a visualization of even the largest dataset will be constrained by the resolution of your display device, and so one approach to handling such data is to pre-render or rasterize the data into a fixed-size array or image *before* sending it to the backend. The [Datashader](https://github.com/bokeh/datashader) library provides a high-performance big-data server-side rasterization pipeline that works seamlessly with HoloViews to support datasets that are orders of magnitude larger than those supported natively by the plotting-library backends, including millions or billions of points even on ordinary laptops.\n", + "Luckily, a visualization of even the largest dataset will be constrained by the resolution of your display device, and so one approach to handling such data is to pre-render or rasterize the data into a fixed-size array or image *before* sending it to the backend plotting library and thus to your local web browser. The [Datashader](https://github.com/bokeh/datashader) library provides a high-performance big-data server-side rasterization pipeline that works seamlessly with HoloViews to support datasets that are orders of magnitude larger than those supported natively by the plotting-library backends, including millions or billions of points even on ordinary laptops.\n", "\n", - "Here, we will see how and when to use Datashader with HoloViews Elements and Containers. 
For simplicity in this discussion we'll focus on simple synthetic datasets, but the [Datashader docs](http://datashader.org/topics) include a wide variety of real datasets that give a much better idea of the power of using Datashader with HoloViews, and [PyViz.org](http://pyviz.org) shows how to install and work with HoloViews and Datashader together.\n", + "Here, we will see how and when to use Datashader with HoloViews Elements and Containers. For simplicity in this discussion we'll focus on simple synthetic datasets, but [Datashader's examples](http://datashader.org/topics) include a wide variety of real datasets that give a much better idea of the power of using Datashader with HoloViews, and [HoloViz.org](http://holoviz.org) shows how to install and work with HoloViews and Datashader together.\n", "\n", "" ] @@ -26,8 +26,7 @@ "import holoviews as hv\n", "\n", "from holoviews import opts\n", - "\n", - "from holoviews.operation.datashader import datashade, shade, dynspread, rasterize\n", + "from holoviews.operation.datashader import datashade, shade, dynspread, spread, rasterize\n", "from holoviews.operation import decimate\n", "\n", "hv.extension('bokeh','matplotlib')\n", @@ -75,9 +74,9 @@ "source": [ "# Principles of datashading\n", "\n", - "Because HoloViews elements are fundamentally data containers, not visualizations, you can very quickly declare elements such as ``Points`` or ``Path`` containing datasets that may be as large as the full memory available on your machine (or even larger if using Dask dataframes). So even for very large datasets, you can easily specify a data structure that you can work with for making selections, sampling, aggregations, and so on. 
However, as soon as you try to visualize it directly with either the matplotlib or bokeh plotting extensions, the rendering process may be prohibitively expensive.\n", + "Because HoloViews elements are fundamentally data containers, not visualizations, you can very quickly declare elements such as ``Points`` or ``Path`` containing datasets that may be as large as the full memory available on your machine (or even larger if using Dask dataframes). So even for very large datasets, you can easily specify a data structure that you can work with for making selections, sampling, aggregations, and so on. However, as soon as you try to visualize it directly with either the Matplotlib, Plotly, or Bokeh plotting extensions, the rendering process may be prohibitively expensive.\n", "\n", - "Let's start with a simple example we can visualize as normal:" + "Let's start with a simple example that's easy to visualize in any plotting library:" ] }, { @@ -88,7 +87,7 @@ "source": [ "np.random.seed(1)\n", "points = hv.Points(np.random.multivariate_normal((0,0), [[0.1, 0.1], [0.1, 1.0]], (1000,)),label=\"Points\")\n", - "paths = hv.Path([random_walk(2000,30)], label=\"Paths\")\n", + "paths = hv.Path([random_walk(2000,30)], kdims=[\"u\",\"v\"], label=\"Paths\")\n", "\n", "points + paths" ] @@ -101,7 +100,7 @@ "\n", "Because all of the data in these plots gets transferred directly into the web browser, the interactive functionality will be available even on a static export of this figure as a web page. Note that even though the visualization above is not computationally expensive, even with just 1000 points as in the scatterplot above, the plot already suffers from [overplotting](https://anaconda.org/jbednar/plotting_pitfalls), with later points obscuring previously plotted points. \n", "\n", - "With much larger datasets, these issues will quickly make it impossible to see the true structure of the data. 
We can easily declare 50X or 1000X larger versions of the same plots above, but if we tried to visualize them they would be nearly unusable even if the browser did not crash:" + "With much larger datasets, these issues will quickly make it impossible to see the true structure of the data. We can easily declare 50X or 1000X larger versions of the same plots above, but if we tried to visualize them directly they would be unusably slow even if the browser did not crash:" ] }, { @@ -112,7 +111,7 @@ "source": [ "np.random.seed(1)\n", "points = hv.Points(np.random.multivariate_normal((0,0), [[0.1, 0.1], [0.1, 1.0]], (1000000,)),label=\"Points\")\n", - "paths = hv.Path([0.15*random_walk(100000) for i in range(10)],label=\"Paths\")\n", + "paths = hv.Path([0.15*random_walk(100000) for i in range(10)], kdims=[\"u\",\"v\"], label=\"Paths\")\n", "\n", "#points + paths ## Danger! Browsers can't handle 1 million points!" ] @@ -130,20 +129,31 @@ "metadata": {}, "outputs": [], "source": [ - "decimate(points) + datashade(points) + datashade(paths)" + "decimate(points) + rasterize(points) + rasterize(paths)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Decimating a plot in this way can be useful, but it discards most of the data, yet still suffers from overplotting. If you have Datashader installed, you can instead use the `datashade()` operation to create a dynamic Datashader-based Bokeh plot. The middle plot above shows the result of using `datashade()` to create a dynamic Datashader-based plot out of an Element with arbitrarily large data. In the Datashader version, a new image is regenerated automatically on every zoom or pan event, revealing all the data available at that zoom level and avoiding issues with overplotting by dynamically rescaling the colors used. The same process is used for the line-based data in the Paths plot.\n", + "Decimating a plot in this way can be useful, but it discards most of the data, yet still suffers from overplotting. 
If you have Datashader installed, you can instead use Datashader operations like `rasterize()` to create a dynamic Datashader-based Bokeh plot. The middle plot above shows the result of using `rasterize()` to create a dynamic Datashader-based plot out of an Element with arbitrarily large data. In the rasterized version, the data is binned into a fixed-size 2D array automatically on every zoom or pan event, revealing all the data available at that zoom level and avoiding issues with overplotting by dynamically rescaling the colors used. Each pixel is colored by how many datapoints fall in that pixel, faithfully revealing the data's distribution in an easy-to-display plot. The same process is used for the line-based data in the Paths plot, where darker colors represent path intersections.\n", "\n", - "These two Datashader-based plots are similar to the native Bokeh plots above, but instead of making a static Bokeh plot that embeds points or line segments directly into the browser, HoloViews sets up a Bokeh plot with dynamic callbacks that render the data as an RGB image using Datashader instead. The dynamic re-rendering provides an interactive user experience even though the data itself is never provided directly to the browser. Of course, because the full data is not in the browser, a static export of this page (e.g. on holoviews.org or on anaconda.org) will only show the initially rendered version, and will not update with new images when zooming as it will when there is a live Python process available.\n", + "These two Datashader-based plots are similar to the native Bokeh plots above, but instead of making a static Bokeh plot that embeds points or line segments directly into the browser, HoloViews sets up a Bokeh plot with dynamic callbacks instructing Datashader to rasterize the data into a fixed-size array (effectively a 2D histogram) instead. 
The dynamic re-rendering provides an interactive user experience, even though the data itself is never provided directly to the browser. Of course, because the full data is not in the browser, a static export of this page (e.g. on holoviews.org or on anaconda.org) will only show the initially rendered version, and will not update with new rasterized arrays when zooming as it will when there is a live Python process available.\n", "\n", - "Though you can no longer have a completely interactive exported file, with the Datashader version on a live server you can now change the number of data points from 1000000 to 10000000 or more to see how well your machine will handle larger datasets. It will get a bit slower, but if you have enough memory, it should still be very usable, and should never crash your browser as transferring the whole dataset into your browser would. If you don't have enough memory, you can instead set up a [Dask](http://dask.pydata.org) dataframe as shown in other Datashader examples, which will provide out-of-core and/or distributed processing to handle even the largest datasets.\n", + "Though you can no longer have a completely interactive exported file, with the Datashader version on a live server you can now change the number of data points from 1000000 to 10000000 or more to see how well your machine will handle larger datasets. It will get a bit slower, but if you have enough memory, it should still be very usable, and should never crash your browser as transferring the whole dataset into your browser would. If you don't have enough memory, you can instead set up a [Dask](http://dask.pydata.org) dataframe as shown in other Datashader examples, which will provide out-of-core and/or distributed processing to handle even the largest datasets if you have enough computational power and memory or are willing to wait for out-of-core computation." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# HoloViews operations for datashading\n", "\n", - "The `datashade()` operation is actually a \"macro\" or shortcut that combines the two main computations done by datashader, namely `shade()` and `rasterize()`:" + "HoloViews provides several operations for calling Datashader on HoloViews elements, including `rasterize()`, `shade()`, and `datashade()`.\n", + "\n", + "`rasterize()` uses Datashader to render the data into what is by default a 2D histogram, where every array cell counts the data points falling into that pixel. Bokeh then colormaps that array, turning each cell into a pixel in an image. \n", + "\n", + "Instead of having Bokeh do the colormapping, you can instruct Datashader to do so, by wrapping the output of `rasterize()` in a call to `shade()`, where `shade()` is Datashader's colormapping function. The `datashade()` operation is also provided as a simple macro, where `datashade(x)` is equivalent to `shade(rasterize(x))`:" ] }, { @@ -152,18 +162,29 @@ "metadata": {}, "outputs": [], "source": [ - "rasterize(points).hist() + shade(rasterize(points)) + datashade(points)" + "ropts = dict(colorbar=True, tools=[\"hover\"], width=350)\n", + "\n", + "rasterize( points).opts(cmap=\"kbc_r\", cnorm=\"linear\").relabel('rasterize()').opts(**ropts).hist() + \\\n", + "shade(rasterize(points), cmap=\"kbc_r\", cnorm=\"linear\").relabel(\"shade(rasterize())\") + \\\n", + "datashade( points, cmap=\"kbc_r\", cnorm=\"linear\").relabel(\"datashade()\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "In all three of the above plots, `rasterize()` is being called to aggregate the data (a large set of x,y locations) into a rectangular grid, with each grid cell counting up the number of points that fall into it. In the plot on the left, only `rasterize()` is done, and the resulting numeric array of counts is passed to Bokeh for colormapping. 
Bokeh can then use dynamic (client-side, browser-based) operations in JavaScript, allowing users to have dynamic control over even static HTML plots. For instance, in this case, users can use the Box Select tool and select a range of the histogram shown, dynamically remapping the colors used in the plot to cover the selected range.\n", + "In all three of the above plots, `rasterize()` is being called to aggregate the data (a large set of x,y locations) into a rectangular grid, with each grid cell counting up the number of points that fall into it. In the first plot, only `rasterize()` is done, and the resulting numeric array of counts is passed to Bokeh for colormapping. That way hover and colorbars can be supported (as shown), and Bokeh can then provide dynamic (client-side, browser-based) colormapping tools in JavaScript, allowing users to have dynamic control over even static HTML plots. For instance, in this case, users can use the Box Select tool and select a range of the histogram shown, dynamically remapping the colors used in the plot to cover the selected range.\n", "\n", - "The other two plots should be identical. In both cases, the numerical array output of `rasterize()` is mapped into RGB colors by Datashader itself, in Python (\"server-side\"), which allows special Datashader computations like the histogram-equalization in the above plots and the \"spreading\" discussed below. The `shade()` and `datashade()` operations accept a `cmap` argument that lets you control the colormap used, which can be selected to match the HoloViews/Bokeh `cmap` option but is strictly independent of it. See ``hv.help(rasterize)``, ``hv.help(shade)``, and ``hv.help(datashade)`` for options that can be selected, and the [Datashader web site](http://datashader.org) for all the details. 
The lower-level `aggregate()` and `regrid()` give more control over how the data is aggregated.\n", + "The other two plots should be identical in appearance, but with the numerical array output of `rasterize()` mapped into RGB colors by Datashader itself, in Python (\"server-side\"), which allows some special Datashader computations described below but prevents other Bokeh-based features like hover and colorbars from being used. Here we've instructed Datashader to use the same colormap used by bokeh, so that the plots look similar, but as you can see the `rasterize()` colormap is determined by a HoloViews plot option, while the `shade` and `datashade` colormap is determined by an argument to those operations. See ``hv.help(rasterize)``, ``hv.help(shade)``, and ``hv.help(datashade)`` for options that can be selected, and the [Datashader web site](http://datashader.org) for all the details. HoloViews also provides lower-level `aggregate()` and `regrid()` operations that implement `rasterize()` and give more control over how the data is aggregated, but these are not needed for typical usage." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setting options\n", "\n", - "Since datashader only sends the data currently in view to the plotting backend, the default behavior is to rescale colormap to the range of the visible data as the zoom level changes. This behavior may not be desirable when working with images; to instead use a fixed colormap range, the `clim` parameter can be passed to the `bokeh` backend via the `opts()` method. Note that this approach works with `rasterize()` where the colormapping is done by the `bokeh` backend. With `datashade()`, the colormapping is done with the `shade()` function which takes a `clims` parameter directly instead of passing additional parameters to the backend via `opts()`." + "By their nature, the datashading operations accept one HoloViews Element type and return a different Element type. 
Regardless of what type they are given, `rasterize()` returns an `hv.Image`, while `shade()` and `datashade()` return an `hv.RGB`. It is important to keep this transformation in mind, because HoloViews options that you set on your original Element type are not normally transferred to your new Element:" ] }, { @@ -172,31 +193,150 @@ "metadata": {}, "outputs": [], "source": [ - "n = 10_000\n", + "points2 = decimate(points, dynamic=False, max_samples=3000)\n", + "points2.opts(color=\"green\", size=6, marker=\"s\")\n", "\n", - "# Strong signal on top\n", - "rs = np.random.RandomState(101010)\n", - "x = rs.pareto(n, n)\n", - "y = x + rs.standard_normal(n)\n", - "img1, *_ = np.histogram2d(x, y, bins=60)\n", + "points2 + rasterize(points2).relabel(\"Rasterized\") + datashade(points2).relabel(\"Datashaded\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can see the datashaded plot represents each point as a single pixel, many of which are very difficult to see, and that the color, size, and marker shape that you set on the Points element will not be applied to the rasterized or datashaded plot. \n", "\n", - "# Weak signal in the middle\n", - "x2 = rs.standard_normal(n)\n", - "y2 = 5 * x + 10 * rs.standard_normal(n)\n", - "img2, *_ = np.histogram2d(x2, y2, bins=60)\n", + "If you want to use Datashader to recreate the options from the original plot, you can usually do so, but you will have to use the various Datashader-specific features explained in the sections below along with HoloViews options for `hv.Image` or `hv.RGB`. 
For example:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "points2 + \\\n", + "spread(rasterize(points2), px=4, shape='square').opts(cmap=[\"green\"]).relabel(\"Rasterized\") +\\\n", + "spread(datashade(points2, cmap=[\"green\"]), px=4, shape='square').relabel(\"Datashaded\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that by forcing the single-color colormap `[\"green\"]`, Datashader's support for avoiding overplotting has been lost. In most cases you will want to map to a proper colormap rather than a single color, to better reveal the underlying distribution:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "decimate(points2) + \\\n", + "spread(rasterize(points2), px=4, shape='square').opts(cmap=\"Greens\").relabel(\"Rasterized\") +\\\n", + "spread(datashade(points2, cmap=\"green\"), px=4, shape='square').relabel(\"Datashaded\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Colormapping\n", "\n", - "img = img1 + img2\n", - "hv_img = hv.Image(img).opts(active_tools=['wheel_zoom'])\n", - "auto_scale_grid = rasterize(hv_img).opts(title='Automatic color range rescaling')\n", - "fixed_scale_grid = rasterize(hv_img).opts(title='Fixed color range', clim=(img.min(), img.max()))\n", - "auto_scale_grid + fixed_scale_grid; # Output supressed and gif shown below instead" + "As you can see above, the choice of colormap and the various colormapping options can be very important for datashaded plots. One issue often seen in large, real-world datasets is that there is structure at many spatial scales, which requires special attention to colormapping options. 
This example dataset from the [Datashader documentation](https://datashader.org/getting_started/Pipeline.html) illustrates the issues, with data clustering at five different spatial scales:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "\n", + "num=10000\n", + "np.random.seed(1)\n", + "\n", + "dists = {cat: pd.DataFrame(dict([('x',np.random.normal(x,s,num)), \n", + " ('y',np.random.normal(y,s,num)), \n", + " ('val',val), \n", + " ('cat',cat)])) \n", + " for x, y, s, val, cat in \n", + " [( 2, 2, 0.03, 10, \"d1\"), \n", + " ( 2, -2, 0.10, 20, \"d2\"), \n", + " ( -2, -2, 0.50, 30, \"d3\"), \n", + " ( -2, 2, 1.00, 40, \"d4\"), \n", + " ( 0, 0, 3.00, 50, \"d5\")] }\n", + "\n", + "df = pd.concat(dists,ignore_index=True)\n", + "df[\"cat\"]=df[\"cat\"].astype(\"category\")\n", + "df" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "" + "Each of the five categories has 10000 points, but distributed over different spatial areas. Bokeh supports three colormap normalization options, which each behave differently:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "ropts = dict(tools=[\"hover\"], height=380, width=330, colorbar=True, colorbar_position=\"bottom\")\n", + "\n", + "hv.Layout([rasterize(hv.Points(df)).opts(**ropts).opts(cnorm=n).relabel(n)\n", + " for n in [\"linear\", \"log\", \"eq_hist\"]])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, the `linear` map is easy to interpret, but nearly all of the pixels are drawn in the lightest blue, because the highest-count pixel (around a count of 6000) is much larger in value than the typical pixels. 
The other two plots show the full structure (five concentrations of data points, including one in the background), with `log` using a standard logarithmic transformation of the count data before colormapping, and `eq_hist` using a histogram-equalization technique (see the [Datashader docs](https://datashader.org/getting_started/Pipeline.html) to reveal structure without any assumptions about the incoming distribution (but with an irregularly spaced colormap that makes the numeric values difficult to reason about). In practice, it is generally a good idea to use `eq_hist` when exploring a large dataset initially, so that you will see any structure present, then switch to `log` or `linear` to share the plots with a simpler-to-explain colormap. All three of these options are supported by the various backends (including Bokeh version 2.2.3 or later) and by `shade()` and `datashade()` except that `eq_hist` is not yet available for the Plotly backend." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Since datashader only sends the data currently in view to the plotting backend, the default behavior is to rescale the colormap to the range of the visible data as the zoom level changes. This behavior may not be desirable when working with images; to instead use a fixed colormap range, the `clim` parameter can be passed to the `bokeh` backend via the `opts()` method. Note that this approach works with `rasterize()` where the colormapping is done by the `bokeh` backend. With `datashade()`, the colormapping is done with the `shade()` function which takes a `clims` parameter directly instead of passing additional parameters to the backend via `opts()`. 
For example (removing the semicolon in a live notebook to see the output):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pts1 = rasterize(hv.Points(df)).opts(**ropts).opts(tools=[], cnorm='log', axiswise=True)\n", + "pts2 = rasterize(hv.Points(df)).opts(**ropts).opts(tools=[], cnorm='log', axiswise=True)\n", + "\n", + "pts1 + pts2.opts(clim=(0, 10000));" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "By default, pixels with a count of zero are transparent, letting the plot background show through so that the data can be used in overlays. If you want zero to map to the lowest colormap color instead to make a dense, fully filled-in image, you can use `redim.nodata` to set the `Dimension.nodata` parameter to None:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "hv.Layout([rasterize(hv.Points(df), vdim_prefix='').redim.nodata(Count=n)\\\n", + " .opts(**ropts, cnorm=\"eq_hist\").relabel(\"nodata=\"+str(n))\n", + " for n in [0, None]])" ] }, { @@ -205,16 +345,16 @@ "source": [ "## Spreading\n", "\n", - "The Datashader examples above treat points and lines as infinitesimal in width, such that a given point or small bit of line segment appears in at most one pixel. This approach ensures that the overall distribution of the points will be mathematically well founded -- each pixel will scale in value directly by the number of points that fall into it, or by the lines that cross it.\n", + "By default, Datashader treats points and lines as infinitesimal in width, such that a given point or small bit of line segment appears in at most one pixel. This approach ensures that the overall distribution of the points will be mathematically well founded -- each pixel will scale in value directly by the number of points that fall into it, or by the lines that cross it. 
As a consequence, Datashader does not currently provide support for a marker size or a line width, which both effectively default to one pixel.\n", "\n", - "However, many monitors are sufficiently high resolution that the resulting point or line can be difficult to see---a single pixel may not actually be visible on its own, and its color may likely be very difficult to make out. To compensate for this, HoloViews provides access to Datashader's image-based \"spreading\", which makes isolated pixels \"spread\" into adjacent ones for visibility. There are two varieties of spreading supported:\n", + "However, many monitors are sufficiently high resolution that a single-pixel point or line can be difficult to see---one pixel may not be visible at all on its own, and even if it is visible it is often difficult to see its color. To compensate for this, HoloViews provides access to Datashader's raster-based \"spreading\" (a generalization of image dilation and convolution), which makes isolated nonzero cells \"spread\" into adjacent ones for visibility. There are two varieties of spreading supported:\n", "\n", - "1. ``spread``: fixed spreading of a certain number of pixels, which is useful if you want to be sure how much spreading is done regardless of the properties of the data.\n", - "2. ``dynspread``: spreads up to a maximum size as long as it does not exceed a specified fraction of adjacency between pixels. \n", + "1. ``spread``: fixed spreading of a certain number of cells (pixels), which is useful if you want to be sure how much spreading is done regardless of the properties of the data.\n", + "2. ``dynspread``: spreads up to a maximum size as long as it does not exceed a specified fraction of adjacency between cells (pixels).\n", "\n", - "Dynamic spreading is typically more useful, because it adjusts depending on how close the datapoints are to each other on screen. 
Both types of spreading require Datashader to do the colormapping (applying `shade`), because they operate on RGB pixels, not data arrays.\n", + "Dynamic spreading is typically more useful for interactive plotting, because it adjusts depending on how close the datapoints are to each other on screen. As of Datashader 0.12, both types of spreading are supported for both `rasterize()` and `shade()`, but previous Datashader versions only support spreading on the RGB output of `shade()`.\n", "\n", - "You can compare the results in the two plots below after zooming in:" + "As long as you have Datashader 0.12 or later, you can compare the results when you zoom the two plots below; when you zoom in far enough you should be able to see that the in the two zoomed-in plots below, then zoom out to see that the plots are the same when points are clustered together to form a distribution. (If running a live notebook; remove the semicolon so that you see the live output rather than the saved GIF.)" ] }, { @@ -223,13 +363,17 @@ "metadata": {}, "outputs": [], "source": [ - "datashade(points) + dynspread(datashade(points))" + "pts = rasterize(points).opts(cnorm='eq_hist')\n", + "\n", + "pts + dynspread(pts);" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "\n", + "\n", "Both plots show the same data, and look identical when zoomed out, but when zoomed in enough you should be able to see the individual data points on the right while the ones on the left are barely visible. 
The dynspread parameters typically need some hand tuning, as the only purpose of such spreading is to make things visible on a particular monitor for a particular observer; the underlying mathematical operations in Datashader do not normally need parameters to be adjusted.\n", "\n", "The same operation works similarly for line segments:" @@ -241,7 +385,7 @@ "metadata": {}, "outputs": [], "source": [ - "datashade(paths) + dynspread(datashade(paths))" + "rasterize(paths) + dynspread(rasterize(paths), threshold=0.6)" ] }, { @@ -286,7 +430,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Above you can see that (as of Datashader 0.11) categorical aggregates can take any reduction function, either `count`ing the datapoints (left) or reporting some other statistic (e.g. the mean value of a column, right).\n", + "Above you can see that (as of Datashader 0.11) categorical aggregates can take any reduction function, either `count`ing the datapoints (left) or reporting some other statistic (e.g. the mean value of a column, right). 
This type of categorical mixing is currently only supported by `shade()` and `datashade()`, not `rasterize()` alone, because it depends on Datashader's custom color mixing code.\n", "\n", "Categorical aggregates are one way to allow separate lines or other shapes to be visually distinctive from one another while avoiding obscuring data due to overplotting:" ] @@ -356,7 +500,7 @@ "\n", "dates = pd.date_range(start=\"2014-01-01\", end=\"2016-01-01\", freq='1D') # or '1min'\n", "curve = hv.Curve((dates, time_series(N=len(dates), sigma = 1)))\n", - "datashade(curve, cmap=[\"blue\"], width=800).opts(width=800)" + "rasterize(curve, width=800).opts(width=800, cmap=['blue'])" ] }, { @@ -376,10 +520,10 @@ "smoothed = rolling(curve, rolling_window=50)\n", "outliers = rolling_outlier_std(curve, rolling_window=50, sigma=2)\n", "\n", - "ds_curve = datashade(curve, cmap=[\"blue\"])\n", - "spread = dynspread(datashade(smoothed, cmap=[\"red\"], width=800),max_px=1) \n", + "ds_curve = rasterize(curve).opts(cmap=[\"blue\"])\n", + "curvespread = dynspread(datashade(smoothed, cmap=[\"red\"], width=800),max_px=1) \n", "\n", - "(ds_curve * spread * outliers).opts(\n", + "(ds_curve * curvespread * outliers).opts(\n", " opts.Scatter(line_color=\"black\", fill_color=\"red\", size=10, tools=['hover', 'box_select'], width=800))" ] }, @@ -387,46 +531,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Note that the above plot will look blocky in a static export (such as on anaconda.org), because the exported version is generated without taking the size of the actual plot (using default height and width for Datashader) into account, whereas the live notebook automatically regenerates the plot to match the visible area on the page. 
The result of all these operations can be laid out, overlaid, selected, and sampled just like any other HoloViews element, letting you work naturally with even very large datasets.\n", - "\n", - "\n", - "# Hover info\n", - "\n", - "As you can see in the examples above, converting the data to an image using Datashader makes it feasible to work with even very large datasets interactively. One unfortunate side effect is that the original datapoints and line segments can no longer be used to support \"tooltips\" or \"hover\" information directly for RGB images generated with `datashade`; that data simply is not present at the browser level, and so the browser cannot unambiguously report information about any specific datapoint. \n", - "\n", - "If you do need hover information, there are two good options available:\n", - "\n", - "1) Use the ``rasterize`` operation without `shade`, which will let the plotting code handle the conversion to colors while still having the actual aggregated data to support hovering\n", - "\n", - "2) Overlay a separate layer as a ``QuadMesh`` or ``Image`` containing the hover information" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from holoviews.streams import RangeXY\n", - "\n", - "rasterized = rasterize(points, width=400, height=400)\n", + "The result of all these operations can be laid out, overlaid, selected, and sampled just like any other HoloViews element, letting you work naturally with even very large datasets.\n", "\n", - "fixed_hover = (datashade(points, width=400, height=400) * \n", - " hv.QuadMesh(rasterize(points, width=10, height=10, dynamic=False)))\n", - "\n", - "dynamic_hover = (datashade(points, width=400, height=400) * \n", - " rasterize(points, width=10, height=10, streams=[RangeXY]).apply(hv.QuadMesh))\n", - "\n", - "(rasterized + fixed_hover + dynamic_hover).opts(\n", - " opts.QuadMesh(tools=['hover'], alpha=0, hover_alpha=0.2), \n", - " 
opts.Image(tools=['hover']))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the above examples, the plot on the left provides hover information directly on the aggregated ``Image``. The middle plot displays hover information as a ``QuadMesh`` at a fixed spatial scale, while the one on the right reports on an area that scales with the zoom level so that arbitrarily small regions of data space can be examined, which is generally more useful (but requires a live Python server)." + "Note that the above plot will look blocky in a static export (such as on anaconda.org), because the exported version is generated without taking the size of the actual plot (using default height and width for Datashader) into account, whereas the live notebook automatically regenerates the plot to match the visible area on the page. " ] }, { @@ -451,7 +558,7 @@ "\n", "- **points**: [`hv.Nodes`](../reference/elements/bokeh/Graph.ipynb), [`hv.Points`](../reference/elements/bokeh/Points.ipynb), [`hv.Scatter`](../reference/elements/bokeh/Scatter.ipynb)\n", "- **line**: [`hv.Contours`](../reference/elements/bokeh/Contours.ipynb), [`hv.Curve`](../reference/elements/bokeh/Curve.ipynb), [`hv.Path`](../reference/elements/bokeh/Path.ipynb), [`hv.Graph`](../reference/elements/bokeh/Graph.ipynb), [`hv.EdgePaths`](../reference/elements/bokeh/Graph.ipynb), [`hv.Spikes`](../reference/elements/bokeh/Spikes.ipynb), [`hv.Segments`](../reference/elements/bokeh/Segments.ipynb)\n", - "- **area**: [`hv.Area`](../reference/elements/bokeh/Area.ipynb), [`hv.Spread`](../reference/elements/bokeh/Spread.ipynb)\n", + "- **area**: [`hv.Area`](../reference/elements/bokeh/Area.ipynb), [`hv.Rectangles`](../reference/elements/bokeh/Rectangles.ipynb), [`hv.Spread`](../reference/elements/bokeh/Spread.ipynb)\n", "- **raster**: [`hv.Image`](../reference/elements/bokeh/Image.ipynb), [`hv.HSV`](../reference/elements/bokeh/HSV.ipynb), [`hv.RGB`](../reference/elements/bokeh/RGB.ipynb)\n", "- 
**trimesh**: [`hv.TriMesh`](../reference/elements/bokeh/TriMesh.ipynb)\n", "- **quadmesh**: [`hv.QuadMesh`](../reference/elements/bokeh/QuadMesh.ipynb)\n", @@ -470,11 +577,11 @@ "\n", "- datashadable annotations: [`hv.Arrow`](../reference/elements/bokeh/Arrow.ipynb), [`hv.Bounds`](../reference/elements/bokeh/Bounds.ipynb), [`hv.Box`](../reference/elements/bokeh/Box.ipynb), [`hv.Ellipse`](../reference/elements/bokeh/Ellipse.ipynb) (actually do work with datashade currently, but not officially supported because they are not vectorized and thus unlikely to have enough items to be worth datashading)\n", "- other annotations: [`hv.Arrow`](../reference/elements/bokeh/Arrow.ipynb), [`hv.HLine`](../reference/elements/bokeh/HLine.ipynb), [`hv.VLine`](../reference/elements/bokeh/VLine.ipynb), [`hv.Text`](../reference/elements/bokeh/Text.ipynb)\n", - "- kdes: [`hv.Distribution`](../reference/elements/bokeh/Distribution.ipynb), [`hv.Bivariate`](../reference/elements/bokeh/Bivariate.ipynb)\n", + "- kdes: [`hv.Distribution`](../reference/elements/bokeh/Distribution.ipynb), [`hv.Bivariate`](../reference/elements/bokeh/Bivariate.ipynb) (already aggregated)\n", "- categorical/symbolic: [`hv.BoxWhisker`](../reference/elements/bokeh/BoxWhisker.ipynb), [`hv.Bars`](../reference/elements/bokeh/Bars.ipynb), [`hv.ErrorBars`](../reference/elements/bokeh/ErrorBars.ipynb)\n", "- tables: [`hv.Table`](../reference/elements/bokeh/Table.ipynb), [`hv.ItemTable`](../reference/elements/bokeh/ItemTable.ipynb)\n", "\n", - "Examples of each supported Element type:" + "Let's make some examples of each supported Element type. 
First, some dummy data:" ] }, { @@ -483,16 +590,9 @@ "metadata": {}, "outputs": [], "source": [ - "hv.output(backend='matplotlib')\n", - "\n", + "from bokeh.sampledata.unemployment import data as unemployment\n", "from bokeh.sampledata.us_counties import data as counties\n", "\n", - "opts.defaults(\n", - " opts.Image(aspect=1, axiswise=True, xaxis='bare', yaxis='bare'),\n", - " opts.RGB(aspect=1, axiswise=True, xaxis='bare', yaxis='bare'),\n", - " opts.HSV(aspect=1, axiswise=True, xaxis='bare', yaxis='bare'),\n", - " opts.Layout(vspace=0.1, hspace=0.1, sublabel_format=\"\", fig_size=80))\n", - "\n", "np.random.seed(12)\n", "N=100\n", "pts = [(10*i/N, np.sin(10*i/N)) for i in range(N)]\n", @@ -512,46 +612,111 @@ "Qy = np.sin(Y) + np.sin(X)\n", "Z = np.sqrt(X**2 + Y**2)\n", "\n", + "rect_colors = {True: 'red', False: 'green'}\n", "s = np.random.randn(100).cumsum()\n", - "e = s + np.random.randn(100)\n", + "e = s + np.random.randn(100)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, some options:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "hv.output(backend='matplotlib')\n", "\n", + "opts.defaults(opts.Layout(vspace=0.1, hspace=0.1, sublabel_format='', fig_size=48))\n", + "eopts = dict(aspect=1, axiswise=True, xaxis='bare', yaxis='bare', xticks=False, yticks=False)\n", "opts2 = dict(filled=True, edge_color='z')\n", - "tri = hv.TriMesh.from_vertices(hv.Points(np.random.randn(N,3), vdims='z')).opts(**opts2)\n", - "(tri + tri.edgepaths + datashade(tri, aggregator=ds.mean('z')) + datashade(tri.edgepaths)).cols(2)\n", - "\n", - "shadeable = [elemtype(pts) for elemtype in [hv.Curve, hv.Scatter]]\n", - "shadeable += [hv.Path(counties[(1, 1)], ['lons', 'lats']), hv.Points(counties[(1, 1)], ['lons', 'lats'])]\n", - "shadeable += [hv.Spikes(np.random.randn(10000))]\n", - "shadeable += [hv.Segments((np.arange(100), s, np.arange(100), e))]\n", - "shadeable += 
[hv.Area(np.random.randn(10000).cumsum())]\n", - "shadeable += [hv.Spread((np.arange(10000), np.random.randn(10000).cumsum(), np.random.randn(10000)*10))]\n", - "shadeable += [hv.Image((x,y,z))]\n", - "shadeable += [hv.QuadMesh((Qx,Qy,Z))]\n", - "shadeable += [hv.Graph(((np.zeros(N), np.arange(N)),))]\n", - "shadeable += [tri.edgepaths]\n", - "shadeable += [tri]\n", - "shadeable += [hv.Polygons([county for county in counties.values() if county['state'] == 'tx'], ['lons', 'lats'], ['name'])]\n", - "shadeable += [hv.operation.contours(hv.Image((x,y,z)), levels=10)]\n", - "\n", - "rasterizable = [hv.RGB(np.dstack([r,g,b])), hv.HSV(np.dstack([g,b,r]))]\n", + "rect_opts = opts.Rectangles(lw=0, color=hv.dim('sign').categorize(rect_colors))\n", "\n", "ds_opts = {\n", - " hv.Path: dict(aggregator='any'),\n", - " hv.Points: dict(aggregator='any'),\n", - " hv.Polygons: dict(aggregator=ds.count_cat('name'), color_key=hv.plotting.util.process_cmap('glasbey')),\n", - " hv.Segments: dict(aggregator='any')\n", - "}\n", - "\n", - "hv.Layout([dynspread(datashade(e.relabel(e.__class__.name), **ds_opts.get(e.__class__, {}))) for e in shadeable] + \n", - " [ rasterize(e.relabel(e.__class__.name)) for e in rasterizable]).opts(shared_axes=False).cols(6)" + " hv.Path: dict(aggregator='any'),\n", + " hv.Scatter: dict(aggregator='any'),\n", + " hv.Points: dict(aggregator='any'),\n", + " hv.Segments: dict(aggregator='any'),\n", + " hv.Rectangles: dict(aggregator=ds.count_cat('sign'), color_key=rect_colors)\n", + "}" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Here we called `datashade()` on each Element type, letting Datashader do the full process of rasterization and shading, except that for `RGB` and `HSV` we only called `rasterize()` or else the results would have been converted into a monochrome image.\n", + "Now, some Elements that support datashading, in categories depending on whether they work best with `spread(rasterize())`, with plain `rasterize()`, or require 
`datashade()`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "spreadable = [e(pts) for e in [hv.Curve, hv.Scatter]]\n", + "spreadable += [e(counties[(1, 1)], ['lons', 'lats']) for e in [hv.Path, hv.Points]]\n", + "spreadable += [hv.Segments((np.arange(100), s, np.arange(100), e))]\n", + "spreadable += [hv.Graph(((np.zeros(N), np.arange(N)),))]\n", + "spreadable += [hv.operation.contours(hv.Image((x,y,z)), levels=10)]\n", + "\n", + "tri = hv.TriMesh.from_vertices(hv.Points(np.random.randn(N,3), vdims='z')).opts(**opts2)\n", + "spreadable += [tri.edgepaths]\n", + "\n", + "rasterizable = [tri]\n", + "rasterizable += [hv.Area(np.random.randn(10000).cumsum())]\n", + "rasterizable += [hv.Spread((np.arange(10000), np.random.randn(10000).cumsum(), np.random.randn(10000)*10))]\n", + "rasterizable += [hv.Spikes(np.random.randn(1000))]\n", + "rasterizable += [hv.QuadMesh((Qx,Qy,Z))]\n", + "\n", + "polys = hv.Polygons([dict(county, unemployment=unemployment[k]) \n", + " for k, county in counties.items()\n", + " if county['state'] == 'tx'], \n", + " ['lons', 'lats'], ['unemployment']).opts(color='unemployment')\n", + "try:\n", + " import spatialpandas # Needed for datashader polygon support\n", + " rasterizable += [polys]\n", + "except: pass\n", + "\n", + "rasterizable += [hv.Image((x,y,z))]\n", + "rasterizable += [hv.RGB(np.dstack([r,g,b])), hv.HSV(np.dstack([g,b,r]))]\n", + "\n", + "shadeable = [hv.Rectangles((np.arange(100)-0.4, s, np.arange(100)+0.4, e, s>e), vdims='sign').opts(rect_opts)]\n", "\n", + "def nop(x,**k): return x\n", + "def spread4(e, **k): return spread(rasterize(e, **k), px=4).opts(cnorm='eq_hist', padding=0.1) \n", + "def plot(e, operation=nop):\n", + " return operation(e.relabel(e.__class__.name), **ds_opts.get(e.__class__, {})).opts(**eopts)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now view these with Datashader via `spread(rasterize())`, 
`rasterize()`, or `datashade`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "hv.Layout(\n", + " [plot(e, spread4) for e in spreadable] + \\\n", + " [plot(e, rasterize) for e in rasterizable] + \\\n", + " [plot(e, datashade) for e in shadeable]).cols(6)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ "For comparison, you can see the corresponding non-datashaded plots (as long as you leave N lower than 10000 unless you have a long time to wait!):" ] }, @@ -561,20 +726,32 @@ "metadata": {}, "outputs": [], "source": [ - "el_opts = dict(aspect=1, axiswise=True, xaxis='bare', yaxis='bare')\n", - "hv.Layout([e.relabel(e.__class__.name).opts(**el_opts) for e in shadeable + rasterizable]).cols(6)" + "hv.Layout([plot(e) for e in spreadable + rasterizable + shadeable]).cols(6)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The previous two sets of examples use Matplotlib, but if they were switched to Bokeh and you had a live server, they would support dynamic re-rendering on zoom and pan so that you could explore the full range of data available (e.g. even very large raster images, networks, paths, point clouds, or meshes)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "hv.output(backend='bokeh') # restore bokeh backend in case cells will run out of order" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "These two examples use Matplotlib, but if they were switched to Bokeh and you had a live server, they would support dynamic re-rendering on zoom and pan so that you could explore the full range of data available (e.g. 
even very large raster images, networks, paths, point clouds, or meshes).\n", - "\n", - "\n", "# Container types supported for datashading\n", "\n", - "In the above examples `datashade()` was called directly on each Element, but it can also be called on Containers, in which case each Element in the Container will be datashaded separately (for all Container types other than a Layout):" + "In the above examples `datashade()` or `rasterize` was called directly on each Element, but these operations can also be called on Containers, in which case each Element in the Container will be datashaded separately (for all Container types other than a Layout):" ] }, { @@ -583,12 +760,8 @@ "metadata": {}, "outputs": [], "source": [ - "hv.output(dpi=80, size=100)\n", - "\n", "curves = {'+':hv.Curve(pts), '-':hv.Curve([(x, -1.0*y) for x, y in pts])}\n", - "\n", - "supported = [hv.HoloMap(curves,'sign'), hv.Overlay(list(curves.values())), hv.NdOverlay(curves), hv.GridSpace(hv.NdOverlay(curves))]\n", - "hv.Layout([datashade(e.relabel(e.__class__.name)) for e in supported]).cols(4)" + "spread(rasterize(hv.HoloMap(curves,'sign')))" ] }, { @@ -597,7 +770,8 @@ "metadata": {}, "outputs": [], "source": [ - "dynspread(datashade(hv.NdLayout(curves,'sign')))" + "containers = [hv.Overlay(list(curves.values())), hv.NdOverlay(curves), hv.GridSpace(hv.NdOverlay(curves))]\n", + "hv.Layout([rasterize(e.relabel(e.__class__.name)) for e in containers]).cols(4)" ] }, { @@ -606,7 +780,7 @@ "metadata": {}, "outputs": [], "source": [ - "hv.output(backend='bokeh')" + "spread(rasterize(hv.NdLayout(curves,'sign')))" ] }, { diff --git a/examples/user_guide/17-Dashboards.ipynb b/examples/user_guide/17-Dashboards.ipynb index 4b970b3b08..5f7065e5db 100644 --- a/examples/user_guide/17-Dashboards.ipynb +++ b/examples/user_guide/17-Dashboards.ipynb @@ -26,7 +26,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In the [Data Processing Pipelines section](./14-Data_Pipelines.ipynb) we discovered how to 
declare a ``DynamicMap`` and control multiple processing steps with the use of custom streams as described in the [Responding to Events](./12-Responding_to_Events.ipynb) guide. Here we will use the same example exploring a dataset of stock timeseries and build a small dashboard using the [Panel](https://panel.pyviz.org) library, which allows us to declare easily declare custom widgets and link them to our streams. We will begin by once again declaring our function that loads the stock data:" + "In the [Data Processing Pipelines section](./14-Data_Pipelines.ipynb) we discovered how to declare a ``DynamicMap`` and control multiple processing steps with the use of custom streams as described in the [Responding to Events](./12-Responding_to_Events.ipynb) guide. A DynamicMap works like a tiny web application, with widgets that select values along a dimension, and a plot that updates. Let's start with a function that loads stock data and see what a DynamicMap can do:" ] }, { @@ -35,15 +35,25 @@ "metadata": {}, "outputs": [], "source": [ - "def load_symbol(symbol, variable='adj_close', **kwargs):\n", + "def load_symbol(symbol, variable, **kwargs):\n", " df = pd.DataFrame(getattr(stocks, symbol))\n", " df['date'] = df.date.astype('datetime64[ns]')\n", " return hv.Curve(df, ('date', 'Date'), variable).opts(framewise=True)\n", "\n", "stock_symbols = ['AAPL', 'IBM', 'FB', 'GOOG', 'MSFT']\n", - "dmap = hv.DynamicMap(load_symbol, kdims='Symbol').redim.values(Symbol=stock_symbols)\n", + "variables = ['open', 'high', 'low', 'close', 'volume', 'adj_close']\n", + "dmap = hv.DynamicMap(load_symbol, kdims=['Symbol','Variable'])\n", + "dmap = dmap.redim.values(Symbol=stock_symbols, Variable=variables)\n", "\n", - "dmap.opts(framewise=True)" + "dmap.opts(framewise=True)\n", + "rolling(dmap, rolling_window=2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we already have widgets for Symbol and Variable, as those are dimensions in the DynamicMap, but 
what if we wanted a widget to control the `rolling_window` width value in the HoloViews operation? We could redefine the DynamicMap to include the operation and accept that parameter as another dimension, but in complex cases we would quickly find we need more flexibility in defining widgets and layouts than DynamicMap can give us directly." ] }, { @@ -52,9 +62,9 @@ "source": [ "## Building dashboards\n", "\n", - "Controlling stream events manually from the Python prompt can be a bit cumbersome. However since you can now trigger events from Python we can easily bind any Python based widget framework to the stream. HoloViews itself is based on param and param has various UI toolkits that accompany it and allow you to quickly generate a set of widgets. Here we will use ``panel``, which is based on bokeh to control our stream values.\n", + "For more flexibility, we can build a full-featured dashboard using the [Panel](https://panel.pyviz.org) library, which is what a DynamicMap is already using internally to generate widgets and layouts. We can easily declare our own custom Panel widgets and link them to HoloViews streams to get dynamic, user controllable analysis workflows.\n", "\n", - "To do so we will declare a ``StockExplorer`` class subclassing ``Parameterized`` and defines two parameters, the ``rolling_window`` as an integer and the ``symbol`` as an ObjectSelector. 
Additionally we define a view method, which defines the DynamicMap and applies the two operations we have already played with, returning an overlay of the smoothed ``Curve`` and outlier ``Scatter``.\n" + "Here, let's start with defining various Panel widgets explicitly, choosing a `RadioButtonGroup` for the `symbol` instead of DynamicMaps's default `Select` widget:" ] }, { @@ -63,31 +73,22 @@ "metadata": {}, "outputs": [], "source": [ - "import param\n", "import panel as pn\n", "\n", - "variables = ['open', 'high', 'low', 'close', 'volume', 'adj_close']\n", - "\n", - "class StockExplorer(param.Parameterized):\n", - "\n", - " rolling_window = param.Integer(default=10, bounds=(1, 365))\n", - " \n", - " symbol = param.ObjectSelector(default='AAPL', objects=stock_symbols)\n", - " \n", - " variable = param.ObjectSelector(default='adj_close', objects=variables)\n", + "symbol = pn.widgets.RadioButtonGroup(options=stock_symbols)\n", + "variable = pn.widgets.Select(options=variables)\n", + "rolling_window = pn.widgets.IntSlider(name='Rolling Window', value=10, start=1, end=365)\n", "\n", - " @param.depends('symbol', 'variable')\n", - " def load_symbol(self):\n", - " df = pd.DataFrame(getattr(stocks, self.symbol))\n", - " df['date'] = df.date.astype('datetime64[ns]')\n", - " return hv.Curve(df, ('date', 'Date'), self.variable).opts(framewise=True)" + "pn.Column(symbol, variable, rolling_window)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "You will have noticed the ``param.depends`` decorator on the ``load_symbol`` method above, this declares that the method depends on these two parameters. When we pass the method to a ``DynamicMap`` it will now automatically listen to changes to the 'symbol', and 'variable' parameters. 
To generate a set of widgets to control these parameters we can simply supply the ``explorer.param`` accessor to a panel layout, and combining the two we can quickly build a little GUI:" + "As you can see, these widgets can be displayed but they aren't yet attached to anything, so they don't do much. We can now use ``pn.bind`` to bind the `symbol` and `variable` widgets to the arguments of the DynamicMap callback function, and provide `rolling_window` to the `rolling` operation argument. (HoloViews operations accept Panel widgets or param Parameter values, and they will then update reactively to changes in those widgets.)\n", + "\n", + "We can then lay it all out into a simple application that works similarly to the regular DynamicMap display but where we can add our additional widget and control every aspect of the widget configuration and the layout:" ] }, { @@ -96,18 +97,41 @@ "metadata": {}, "outputs": [], "source": [ - "explorer = StockExplorer()\n", + "dmap = hv.DynamicMap(pn.bind(load_symbol, symbol=symbol, variable=variable))\n", + "smoothed = rolling(dmap, rolling_window=rolling_window)\n", "\n", - "stock_dmap = hv.DynamicMap(explorer.load_symbol)\n", + "app = pn.Row(pn.WidgetBox('## Stock Explorer', symbol, variable, rolling_window), \n", + " smoothed.opts(width=500, framewise=True)).servable()\n", + "app" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we chose to lay the widgets out into a box to the left of the plot, but we could put the widgets each in different locations, add different plots, etc., to create a full-featured dashboard. 
See [panel.holoviz.org](https://panel.holoviz.org) for the full set of widgets and layouts supported.\n", "\n", - "pn.Row(pn.panel(explorer.param, parameters=['symbol', 'variable']), stock_dmap)" + "Now that we have an app, we can launch it in a separate server if we wish (using `app.show()`), run it as an entirely separate process (`panel serve .ipynb`, to serve the object marked `servable` above), or export it to a static HTML file (sampling the space of parameter values using \"embed\"):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "app.save(\"dashboard.html\", embed=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The ``rolling_window`` parameter is not yet connected to anything however, so just like in the [Data Processing Pipelines section](./14-Data_Pipelines.ipynb) we will see how we can get the widget to control the parameters of an operation. Both the ``rolling`` and ``rolling_outlier_std`` operations accept a ``rolling_window`` parameter, so we simply pass that parameter into the operation. Finally we compose everything into a panel ``Row``:" + "## Declarative dashboards\n", + "\n", + "What if we want our analysis code usable both as a dashboard and also in \"headless\" contexts such as batch jobs or remote execution? Both Panel and HoloViews are built on the [param](https://param.holoviz.org) library, which lets you capture the definitions and allowable values for your widgets in a way that's not attached to any GUI. 
That way you can declare all of your attributes and allowed values once, presenting a GUI if you want to explore them interactively or else simply provide specific values if you want batch operation.\n", + "\n", + "With this approach, we declare a ``StockExplorer`` class subclassing ``Parameterized`` and defining three parameters, namely the rolling window, the symbol, and the variable to show for that symbol:" ] }, { @@ -116,25 +140,30 @@ "metadata": {}, "outputs": [], "source": [ - "# Apply rolling mean\n", - "smoothed = rolling(stock_dmap, rolling_window=explorer.param.rolling_window)\n", + "import param\n", "\n", - "# Find outliers\n", - "outliers = rolling_outlier_std(stock_dmap, rolling_window=explorer.param.rolling_window).opts(\n", - " color='red', marker='triangle')\n", + "class StockExplorer(param.Parameterized):\n", "\n", - "pn.Row(explorer.param, (smoothed * outliers).opts(width=600))" + " rolling_window = param.Integer(default=10, bounds=(1, 365))\n", + " symbol = param.ObjectSelector(default='AAPL', objects=stock_symbols)\n", + " variable = param.ObjectSelector(default='adj_close', objects=variables)\n", + "\n", + " @param.depends('symbol', 'variable')\n", + " def load_symbol(self):\n", + " df = pd.DataFrame(getattr(stocks, self.symbol))\n", + " df['date'] = df.date.astype('datetime64[ns]')\n", + " return hv.Curve(df, ('date', 'Date'), self.variable).opts(framewise=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## A function based approach\n", + "Here the StockExplorer class will look similar to the Panel code above, defining most of the same information that's in the Panel widgets, but without any dependency on Panel or other GUI libraries; it's simply declaring that this code accepts certain parameter values of the specified types and ranges. These declarations are useful even outside a GUI context, because they allow type and range checking for detecting user errors, but they are also sufficient for creating a GUI later. 
\n", "\n", - "Instead of defining a whole Parameterized class we can also use the ``depends`` decorator to directly link the widgets to a DynamicMap callback function. This approach makes the link between the widgets and the computation very explicit at the cost of tying the widget and display code very closely together.\n", + "Instead of using `pn.bind` to bind widget values to functions, here we are declaring that each method depends on the specified parameters, which can be expressed independently of whether there is a widget controlling those parameters; it simply declares (in a way that Panel can utilize) that the given method needs re-running when any of the parameters in that list changes. \n", "\n", - "Instead of declaring the dependencies as strings we map the parameter instance to a particular keyword argument in the ``depends`` call. In this way we can link the symbol to the ``RadioButtonGroup`` value and the ``variable`` to the ``Select`` widget value:" + "Now let's use the `load_symbol` method, which already declares which parameters it depends on, as the callback of a DynamicMap and create widgets out of those parameters to build a little GUI:" ] }, { @@ -143,19 +172,32 @@ "metadata": {}, "outputs": [], "source": [ - "symbol = pn.widgets.RadioButtonGroup(options=stock_symbols)\n", - "variable = pn.widgets.Select(options=variables)\n", - "rolling_window = pn.widgets.IntSlider(name='Rolling Window', value=10, start=1, end=365)\n", - "\n", - "@pn.depends(symbol=symbol.param.value, variable=variable.param.value)\n", - "def load_symbol_cb(symbol, variable):\n", - " return load_symbol(symbol, variable)\n", - "\n", - "dmap = hv.DynamicMap(load_symbol_cb)\n", + "explorer = StockExplorer()\n", + "stock_dmap = hv.DynamicMap(explorer.load_symbol)\n", + "pn.Row(explorer.param, stock_dmap)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here you'll notice that the `rolling_window` widget doesn't do anything, because it's not connected 
to anything (e.g., nothing `@param.depends` on it). As we saw in the [Data Processing Pipelines section](./14-Data_Pipelines.ipynb), the ``rolling`` and ``rolling_outlier_std`` operations both accept a ``rolling_window`` parameter, so let's provide that to the operations and display the output of those operations. Finally we compose everything into a panel ``Row``:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Apply rolling mean\n", "smoothed = rolling(stock_dmap, rolling_window=explorer.param.rolling_window)\n", "\n", - "smoothed = rolling(dmap, rolling_window=rolling_window.param.value)\n", + "# Find outliers\n", + "outliers = rolling_outlier_std(stock_dmap, rolling_window=explorer.param.rolling_window).opts(\n", + " color='red', marker='triangle')\n", "\n", - "pn.Row(pn.WidgetBox('## Stock Explorer', symbol, variable, rolling_window), smoothed.opts(width=500, framewise=True))" + "pn.Row(explorer.param, (smoothed * outliers).opts(width=600))" ] }, { @@ -215,7 +257,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "As you can see using streams we have bound the widgets to the streams letting us easily control the stream values and making it trivial to define complex dashboards. For more information on how to deploy bokeh apps from HoloViews and build dashboards see the [Deploying Bokeh Apps](./Deploying_Bokeh_Apps.ipynb)." + "As you can see using streams we have bound the widgets to the streams letting us easily control the stream values and making it trivial to define complex dashboards. For more information on how to deploy bokeh apps from HoloViews and build dashboards see the [Deploying Bokeh Apps](./Deploying_Bokeh_Apps.ipynb) user guide section." 
] } ], diff --git a/examples/user_guide/Continuous_Coordinates.ipynb b/examples/user_guide/Continuous_Coordinates.ipynb index 2684232c43..c32f7d391f 100644 --- a/examples/user_guide/Continuous_Coordinates.ipynb +++ b/examples/user_guide/Continuous_Coordinates.ipynb @@ -365,7 +365,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Similarly, if we ask for the value of a given *y* location in continuous space, we will get a ``Curve`` with the array row closest to that *y* value in the ``Image`` 2D array returned as arrays of $x$ values and the corresponding *z* value from the image:" + "Similarly, if we ask for the value of a given *y* location in continuous space, we will get a ``Curve`` with the array row closest to that *y* value in the ``Image`` 2D array returned as arrays of `x` values and the corresponding *z* value from the image:" ] }, { diff --git a/examples/user_guide/Exporting_and_Archiving.ipynb b/examples/user_guide/Exporting_and_Archiving.ipynb index dc0838cf4c..ee0ed4b9d7 100644 --- a/examples/user_guide/Exporting_and_Archiving.ipynb +++ b/examples/user_guide/Exporting_and_Archiving.ipynb @@ -48,7 +48,7 @@ "outputs": [], "source": [ "penguins = hv.RGB.load_image('../assets/penguins.png')\n", - "hv.save(penguins, 'penguin_plot.png', fmt='svg')\n", + "hv.save(penguins, 'penguin_plot.png', fmt='png')\n", "penguins" ] }, @@ -256,7 +256,7 @@ "source": [ "import json\n", "hv.archive.add(filename='metadata.json', \n", - " data=json.dumps({'repository':'git@github.com:ioam/holoviews.git',\n", + " data=json.dumps({'repository':'git@github.com:holoviz/holoviews.git',\n", " 'commit':'437e8d69'}), info={'mime_type':'text/json'})" ] }, diff --git a/examples/user_guide/Installing_and_Configuring.ipynb b/examples/user_guide/Installing_and_Configuring.ipynb index 3fe9e7ebc7..dee4d8fa7a 100644 --- a/examples/user_guide/Installing_and_Configuring.ipynb +++ b/examples/user_guide/Installing_and_Configuring.ipynb @@ -33,7 +33,7 @@ " pip install 
'holoviews[extras]'\n", " pip install 'holoviews[all]'\n", "\n", - "The first option installs just the bare library and the [NumPy](http://numpy.org) and [Param](http://ioam.github.com/param) libraries, which is all you need on your system to generate and work with HoloViews objects without visualizing them. The other options install additional libraries that are often useful, with the `recommended` option being similar to the `conda` install command above.\n", + "The first option installs just the bare library and the [NumPy](http://numpy.org) and [Param](https://github.com/holoviz/param) libraries, which is all you need on your system to generate and work with HoloViews objects without visualizing them. The other options install additional libraries that are often useful, with the `recommended` option being similar to the `conda` install command above.\n", "\n", "Between releases, development snapshots are made available as conda packages:\n", "\n", @@ -42,7 +42,7 @@ "To get the very latest development version you can clone our git\n", "repository and put it on the Python path:\n", "\n", - " git clone git://github.com/ioam/holoviews.git\n", + " git clone https://github.com/holoviz/holoviews.git\n", " cd holoviews\n", " pip install -e ." 
] diff --git a/holoviews/__init__.py b/holoviews/__init__.py index 29c9ba2dfb..46c225ab4b 100644 --- a/holoviews/__init__.py +++ b/holoviews/__init__.py @@ -1,5 +1,3 @@ - -from __future__ import print_function, absolute_import import os, io import numpy as np # noqa (API import) @@ -90,4 +88,4 @@ def help(obj, visualization=True, ansi=True, backend=None, pydoc.help(obj) -del absolute_import, io, np, os, print_function, rcfile, warnings +del io, np, os, rcfile, warnings diff --git a/holoviews/annotators.py b/holoviews/annotators.py index 9faeb56c10..baa3be8069 100644 --- a/holoviews/annotators.py +++ b/holoviews/annotators.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import sys from collections import OrderedDict @@ -111,7 +109,7 @@ def compose(cls, *annotators): if isinstance(annotator, Layout): l, ts = annotator layers.append(l) - tables += ts + tables += list(ts) elif isinstance(annotator, annotate): layers.append(annotator.plot) tables += [t[0].object for t in annotator.editor] @@ -158,7 +156,6 @@ def __call__(self, element, **params): return self.compose(*layers) - class Annotator(PaneBase): """ An Annotator allows drawing, editing and annotating a specific @@ -224,7 +221,7 @@ def _object_name(self): return self._element_type.__name__ def __init__(self, object=None, **params): - super(Annotator, self).__init__(None, **params) + super().__init__(None, **params) self.object = self._process_element(object) self._table_row = Row() self.editor = Tabs(('%s' % param_name(self.name), self._table_row)) @@ -338,7 +335,7 @@ class PathAnnotator(Annotator): def __init__(self, object=None, **params): self._vertex_table_row = Row() - super(PathAnnotator, self).__init__(object, **params) + super().__init__(object, **params) self.editor.append(('%s Vertices' % param_name(self.name), self._vertex_table_row)) @@ -389,13 +386,13 @@ def _process_element(self, element=None): # Add options to element tools = [tool() for tool in self._tools] - opts = 
dict(tools=tools, color_index=None, **self.default_opts) + opts = dict(tools=tools, **self.default_opts) opts.update(self._extra_opts) return element.options(**{k: v for k, v in opts.items() if k not in element.opts.get('plot').kwargs}) def _update_links(self): - super(PathAnnotator, self)._update_links() + super()._update_links() if hasattr(self, '_vertex_link'): self._vertex_link.unlink() self._vertex_link = self._vertex_table_link(self.plot, self._vertex_table) diff --git a/holoviews/core/accessors.py b/holoviews/core/accessors.py index 3359bfa2ad..5576496d4d 100644 --- a/holoviews/core/accessors.py +++ b/holoviews/core/accessors.py @@ -1,8 +1,6 @@ """ Module for accessor objects for viewable HoloViews objects. """ -from __future__ import absolute_import, unicode_literals - import copy import sys @@ -157,7 +155,7 @@ def __call__(self, apply_function, streams=[], link_inputs=True, dynamic, per_element, **kwargs ) - if isinstance(apply_function, util.basestring): + if isinstance(apply_function, str): args = kwargs.pop('_method_args', ()) method_name = apply_function def apply_function(object, **kwargs): @@ -329,7 +327,7 @@ def replace_dimensions(cls, dimensions, overrides): if override is None: replaced.append(d) - elif isinstance(override, (util.basestring, tuple)): + elif isinstance(override, (str, tuple)): replaced.append(d.clone(override)) elif isinstance(override, Dimension): replaced.append(override) @@ -474,6 +472,9 @@ def soft_range(self, specs=None, **values): def type(self, specs=None, **values): return self._redim('type', specs, **values) + def nodata(self, specs=None, **values): + return self._redim('nodata', specs, **values) + def step(self, specs=None, **values): return self._redim('step', specs, **values) @@ -560,12 +561,12 @@ def __call__(self, *args, **kwargs): """ if self._mode is None: apply_groups, _, _ = util.deprecated_opts_signature(args, kwargs) - if apply_groups and util.config.future_deprecations: + if apply_groups: msg = ("Calling the 
.opts method with options broken down by options " "group (i.e. separate plot, style and norm groups) is deprecated. " "Use the .options method converting to the simplified format " "instead or use hv.opts.apply_groups for backward compatibility.") - param.main.warning(msg) + param.main.param.warning(msg) return self._dispatch_opts( *args, **kwargs) diff --git a/holoviews/core/boundingregion.py b/holoviews/core/boundingregion.py index 42a0c78b7f..7f22cea8bd 100644 --- a/holoviews/core/boundingregion.py +++ b/holoviews/core/boundingregion.py @@ -134,7 +134,7 @@ def __init__(self, **args): else: self._aarect = AARectangle((-0.5, -0.5), (0.5, 0.5)) - super(BoundingBox, self).__init__(**args) + super().__init__(**args) def __contains__(self, other): @@ -352,10 +352,7 @@ class BoundingRegionParameter(param.Parameter): def __init__(self, default=BoundingBox(radius=0.5), **params): self.set_hook = identity_hook - super(BoundingRegionParameter, self).__init__(default=default, - instantiate=True, - **params) - + super().__init__(default=default, instantiate=True, **params) def __set__(self, obj, val): """ @@ -371,4 +368,4 @@ def __set__(self, obj, val): if not isinstance(val, BoundingRegion): raise ValueError("Parameter must be a BoundingRegion.") else: - super(BoundingRegionParameter, self).__set__(obj, val) + super().__set__(obj, val) diff --git a/holoviews/core/data/__init__.py b/holoviews/core/data/__init__.py index 1f1600cd87..652b41090f 100644 --- a/holoviews/core/data/__init__.py +++ b/holoviews/core/data/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - try: import itertools.izip as zip except ImportError: @@ -8,13 +6,14 @@ import types import copy +from contextlib import contextmanager + import numpy as np import param import pandas as pd # noqa from param.parameterized import add_metaclass, ParameterizedMetaclass -from .. 
import util from ..accessors import Redim from ..dimension import ( Dimension, Dimensioned, LabelledData, dimension_name, process_dimensions @@ -33,13 +32,18 @@ from .multipath import MultiInterface # noqa (API import) from .image import ImageInterface # noqa (API import) from .pandas import PandasInterface # noqa (API import) -from .spatialpandas import SpatialPandasInterface # noqa (API import) +from .spatialpandas import SpatialPandasInterface # noqa (API import) +from .spatialpandas_dask import DaskSpatialPandasInterface # noqa (API import) from .xarray import XArrayInterface # noqa (API import) +# Ensures correct holoviews.core.util is sourced +from .. import util + default_datatype = 'dataframe' -datatypes = ['dataframe', 'dictionary', 'grid', 'xarray', 'dask', - 'cuDF', 'spatialpandas', 'array', 'multitabular', 'ibis'] +datatypes = ['dataframe', 'dictionary', 'grid', 'xarray', 'multitabular', + 'spatialpandas', 'dask_spatialpandas', 'dask', 'cuDF', 'array', + 'ibis'] def concat(datasets, datatype=None): @@ -153,11 +157,25 @@ def __call__(self, new_type, kdims=None, vdims=None, groupby=None, return group +@contextmanager +def disable_pipeline(): + """ + Disable PipelineMeta class from storing pipelines. 
+ """ + PipelineMeta.disable = True + try: + yield + finally: + PipelineMeta.disable = False + + class PipelineMeta(ParameterizedMetaclass): # Public methods that should not be wrapped blacklist = ['__init__', 'clone'] + disable = False + def __new__(mcs, classname, bases, classdict): for method_name in classdict: @@ -182,6 +200,8 @@ def pipelined_fn(*args, **kwargs): try: result = method_fn(*args, **kwargs) + if PipelineMeta.disable: + return result op = method_op.instance( input_type=type(inst), @@ -292,12 +312,12 @@ def __init__(self, data, kdims=None, vdims=None, **kwargs): if isinstance(data, Element): if 'kdims' in kwargs: kwargs['kdims'] = [ - data.get_dimension(kd) if isinstance(kd, util.basestring) else kd + data.get_dimension(kd) if isinstance(kd, str) else kd for kd in kwargs['kdims'] ] if 'kdims' in kwargs: kwargs['vdims'] = [ - data.get_dimension(vd) if isinstance(vd, util.basestring) else vd + data.get_dimension(vd) if isinstance(vd, str) else vd for vd in kwargs['vdims'] ] pvals = util.get_param_values(data) @@ -353,6 +373,16 @@ def __init__(self, data, kdims=None, vdims=None, **kwargs): if hasattr(self, '_binned'): self._dataset._binned = self._binned + def __getstate__(self): + "Ensures pipelines are dropped" + obj_dict = super(Dataset, self).__getstate__() + if '_pipeline' in obj_dict: + pipeline = obj_dict['_pipeline'] + obj_dict['_pipeline'] = pipeline.instance(operations=pipeline.operations[:1]) + if '_transforms' in obj_dict: + obj_dict['_transforms'] = [] + return obj_dict + @property def redim(self): return Redim(self, mode='dataset') @@ -506,7 +536,7 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): Returns: Cloned object containing the new dimension """ - if isinstance(dimension, (util.basestring, tuple)): + if isinstance(dimension, (str, tuple)): dimension = Dimension(dimension) if dimension.name in self.kdims: @@ -765,7 +795,7 @@ def sample(self, samples=[], bounds=None, closest=True, **kwargs): # Note: 
Special handling sampling of gridded 2D data as Curve # may be replaced with more general handling - # see https://github.com/ioam/holoviews/issues/1173 + # see https://github.com/holoviz/holoviews/issues/1173 from ...element import Table, Curve datatype = ['dataframe', 'dictionary', 'dask', 'ibis'] if len(samples) == 1: @@ -874,6 +904,11 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): transformed = transformed.collapse() return transformed.clone(new_type=type(self)) + ndims = len(dimensions) + min_d, max_d = self.param.objects('existing')['kdims'].bounds + generic_type = (min_d is not None and ndims < min_d) or (max_d is not None and ndims > max_d) + new_type = Dataset if generic_type else None + # Handle functions kdims = [self.get_dimension(d, strict=True) for d in dimensions] if not self: @@ -882,17 +917,15 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): vdims = [d for vd in self.vdims for d in [vd, vd.clone('_'.join([vd.name, spread_name]))]] else: vdims = self.vdims - return self.clone([], kdims=kdims, vdims=vdims) + if not kdims and len(vdims) == 1: + return np.nan + return self.clone([], kdims=kdims, vdims=vdims, new_type=new_type) vdims = self.vdims aggregated, dropped = self.interface.aggregate(self, kdims, function, **kwargs) aggregated = self.interface.unpack_scalar(self, aggregated) vdims = [vd for vd in vdims if vd not in dropped] - ndims = len(dimensions) - min_d, max_d = self.param.objects('existing')['kdims'].bounds - generic_type = (min_d is not None and ndims < min_d) or (max_d is not None and ndims > max_d) - if spreadfn: error, _ = self.interface.aggregate(self, dimensions, spreadfn) spread_name = spreadfn.__name__ @@ -905,7 +938,7 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): idx = vdims.index(d) combined = combined.add_dimension(dim, idx+1, dvals, True) vdims = combined.vdims - return combined.clone(new_type=Dataset if generic_type else 
type(self)) + return combined.clone(new_type=new_type) if np.isscalar(aggregated): return aggregated @@ -916,8 +949,7 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): except: datatype = self.param.objects('existing')['datatype'].default return self.clone(aggregated, kdims=kdims, vdims=vdims, - new_type=Dataset if generic_type else None, - datatype=datatype) + new_type=new_type, datatype=datatype) def groupby(self, dimensions=[], container_type=HoloMap, group_type=None, @@ -1067,8 +1099,11 @@ def dimension_values(self, dimension, expanded=True, flat=True): NumPy array of values along the requested dimension """ dim = self.get_dimension(dimension, strict=True) - return self.interface.values(self, dim, expanded, flat) - + values = self.interface.values(self, dim, expanded, flat) + if dim.nodata is not None: + # Ensure nodata applies to boolean data in py2 + values = np.where(values==dim.nodata, np.NaN, values) + return values def get_dimension_type(self, dim): """Get the type of the requested dimension. 
@@ -1168,9 +1203,7 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, elif self._in_method and 'dataset' not in overrides: overrides['dataset'] = self.dataset - return super(Dataset, self).clone( - data, shared_data, new_type, *args, **overrides - ) + return super(Dataset, self).clone(data, shared_data, new_type, *args, **overrides) # Overrides of superclass methods that are needed so that PipelineMeta # will find them to wrap with pipeline support diff --git a/holoviews/core/data/array.py b/holoviews/core/data/array.py index 75c9d53420..3402e33a60 100644 --- a/holoviews/core/data/array.py +++ b/holoviews/core/data/array.py @@ -271,7 +271,7 @@ def aggregate(cls, dataset, dimensions, function, **kwargs): def iloc(cls, dataset, index): rows, cols = index if np.isscalar(cols): - if isinstance(cols, util.basestring): + if isinstance(cols, str): cols = dataset.get_dimension_index(cols) if np.isscalar(rows): return dataset.data[rows, cols] diff --git a/holoviews/core/data/cudf.py b/holoviews/core/data/cudf.py index 3bcaa85317..d08a209e6f 100644 --- a/holoviews/core/data/cudf.py +++ b/holoviews/core/data/cudf.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import sys import warnings @@ -18,6 +16,7 @@ from ..ndmapping import NdMapping, item_check, sorted_context from .interface import DataError, Interface from .pandas import PandasInterface +from .util import finite_range class cuDFInterface(PandasInterface): @@ -123,11 +122,14 @@ def init(cls, eltype, data, kdims, vdims): @classmethod def range(cls, dataset, dimension): - column = dataset.data[dataset.get_dimension(dimension, strict=True).name] + dimension = dataset.get_dimension(dimension, strict=True) + column = dataset.data[dimension.name] + if dimension.nodata is not None: + column = cls.replace_value(column, dimension.nodata) if column.dtype.kind == 'O': return np.NaN, np.NaN else: - return (column.min(), column.max()) + return finite_range(column, column.min(), column.max()) 
@classmethod @@ -188,7 +190,7 @@ def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): def select_mask(cls, dataset, selection): """ Given a Dataset object and a dictionary with dimension keys and - selection keys (i.e tuple ranges, slices, sets, lists or literals) + selection keys (i.e. tuple ranges, slices, sets, lists, or literals) return a boolean mask over the rows in the Dataset object that have been selected. """ diff --git a/holoviews/core/data/dask.py b/holoviews/core/data/dask.py index 656c255554..24fa27ac36 100644 --- a/holoviews/core/data/dask.py +++ b/holoviews/core/data/dask.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import sys try: import itertools.izip as zip except ImportError: @@ -85,11 +83,14 @@ def shape(cls, dataset): @classmethod def range(cls, dataset, dimension): import dask.dataframe as dd - column = dataset.data[dataset.get_dimension(dimension).name] + dimension = dataset.get_dimension(dimension, strict=True) + column = dataset.data[dimension.name] if column.dtype.kind == 'O': column = np.sort(column[column.notnull()].compute()) return (column[0], column[-1]) if len(column) else (None, None) else: + if dimension.nodata is not None: + column = cls.replace_value(column, dimension.nodata) return dd.compute(column.min(), column.max()) @classmethod @@ -112,7 +113,7 @@ def values(cls, dataset, dim, expanded=True, flat=True, compute=True, keep_index def select_mask(cls, dataset, selection): """ Given a Dataset object and a dictionary with dimension keys and - selection keys (i.e tuple ranges, slices, sets, lists or literals) + selection keys (i.e. tuple ranges, slices, sets, lists, or literals) return a boolean mask over the rows in the Dataset object that have been selected. 
""" diff --git a/holoviews/core/data/dictionary.py b/holoviews/core/data/dictionary.py index 9085175417..91a37802ed 100644 --- a/holoviews/core/data/dictionary.py +++ b/holoviews/core/data/dictionary.py @@ -19,7 +19,7 @@ class DictInterface(Interface): """ Interface for simple dictionary-based dataset format. The dictionary - keys correspond to the column (i.e dimension) names and the values + keys correspond to the column (i.e. dimension) names and the values are collections representing the values in that column. """ @@ -248,7 +248,7 @@ def sort(cls, dataset, by=[], reverse=False): @classmethod def range(cls, dataset, dimension): - dim = dataset.get_dimension(dimension) + dim = dataset.get_dimension(dimension, strict=True) column = dataset.data[dim.name] if isscalar(column): return column, column @@ -440,7 +440,7 @@ def holes(cls, dataset): holes.append(subholes) return [holes] else: - return super(DictInterface, cls).holes(dataset) + return super().holes(dataset) Interface.register(DictInterface) diff --git a/holoviews/core/data/grid.py b/holoviews/core/data/grid.py index 2a33197f17..94af20880b 100644 --- a/holoviews/core/data/grid.py +++ b/holoviews/core/data/grid.py @@ -1,8 +1,3 @@ -from __future__ import absolute_import - -import sys -import datetime as dt - from collections import OrderedDict, defaultdict try: @@ -19,7 +14,7 @@ from ..dimension import OrderedDict as cyODict from ..ndmapping import NdMapping, item_check, sorted_context from .. import util -from .interface import is_dask, dask_array_module, get_array_types +from .util import finite_range, is_dask, dask_array_module, get_array_types @@ -28,7 +23,7 @@ class GridInterface(DictInterface): Interface for simple dictionary-based dataset format using a compressed representation that uses the cartesian product between key dimensions. As with DictInterface, the dictionary keys correspond - to the column (i.e dimension) names and the values are NumPy arrays + to the column (i.e. 
dimension) names and the values are NumPy arrays representing the values in that column. To use this compressed format, the key dimensions must be orthogonal @@ -260,9 +255,6 @@ def _infer_interval_breaks(cls, coord, axis=0): [ 2.5, 3.5, 4.5]]) """ coord = np.asarray(coord) - if sys.version_info.major == 2 and len(coord) and isinstance(coord[0], (dt.datetime, dt.date)): - # np.diff does not work on datetimes in python 2 - coord = coord.astype('datetime64') if coord.shape[axis] == 0: return np.array([], dtype=coord.dtype) if coord.shape[axis] > 1: @@ -523,7 +515,7 @@ def key_select_mask(cls, dataset, values, ind): mask &= values < ind.stop # Expand empty mask if mask is True: - mask = np.ones(values.shape, dtype=np.bool) + mask = np.ones(values.shape, dtype=np.bool_) elif isinstance(ind, (set, list)): iter_slcs = [] for ik in ind: @@ -537,7 +529,7 @@ def key_select_mask(cls, dataset, values, ind): index_mask = values == ind if (dataset.ndims == 1 or dataset._binned) and np.sum(index_mask) == 0: data_index = np.argmin(np.abs(values - ind)) - mask = np.zeros(len(values), dtype=np.bool) + mask = np.zeros(len(values), dtype=np.bool_) mask[data_index] = True else: mask = index_mask @@ -548,6 +540,9 @@ def key_select_mask(cls, dataset, values, ind): @classmethod def select(cls, dataset, selection_mask=None, **selection): + if selection_mask is not None: + raise ValueError("Masked selections currently not supported for {0}.".format(cls.__name__)) + dimensions = dataset.kdims val_dims = [vdim for vdim in dataset.vdims if vdim in selection] if val_dims: @@ -582,7 +577,7 @@ def select(cls, dataset, selection_mask=None, **selection): raise IndexError("Index %s more than or equal to upper bound " "of %s for %s dimension." 
% (ind, emax, dim)) idx = max([np.digitize([ind], edges)[0]-1, 0]) - mask = np.zeros(len(values), dtype=np.bool) + mask = np.zeros(len(values), dtype=np.bool_) mask[idx] = True values = edges[idx:idx+2] elif len(inds): @@ -787,29 +782,30 @@ def iloc(cls, dataset, index): @classmethod def range(cls, dataset, dimension): + dimension = dataset.get_dimension(dimension, strict=True) if dataset._binned and dimension in dataset.kdims: expanded = cls.irregular(dataset, dimension) - column = cls.coords(dataset, dimension, expanded=expanded, edges=True) + array = cls.coords(dataset, dimension, expanded=expanded, edges=True) else: - column = cls.values(dataset, dimension, expanded=False, flat=False) + array = cls.values(dataset, dimension, expanded=False, flat=False) + + if dimension.nodata is not None: + array = cls.replace_value(array, dimension.nodata) da = dask_array_module() - if column.dtype.kind == 'M': - dmin, dmax = column.min(), column.max() - if da and isinstance(column, da.Array): - return da.compute(dmin, dmax) - return dmin, dmax - elif len(column) == 0: + if len(array) == 0: return np.NaN, np.NaN + + if array.dtype.kind == 'M': + dmin, dmax = array.min(), array.max() else: try: - dmin, dmax = (np.nanmin(column), np.nanmax(column)) - if da and isinstance(column, da.Array): - return da.compute(dmin, dmax) - return dmin, dmax + dmin, dmax = (np.nanmin(array), np.nanmax(array)) except TypeError: - column.sort() - return column[0], column[-1] + return np.NaN, np.NaN + if da and isinstance(array, da.Array): + return finite_range(array, *da.compute(dmin, dmax)) + return finite_range(array, dmin, dmax) @classmethod def assign(cls, dataset, new_data): @@ -824,6 +820,6 @@ def assign(cls, dataset, new_data): data[k] = cls.canonicalize(dataset, v) return data - + Interface.register(GridInterface) diff --git a/holoviews/core/data/ibis.py b/holoviews/core/data/ibis.py index 583bff870a..441f99a803 100644 --- a/holoviews/core/data/ibis.py +++ b/holoviews/core/data/ibis.py @@ 
-9,8 +9,9 @@ from .. import util from ..element import Element from ..ndmapping import NdMapping, item_check, sorted_context -from .interface import Interface, cached +from .interface import Interface from . import pandas +from .util import cached class IbisInterface(Interface): @@ -98,9 +99,12 @@ def nonzero(cls, dataset): @classmethod @cached def range(cls, dataset, dimension): + dimension = dataset.get_dimension(dimension, strict=True) if cls.dtype(dataset, dimension).kind in 'SUO': return None, None - column = dataset.data[dataset.get_dimension(dimension, strict=True).name] + if dimension.nodata is not None: + return Interface.range(dataset, dimension) + column = dataset.data[dimension.name] return tuple( dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :] ) diff --git a/holoviews/core/data/image.py b/holoviews/core/data/image.py index 6640fdcbad..f9d81a3314 100644 --- a/holoviews/core/data/image.py +++ b/holoviews/core/data/image.py @@ -8,6 +8,7 @@ from .. import util from .grid import GridInterface from .interface import Interface, DataError +from .util import finite_range class ImageInterface(GridInterface): @@ -135,6 +136,7 @@ def coords(cls, dataset, dim, ordered=False, expanded=False, edges=False): @classmethod def range(cls, obj, dim): + dim = obj.get_dimension(dim, strict=True) dim_idx = obj.get_dimension_index(dim) if dim_idx in [0, 1] and obj.bounds: l, b, r, t = obj.bounds.lbrt() @@ -151,7 +153,9 @@ def range(cls, obj, dim): elif 1 < dim_idx < len(obj.vdims) + 2: dim_idx -= 2 data = np.atleast_3d(obj.data)[:, :, dim_idx] - drange = (np.nanmin(data), np.nanmax(data)) + if dim.nodata is not None: + data = cls.replace_value(data, dim.nodata) + drange = finite_range(data, np.nanmin(data), np.nanmax(data)) else: drange = (None, None) return drange diff --git a/holoviews/core/data/interface.py b/holoviews/core/data/interface.py index 957587f298..ac3d62e7aa 100644 --- a/holoviews/core/data/interface.py +++ 
b/holoviews/core/data/interface.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import sys import warnings @@ -10,40 +8,7 @@ from .. import util from ..element import Element from ..ndmapping import NdMapping - - -def get_array_types(): - array_types = (np.ndarray,) - da = dask_array_module() - if da is not None: - array_types += (da.Array,) - return array_types - -def dask_array_module(): - try: - import dask.array as da - return da - except: - return None - -def is_dask(array): - da = dask_array_module() - if da is None: - return False - return da and isinstance(array, da.Array) - -def cached(method): - """ - Decorates an Interface method and using a cached version - """ - def cached(*args, **kwargs): - cache = getattr(args[1], '_cached') - if cache is None: - return method(*args, **kwargs) - else: - args = (cache,)+args[2:] - return getattr(cache.interface, method.__name__)(*args, **kwargs) - return cached +from .util import finite_range class DataError(ValueError): @@ -52,7 +17,7 @@ class DataError(ValueError): def __init__(self, msg, interface=None): if interface is not None: msg = '\n\n'.join([msg, interface.error()]) - super(DataError, self).__init__(msg) + super().__init__(msg) class Accessor(object): @@ -238,7 +203,7 @@ def initialize(cls, eltype, data, kdims, vdims, datatype=None): vdims = pvals.get('vdims') if vdims is None else vdims # Process Element data - if (hasattr(data, 'interface') and issubclass(data.interface, Interface)): + if hasattr(data, 'interface') and isinstance(data.interface, type) and issubclass(data.interface, Interface): if datatype is None: datatype = [dt for dt in data.datatype if dt in eltype.datatype] if not datatype: @@ -359,16 +324,26 @@ def dtype(cls, dataset, dimension): else: return data.dtype + @classmethod + def replace_value(cls, data, nodata): + """ + Replace `nodata` value in data with NaN + """ + data = data.astype('float64') + mask = data != nodata + if hasattr(data, 'where'): + return data.where(mask, 
np.NaN) + return np.where(mask, data, np.NaN) @classmethod def select_mask(cls, dataset, selection): """ Given a Dataset object and a dictionary with dimension keys and - selection keys (i.e tuple ranges, slices, sets, lists or literals) + selection keys (i.e. tuple ranges, slices, sets, lists, or literals) return a boolean mask over the rows in the Dataset object that have been selected. """ - mask = np.ones(len(dataset), dtype=np.bool) + mask = np.ones(len(dataset), dtype=np.bool_) for dim, sel in selection.items(): if isinstance(sel, tuple): sel = slice(*sel) @@ -398,7 +373,7 @@ def select_mask(cls, dataset, selection): index_mask = arr == sel if dataset.ndims == 1 and np.sum(index_mask) == 0: data_index = np.argmin(np.abs(arr - sel)) - mask = np.zeros(len(dataset), dtype=np.bool) + mask = np.zeros(len(dataset), dtype=np.bool_) mask[data_index] = True else: mask &= index_mask @@ -430,7 +405,7 @@ def range(cls, dataset, dimension): assert column.dtype.kind not in 'SUO' with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered') - return (np.nanmin(column), np.nanmax(column)) + return finite_range(column, np.nanmin(column), np.nanmax(column)) except (AssertionError, TypeError): column = [v for v in util.python2sort(column) if v is not None] if not len(column): diff --git a/holoviews/core/data/multipath.py b/holoviews/core/data/multipath.py index e3f6572f86..07e5c527fd 100644 --- a/holoviews/core/data/multipath.py +++ b/holoviews/core/data/multipath.py @@ -131,6 +131,16 @@ def _inner_dataset_template(cls, dataset, validate_vdims=True): kdims=dataset.kdims, vdims=vdims, _validate_vdims=validate_vdims) + @classmethod + def assign(cls, dataset, new_data): + ds = cls._inner_dataset_template(dataset) + assigned = [] + for i, d in enumerate(dataset.data): + ds.data = d + new = ds.interface.assign(ds, {k: v[i:i+1] for k, v in new_data.items()}) + assigned.append(new) + return assigned + @classmethod def dimension_type(cls, 
dataset, dim): if not dataset.data: @@ -551,7 +561,7 @@ def ensure_ring(geom, values=None): """ if values is None: values = geom - + breaks = np.where(np.isnan(geom.astype('float')).sum(axis=1))[0] starts = [0] + list(breaks+1) ends = list(breaks-1) + [len(geom)-1] diff --git a/holoviews/core/data/pandas.py b/holoviews/core/data/pandas.py index 028cfa9f6d..6567ad8b95 100644 --- a/holoviews/core/data/pandas.py +++ b/holoviews/core/data/pandas.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - try: import itertools.izip as zip except ImportError: @@ -14,6 +12,7 @@ from ..dimension import OrderedDict as cyODict from ..ndmapping import NdMapping, item_check, sorted_context from .. import util +from .util import finite_range class PandasInterface(Interface): @@ -159,7 +158,8 @@ def validate(cls, dataset, vdims=True): @classmethod def range(cls, dataset, dimension): - column = dataset.data[dataset.get_dimension(dimension, strict=True).name] + dimension = dataset.get_dimension(dimension, strict=True) + column = dataset.data[dimension.name] if column.dtype.kind == 'O': if (not isinstance(dataset.data, pd.DataFrame) or util.LooseVersion(pd.__version__) < '0.17.0'): @@ -174,7 +174,13 @@ def range(cls, dataset, dimension): return np.NaN, np.NaN return column.iloc[0], column.iloc[-1] else: - return (column.min(), column.max()) + if dimension.nodata is not None: + column = cls.replace_value(column, dimension.nodata) + cmin, cmax = finite_range(column, column.min(), column.max()) + if column.dtype.kind == 'M' and getattr(column.dtype, 'tz', None): + return (cmin.to_pydatetime().replace(tzinfo=None), + cmax.to_pydatetime().replace(tzinfo=None)) + return cmin, cmax @classmethod @@ -317,10 +323,14 @@ def values( ): dim = dataset.get_dimension(dim, strict=True) data = dataset.data[dim.name] + if keep_index: + return data + if data.dtype.kind == 'M' and getattr(data.dtype, 'tz', None): + dts = [dt.replace(tzinfo=None) for dt in data.dt.to_pydatetime()] + data = 
np.array(dts, dtype=data.dtype.base) if not expanded: - return data.unique() - - return data if keep_index else data.values + return pd.unique(data) + return data.values if hasattr(data, 'values') else data @classmethod diff --git a/holoviews/core/data/spatialpandas.py b/holoviews/core/data/spatialpandas.py index 9426811071..8276dcae52 100644 --- a/holoviews/core/data/spatialpandas.py +++ b/holoviews/core/data/spatialpandas.py @@ -1,7 +1,4 @@ -from __future__ import absolute_import, division - import sys -import warnings from collections import defaultdict @@ -16,12 +13,14 @@ class SpatialPandasInterface(MultiInterface): - types = () + base_interface = PandasInterface datatype = 'spatialpandas' multi = True + types = () + @classmethod def loaded(cls): return 'spatialpandas' in sys.modules @@ -30,20 +29,34 @@ def loaded(cls): def applies(cls, obj): if not cls.loaded(): return False - from spatialpandas import GeoDataFrame, GeoSeries - is_sdf = isinstance(obj, (GeoDataFrame, GeoSeries)) + is_sdf = isinstance(obj, cls.data_types()) if 'geopandas' in sys.modules and not 'geoviews' in sys.modules: import geopandas as gpd is_sdf |= isinstance(obj, (gpd.GeoDataFrame, gpd.GeoSeries)) return is_sdf @classmethod - def geo_column(cls, data): + def data_types(cls): + from spatialpandas import GeoDataFrame, GeoSeries + return (GeoDataFrame, GeoSeries) + + @classmethod + def series_type(cls): from spatialpandas import GeoSeries + return GeoSeries + + @classmethod + def frame_type(cls): + from spatialpandas import GeoDataFrame + return GeoDataFrame + + @classmethod + def geo_column(cls, data): col = 'geometry' - if col in data and isinstance(data[col], GeoSeries): + stypes = cls.series_type() + if col in data and isinstance(data[col], stypes): return col - cols = [c for c in data.columns if isinstance(data[c], GeoSeries)] + cols = [c for c in data.columns if isinstance(data[c], stypes)] if not cols: raise ValueError('No geometry column found in spatialpandas.GeoDataFrame, ' 'use 
the PandasInterface instead.') @@ -51,8 +64,7 @@ def geo_column(cls, data): @classmethod def init(cls, eltype, data, kdims, vdims): - import pandas as pd - from spatialpandas import GeoDataFrame, GeoSeries + from spatialpandas import GeoDataFrame if kdims is None: kdims = eltype.kdims @@ -60,7 +72,7 @@ def init(cls, eltype, data, kdims, vdims): if vdims is None: vdims = eltype.vdims - if isinstance(data, GeoSeries): + if isinstance(data, cls.series_type()): data = data.to_frame() if 'geopandas' in sys.modules: @@ -74,8 +86,8 @@ def init(cls, eltype, data, kdims, vdims): data = from_shapely(data) if isinstance(data, list): data = from_multi(eltype, data, kdims, vdims) - elif not isinstance(data, GeoDataFrame): - raise ValueError("SpatialPandasInterface only support spatialpandas DataFrames.") + elif not isinstance(data, cls.frame_type()): + raise ValueError("%s only support spatialpandas DataFrames." % cls.__name__) elif 'geometry' not in data: cls.geo_column(data) @@ -116,7 +128,7 @@ def dtype(cls, dataset, dimension): dim = dataset.get_dimension(dimension, strict=True) if dim in cls.geom_dims(dataset): col = cls.geo_column(dataset.data) - return dataset.data[col].values.numpy_dtype + return dataset.data[col].dtype.subtype return dataset.data[dim.name].dtype @classmethod @@ -157,43 +169,14 @@ def select(cls, dataset, selection_mask=None, **selection): elif selection_mask is None: selection_mask = cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection) - df = df.iloc[selection_mask] + df = df[selection_mask] if indexed and len(df) == 1 and len(dataset.vdims) == 1: return df[dataset.vdims[0].name].iloc[0] return df @classmethod def select_mask(cls, dataset, selection): - mask = np.ones(len(dataset.data), dtype=np.bool) - for dim, k in selection.items(): - if isinstance(k, tuple): - k = slice(*k) - arr = dataset.data[dim].values - if isinstance(k, slice): - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', r'invalid value 
encountered') - if k.start is not None: - mask &= k.start <= arr - if k.stop is not None: - mask &= arr < k.stop - elif isinstance(k, (set, list)): - iter_slcs = [] - for ik in k: - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', r'invalid value encountered') - iter_slcs.append(arr == ik) - mask &= np.logical_or.reduce(iter_slcs) - elif callable(k): - mask &= k(arr) - else: - index_mask = arr == k - if dataset.ndims == 1 and np.sum(index_mask) == 0: - data_index = np.argmin(np.abs(arr - k)) - mask = np.zeros(len(dataset), dtype=np.bool) - mask[data_index] = True - else: - mask &= index_mask - return mask + return cls.base_interface.select_mask(dataset, selection) @classmethod def geom_dims(cls, dataset): @@ -203,13 +186,7 @@ def geom_dims(cls, dataset): @classmethod def dimension_type(cls, dataset, dim): dim = dataset.get_dimension(dim) - col = cls.geo_column(dataset.data) - if dim in cls.geom_dims(dataset) and len(dataset.data): - arr = geom_to_array(dataset.data[col].iloc[0]) - ds = dataset.clone(arr, datatype=cls.subtypes, vdims=[]) - return ds.interface.dimension_type(ds, dim) - else: - return cls.dtype(dataset, dim).type + return cls.dtype(dataset, dim).type @classmethod def isscalar(cls, dataset, dim, per_geom=False): @@ -238,7 +215,7 @@ def range(cls, dataset, dim): else: return (bounds[1], bounds[3]) else: - return Interface.range(dataset, dim) + return cls.base_interface.range(dataset, dim) @classmethod def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): @@ -246,7 +223,7 @@ def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): if any(d in geo_dims for d in dimensions): raise DataError("SpatialPandasInterface does not allow grouping " "by geometry dimension.", cls) - return PandasInterface.groupby(dataset, dimensions, container_type, group_type, **kwargs) + return cls.base_interface.groupby(dataset, dimensions, container_type, group_type, **kwargs) @classmethod def aggregate(cls, columns, 
dimensions, function, **kwargs): @@ -270,7 +247,7 @@ def sort(cls, dataset, by=[], reverse=False): if any(d in geo_dims for d in by): raise DataError("SpatialPandasInterface does not allow sorting " "by geometry dimension.", cls) - return PandasInterface.sort(dataset, by, reverse) + return cls.base_interface.sort(dataset, by, reverse) @classmethod def length(cls, dataset): @@ -279,7 +256,7 @@ def length(cls, dataset): column = dataset.data[col_name] geom_type = cls.geom_type(dataset) if not isinstance(column.dtype, MultiPointDtype) and geom_type != 'Point': - return PandasInterface.length(dataset) + return cls.base_interface.length(dataset) length = 0 for i, geom in enumerate(column): if isinstance(geom, Point): @@ -290,11 +267,11 @@ def length(cls, dataset): @classmethod def nonzero(cls, dataset): - return bool(cls.length(dataset)) + return bool(len(dataset.data.head(1))) @classmethod def redim(cls, dataset, dimensions): - return PandasInterface.redim(dataset, dimensions) + return cls.base_interface.redim(dataset, dimensions) @classmethod def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): @@ -386,13 +363,18 @@ def values(cls, dataset, dimension, expanded=True, flat=True, compute=True, keep if isgeom and keep_index: return data[geom_col] elif not isgeom: + if is_points: + return data[dimension.name].values return get_value_array(data, dimension, expanded, keep_index, geom_col, is_points) elif not len(data): return np.array([]) geom_type = cls.geom_type(dataset) index = geom_dims.index(dimension) - return geom_array_to_array(data[geom_col].values, index, expanded, geom_type) + geom_series = data[geom_col] + if compute and hasattr(geom_series, 'compute'): + geom_series = geom_series.compute() + return geom_array_to_array(geom_series.values, index, expanded, geom_type) @classmethod def split(cls, dataset, start, end, datatype, **kwargs): @@ -444,6 +426,18 @@ def split(cls, dataset, start, end, datatype, **kwargs): objs.append(obj) return objs + 
@classmethod + def dframe(cls, dataset, dimensions): + if dimensions: + return dataset.data[dimensions] + else: + return dataset.data.copy() + + @classmethod + def as_dframe(cls, dataset): + return dataset.data + + def get_geom_type(gdf, col): @@ -604,7 +598,7 @@ def get_value_array(data, dimension, expanded, keep_index, geom_col, all_scalar = True arrays, scalars = [], [] for i, geom in enumerate(data[geom_col]): - length = geom_length(geom) + length = 1 if is_points else geom_length(geom) val = column.iloc[i] scalar = isscalar(val) if scalar: diff --git a/holoviews/core/data/spatialpandas_dask.py b/holoviews/core/data/spatialpandas_dask.py new file mode 100644 index 0000000000..4e03b72b60 --- /dev/null +++ b/holoviews/core/data/spatialpandas_dask.py @@ -0,0 +1,89 @@ +import sys + +import numpy as np + +from .dask import DaskInterface +from .interface import Interface +from .spatialpandas import SpatialPandasInterface + + +class DaskSpatialPandasInterface(SpatialPandasInterface): + + base_interface = DaskInterface + + datatype = 'dask_spatialpandas' + + @classmethod + def loaded(cls): + return 'spatialpandas.dask' in sys.modules + + @classmethod + def data_types(cls): + from spatialpandas.dask import DaskGeoDataFrame, DaskGeoSeries + return (DaskGeoDataFrame, DaskGeoSeries) + + @classmethod + def series_type(cls): + from spatialpandas.dask import DaskGeoSeries + return DaskGeoSeries + + @classmethod + def frame_type(cls): + from spatialpandas.dask import DaskGeoDataFrame + return DaskGeoDataFrame + + @classmethod + def init(cls, eltype, data, kdims, vdims): + import dask.dataframe as dd + data, dims, params = super().init( + eltype, data, kdims, vdims + ) + if not isinstance(data, cls.frame_type()): + data = dd.from_pandas(data, npartitions=1) + return data, dims, params + + @classmethod + def partition_values(cls, df, dataset, dimension, expanded, flat): + ds = dataset.clone(df, datatype=['spatialpandas']) + return ds.interface.values(ds, dimension, expanded, 
flat) + + @classmethod + def values(cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False): + if compute and not keep_index: + dtype = cls.dtype(dataset, dimension) + meta = np.array([], dtype=dtype.base) + return dataset.data.map_partitions( + cls.partition_values, meta=meta, dataset=dataset, + dimension=dimension, expanded=expanded, flat=flat + ).compute() + values = super().values( + dataset, dimension, expanded, flat, compute, keep_index + ) + if compute and not keep_index and hasattr(values, 'compute'): + return values.compute() + return values + + @classmethod + def split(cls, dataset, start, end, datatype, **kwargs): + ds = dataset.clone(dataset.data.compute(), datatype=['spatialpandas']) + return ds.interface.split(ds, start, end, datatype, **kwargs) + + @classmethod + def iloc(cls, dataset, index): + rows, cols = index + if rows is not None: + raise NotImplementedError + return super().iloc(dataset, index) + + @classmethod + def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): + return cls.base_interface.add_dimension(dataset, dimension, dim_pos, values, vdim) + + @classmethod + def dframe(cls, dataset, dimensions): + if dimensions: + return dataset.data[dimensions].compute() + else: + return dataset.data.compute() + +Interface.register(DaskSpatialPandasInterface) diff --git a/holoviews/core/data/util.py b/holoviews/core/data/util.py new file mode 100644 index 0000000000..bdee313137 --- /dev/null +++ b/holoviews/core/data/util.py @@ -0,0 +1,69 @@ +import numpy as np + +from .. 
import util + + +def finite_range(column, cmin, cmax): + try: + min_inf = np.isinf(cmin) + except TypeError: + min_inf = False + try: + max_inf = np.isinf(cmax) + except TypeError: + max_inf = False + if (min_inf or max_inf): + column = column[np.isfinite(column)] + if len(column): + cmin = np.nanmin(column) if min_inf else cmin + cmax = np.nanmax(column) if max_inf else cmax + if is_dask(column): + import dask.array as da + if min_inf and max_inf: + cmin, cmax = da.compute(cmin, cmax) + elif min_inf: + cmin = cmin.compute() + else: + cmax = cmax.compute() + else: + return cmin, cmax + if isinstance(cmin, np.ndarray) and cmin.shape == (): + cmin = cmin[()] + if isinstance(cmax, np.ndarray) and cmax.shape == (): + cmax = cmax[()] + cmin = cmin if np.isscalar(cmin) or isinstance(cmin, util.datetime_types) else cmin.item() + cmax = cmax if np.isscalar(cmax) or isinstance(cmax, util.datetime_types) else cmax.item() + return cmin, cmax + +def get_array_types(): + array_types = (np.ndarray,) + da = dask_array_module() + if da is not None: + array_types += (da.Array,) + return array_types + +def dask_array_module(): + try: + import dask.array as da + return da + except: + return None + +def is_dask(array): + da = dask_array_module() + if da is None: + return False + return da and isinstance(array, da.Array) + +def cached(method): + """ + Decorates an Interface method and using a cached version + """ + def cached(*args, **kwargs): + cache = getattr(args[1], '_cached') + if cache is None: + return method(*args, **kwargs) + else: + args = (cache,)+args[2:] + return getattr(cache.interface, method.__name__)(*args, **kwargs) + return cached diff --git a/holoviews/core/data/xarray.py b/holoviews/core/data/xarray.py index de69df791f..b5f01a3046 100644 --- a/holoviews/core/data/xarray.py +++ b/holoviews/core/data/xarray.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import sys import types @@ -12,14 +10,15 @@ from ..ndmapping import NdMapping, item_check, 
sorted_context from ..element import Element from .grid import GridInterface -from .interface import Interface, DataError, dask_array_module +from .interface import Interface, DataError +from .util import dask_array_module, finite_range def is_cupy(array): if 'cupy' not in sys.modules: return False from cupy import ndarray - return isinstance(array, ndarray) + return isinstance(array, ndarray) class XArrayInterface(GridInterface): @@ -82,11 +81,14 @@ def retrieve_unit_and_label(dim): dim = asdim(dim) coord = data[dim.name] unit = coord.attrs.get('units') if dim.unit is None else dim.unit + if isinstance(unit, tuple): + unit = unit[0] if 'long_name' in coord.attrs: spec = (dim.name, coord.attrs['long_name']) else: spec = (dim.name, dim.label) - return dim.clone(spec, unit=unit) + nodata = coord.attrs.get('NODATA') + return dim.clone(spec, unit=unit, nodata=nodata) packed = False if isinstance(data, xr.DataArray): @@ -99,8 +101,9 @@ def retrieve_unit_and_label(dim): elif data.name: vdim = Dimension(data.name) vdim.unit = data.attrs.get('units') + vdim.nodata = data.attrs.get('NODATA') label = data.attrs.get('long_name') - if label is not None: + if 'long_name' in data.attrs: vdim.label = label elif len(vdim_param.default) == 1: vdim = vdim_param.default[0] @@ -246,22 +249,27 @@ def persist(cls, dataset): @classmethod def range(cls, dataset, dimension): - dim = dataset.get_dimension(dimension, strict=True).name - if dataset._binned and dimension in dataset.kdims: + dimension = dataset.get_dimension(dimension, strict=True) + dim = dimension.name + edges = dataset._binned and dimension in dataset.kdims + if edges: data = cls.coords(dataset, dim, edges=True) - if data.dtype.kind == 'M': - dmin, dmax = data.min(), data.max() - else: - dmin, dmax = np.nanmin(data), np.nanmax(data) else: if cls.packed(dataset) and dim in dataset.vdims: data = dataset.data.values[..., dataset.vdims.index(dim)] else: data = dataset.data[dim] - if len(data): - dmin, dmax = data.min().data, 
data.max().data - else: - dmin, dmax = np.NaN, np.NaN + if dimension.nodata is not None: + data = cls.replace_value(data, dimension.nodata) + + if not len(data): + dmin, dmax = np.NaN, np.NaN + elif data.dtype.kind == 'M' or not edges: + dmin, dmax = data.min(), data.max() + if not edges: + dmin, dmax = dmin.data, dmax.data + else: + dmin, dmax = np.nanmin(data), np.nanmax(data) da = dask_array_module() if da and isinstance(dmin, da.Array): @@ -272,7 +280,7 @@ def range(cls, dataset, dimension): dmax = dmax[()] dmin = dmin if np.isscalar(dmin) or isinstance(dmin, util.datetime_types) else dmin.item() dmax = dmax if np.isscalar(dmax) or isinstance(dmax, util.datetime_types) else dmax.item() - return dmin, dmax + return finite_range(data, dmin, dmax) @classmethod @@ -405,9 +413,10 @@ def unpack_scalar(cls, dataset, data): Given a dataset object and data in the appropriate format for the interface, return a simple scalar. """ - if (not cls.packed(dataset) and len(data.data_vars) == 1 and - len(data[dataset.vdims[0].name].shape) == 0): - return data[dataset.vdims[0].name].item() + if not cls.packed(dataset) and len(data.data_vars) == 1: + array = data[dataset.vdims[0].name].squeeze() + if len(array.shape) == 0: + return array.item() return data @@ -528,6 +537,9 @@ def mask(cls, dataset, mask, mask_val=np.nan): @classmethod def select(cls, dataset, selection_mask=None, **selection): + if selection_mask is not None: + return dataset.data.where(selection_mask, drop=True) + validated = {} for k, v in selection.items(): dim = dataset.get_dimension(k, strict=True) @@ -658,7 +670,7 @@ def assign(cls, dataset, new_data): data = data.assign(vars) used_coords = set.intersection(*[set(var.coords) for var in data.data_vars.values()]) drop_coords = set.symmetric_difference(used_coords, prev_coords) - return data.drop([c for c in drop_coords if c in data.coords]), list(drop_coords) + return data.drop_vars([c for c in drop_coords if c in data.coords]), list(drop_coords) 
Interface.register(XArrayInterface) diff --git a/holoviews/core/dimension.py b/holoviews/core/dimension.py index db4d95c104..9d9cdeb45b 100644 --- a/holoviews/core/dimension.py +++ b/holoviews/core/dimension.py @@ -3,8 +3,6 @@ axis or map dimension. Also supplies the Dimensioned abstract baseclass for classes that accept Dimension values. """ -from __future__ import unicode_literals - import re import datetime as dt import weakref @@ -22,7 +20,7 @@ from .options import Store, Options, cleanup_custom_options from .pprint import PrettyPrinter from .tree import AttrTree -from .util import basestring, OrderedDict, bytes_to_unicode, unicode +from .util import OrderedDict, bytes_to_unicode # Alias parameter support for pickle loading @@ -60,7 +58,7 @@ def asdim(dimension): """ if isinstance(dimension, Dimension): return dimension - elif isinstance(dimension, (tuple, dict, basestring)): + elif isinstance(dimension, (tuple, dict, str)): return Dimension(dimension) else: raise ValueError('%s type could not be interpreted as Dimension. ' @@ -79,7 +77,7 @@ def dimension_name(dimension): """ if isinstance(dimension, Dimension): return dimension.name - elif isinstance(dimension, basestring): + elif isinstance(dimension, str): return dimension elif isinstance(dimension, tuple): return dimension[0] @@ -113,7 +111,7 @@ def process_dimensions(kdims, vdims): for group, dims in [('kdims', kdims), ('vdims', vdims)]: if dims is None: continue - elif isinstance(dims, (tuple, basestring, Dimension, dict)): + elif isinstance(dims, (tuple, str, Dimension, dict)): dims = [dims] elif not isinstance(dims, list): raise ValueError("%s argument expects a Dimension or list of dimensions, " @@ -121,7 +119,7 @@ def process_dimensions(kdims, vdims): "instances, not a %s type. Ensure you passed the data as the " "first argument." 
% (group, type(dims).__name__)) for dim in dims: - if not isinstance(dim, (tuple, basestring, Dimension, dict)): + if not isinstance(dim, (tuple, str, Dimension, dict)): raise ValueError('Dimensions must be defined as a tuple, ' 'string, dictionary or Dimension instance, ' 'found a %s type.' % type(dim).__name__) @@ -191,8 +189,13 @@ class Dimension(param.Parameterized): maximum allowed value (defined by the range parameter) is continuous with the minimum allowed value.""") - value_format = param.Callable(default=None, doc=""" - Formatting function applied to each value before display.""") + default = param.Parameter(default=None, doc=""" + Default value of the Dimension which may be useful for widget + or other situations that require an initial or default value.""") + + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. + If non-None, data with this value will be replaced with NaN.""") range = param.Tuple(default=(None, None), doc=""" Specifies the minimum and maximum allowed values for a @@ -202,25 +205,24 @@ class Dimension(param.Parameterized): Specifies a minimum and maximum reference value, which may be overridden by the data.""") - type = param.Parameter(default=None, doc=""" - Optional type associated with the Dimension values. The type - may be an inbuilt constructor (such as int, str, float) or a - custom class object.""") - - default = param.Parameter(default=None, doc=""" - Default value of the Dimension which may be useful for widget - or other situations that require an initial or default value.""") - step = param.Number(default=None, doc=""" Optional floating point step specifying how frequently the underlying space should be sampled. May be used to define a discrete sampling over the range.""") + type = param.Parameter(default=None, doc=""" + Optional type associated with the Dimension values. 
The type + may be an inbuilt constructor (such as int, str, float) or a + custom class object.""") + unit = param.String(default=None, allow_None=True, doc=""" Optional unit string associated with the Dimension. For instance, the string 'm' may be used represent units of meters and 's' to represent units of seconds.""") + value_format = param.Callable(default=None, doc=""" + Formatting function applied to each value before display.""") + values = param.List(default=[], doc=""" Optional specification of the allowed value set for the dimension that may also be used to retain a categorical @@ -255,7 +257,7 @@ def __init__(self, spec, **params): all_params = dict(existing_params, **params) if isinstance(spec, tuple): - if not all(isinstance(s, basestring) for s in spec) or len(spec) != 2: + if not all(isinstance(s, str) for s in spec) or len(spec) != 2: raise ValueError("Dimensions specified as a tuple must be a tuple " "consisting of the name and label not: %s" % str(spec)) name, label = spec @@ -267,7 +269,7 @@ def __init__(self, spec, **params): 'Using label as supplied by keyword ({!r}), ignoring ' 'tuple value {!r}'.format(params['label'], label)) all_params['label'] = params['label'] - elif isinstance(spec, basestring): + elif isinstance(spec, str): all_params['name'] = spec all_params['label'] = params.get('label', spec) @@ -277,13 +279,13 @@ def __init__(self, spec, **params): raise ValueError('Dimension label cannot be None or the empty string') values = params.get('values', []) - if isinstance(values, basestring) and values == 'initial': + if isinstance(values, str) and values == 'initial': self.param.warning("The 'initial' string for dimension values " "is no longer supported.") values = [] all_params['values'] = list(util.unique_array(values)) - super(Dimension, self).__init__(**all_params) + super().__init__(**all_params) if self.default is not None: if self.values and self.default not in values: raise ValueError('%r default %s not found in declared 
values: %s' % @@ -294,7 +296,6 @@ def __init__(self, spec, **params): raise ValueError('%r default %s not in declared range: %s' % (self, self.default, self.range)) - @property def spec(self): """"Returns the Dimensions tuple specification @@ -304,13 +305,6 @@ def spec(self): """ return (self.name, self.label) - - def __call__(self, spec=None, **overrides): - self.param.warning('Dimension.__call__ method has been deprecated, ' - 'use the clone method instead.') - return self.clone(spec=spec, **overrides) - - def clone(self, spec=None, **overrides): """Clones the Dimension with new parameters @@ -328,7 +322,7 @@ def clone(self, spec=None, **overrides): if spec is None: spec = (self.name, overrides.get('label', self.label)) - if 'label' in overrides and isinstance(spec, basestring) : + if 'label' in overrides and isinstance(spec, str) : spec = (spec, overrides['label']) elif 'label' in overrides and isinstance(spec, tuple) : if overrides['label'] != spec[1]: @@ -348,8 +342,9 @@ def __setstate__(self, d): """ Compatibility for pickles before alias attribute was introduced. """ - super(Dimension, self).__setstate__(d) - self.label = self.name + super().__setstate__(d) + if '_label_param_value' not in d: + self.label = self.name def __eq__(self, other): "Implements equals operator including sanitized comparison." 
@@ -410,7 +405,7 @@ def pprint_value(self, value, print_unit=False): if formatter: if callable(formatter): formatted_value = formatter(value) - elif isinstance(formatter, basestring): + elif isinstance(formatter, str): if isinstance(value, (dt.datetime, dt.date)): formatted_value = value.strftime(formatter) elif isinstance(value, np.datetime64): @@ -420,7 +415,7 @@ def pprint_value(self, value, print_unit=False): else: formatted_value = formatter % value else: - formatted_value = unicode(bytes_to_unicode(value)) + formatted_value = bytes_to_unicode(value) if print_unit and self.unit is not None: formatted_value = formatted_value + ' ' + bytes_to_unicode(self.unit) @@ -505,7 +500,7 @@ def __init__(self, data, id=None, plot_id=None, **params): util.group_sanitizer.add_aliases(**{alias:long_name}) params['group'] = long_name - super(LabelledData, self).__init__(**params) + super().__init__(**params) if not util.group_sanitizer.allowable(self.group): raise ValueError("Supplied group %r contains invalid characters." 
% self.group) @@ -756,7 +751,7 @@ def __setstate__(self, d): self.param.warning("Could not unpickle custom style information.") d['_id'] = opts_id self.__dict__.update(d) - super(LabelledData, self).__setstate__({}) + super().__setstate__({}) class Dimensioned(LabelledData): @@ -846,7 +841,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): if 'cdims' in params: params['cdims'] = {d if isinstance(d, Dimension) else Dimension(d): val for d, val in params['cdims'].items()} - super(Dimensioned, self).__init__(data, **params) + super().__init__(data, **params) self.ndims = len(self.kdims) cdims = [(d.name, val) for d, val in self.cdims.items()] self._cached_constants = OrderedDict(cdims) @@ -953,7 +948,7 @@ def get_dimension(self, dimension, default=None, strict=False): Returns: Dimension object for the requested dimension or default """ - if dimension is not None and not isinstance(dimension, (int, basestring, Dimension)): + if dimension is not None and not isinstance(dimension, (int, str, Dimension)): raise TypeError('Dimension lookup supports int, string, ' 'and Dimension instances, cannot lookup ' 'Dimensions using %s type.' % type(dimension).__name__) @@ -1200,8 +1195,6 @@ def range(self, dimension, data_range=True, dimension_range=True): if not dimension_range: return lower, upper return util.dimension_range(lower, upper, dimension.range, dimension.soft_range) - - def __repr__(self): return PrettyPrinter.pprint(self) @@ -1209,18 +1202,7 @@ def __str__(self): return repr(self) def __unicode__(self): - return unicode(PrettyPrinter.pprint(self)) - - def __call__(self, options=None, **kwargs): - self.param.warning( - 'Use of __call__ to set options will be deprecated ' - 'in the next major release (1.14.0). 
Use the equivalent .opts ' - 'method instead.') - - if not kwargs and options is None: - return self.opts.clear() - - return self.opts(options, **kwargs) + return PrettyPrinter.pprint(self) def options(self, *args, **kwargs): """Applies simplified option definition returning a new object. @@ -1265,7 +1247,7 @@ def options(self, *args, **kwargs): if len(args) == 0 and len(kwargs)==0: options = None - elif args and isinstance(args[0], basestring): + elif args and isinstance(args[0], str): options = {args[0]: kwargs} elif args and isinstance(args[0], list): if kwargs: @@ -1350,17 +1332,6 @@ def __init__(self, items=None, identifier=None, parent=None, **kwargs): AttrTree.__init__(self, items, identifier, parent, **kwargs) Dimensioned.__init__(self, self.data, **params) - - @classmethod - def from_values(cls, vals): - "Deprecated method to construct tree from list of objects" - name = cls.__name__ - param.main.param.warning("%s.from_values is deprecated, the %s " - "constructor may now be used directly." - % (name, name)) - return cls(items=cls._process_items(vals)) - - @classmethod def _process_items(cls, vals): "Processes list of items assigning unique paths to each." @@ -1460,7 +1431,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): vals = np.concatenate(values) return vals if expanded else util.unique_array(vals) else: - return super(ViewableTree, self).dimension_values( + return super().dimension_values( dimension, expanded, flat) def __len__(self): diff --git a/holoviews/core/element.py b/holoviews/core/element.py index fc9f4d5dae..85cb269d28 100644 --- a/holoviews/core/element.py +++ b/holoviews/core/element.py @@ -85,29 +85,16 @@ def __nonzero__(self): """ return True - def __contains__(self, dimension): "Whether element contains the Dimension" return dimension in self.dimensions() - def __iter__(self): "Disable iterator interface." 
raise NotImplementedError('Iteration on Elements is not supported.') - __bool__ = __nonzero__ - - @classmethod - def collapse_data(cls, data, function=None, kdims=None, **kwargs): - """ - Deprecated method to perform collapse operations, which may - now be performed through concatenation and aggregation. - """ - raise NotImplementedError("Collapsing not implemented for %s." % cls.__name__) - - def closest(self, coords, **kwargs): """Snap list or dict of coordinates to closest position. @@ -123,7 +110,6 @@ def closest(self, coords, **kwargs): """ raise NotImplementedError - def sample(self, samples=[], bounds=None, closest=False, **sample_values): """Samples values at supplied coordinates. @@ -259,48 +245,6 @@ def array(self, dimensions=None): return np.column_stack(columns) - ###################### - # Deprecations # - ###################### - - def table(self, datatype=None): - "Deprecated method to convert any Element to a Table." - self.param.warning( - "The table method is deprecated and should no " - "longer be used. Instead cast the %s to a " - "a Table directly." % type(self).__name__) - - if datatype and not isinstance(datatype, list): - datatype = [datatype] - from ..element import Table - return Table(self, **(dict(datatype=datatype) if datatype else {})) - - - def mapping(self, kdims=None, vdims=None, **kwargs): - "Deprecated method to convert data to dictionary" - self.param.warning( - "The mapping method is deprecated and should no " - "longer be used. Use another one of the common " - "formats instead, e.g. 
.dframe, .array or .columns.") - - length = len(self) - if not kdims: kdims = self.kdims - if kdims: - keys = zip(*[self.dimension_values(dim.name) - for dim in self.kdims]) - else: - keys = [()]*length - - if not vdims: vdims = self.vdims - if vdims: - values = zip(*[self.dimension_values(dim.name) - for dim in vdims]) - else: - values = [()]*length - return OrderedDict(zip(keys, values)) - - - class Tabular(Element): """ Baseclass to give an elements providing an API to generate a @@ -438,7 +382,7 @@ def __init__(self, data=None, **params): if 'vdims' not in params: params['vdims'] = data.vdims data = data.mapping() - super(Collator, self).__init__(data, **params) + super().__init__(data, **params) def __call__(self): diff --git a/holoviews/core/io.py b/holoviews/core/io.py index a95d0d06c3..89d94bca25 100644 --- a/holoviews/core/io.py +++ b/holoviews/core/io.py @@ -12,8 +12,6 @@ objects for a report then generating a PDF or collecting HoloViews objects to dump to HDF5. """ -from __future__ import absolute_import - import re, os, time, string, zipfile, tarfile, shutil, itertools, pickle from collections import defaultdict @@ -82,7 +80,7 @@ class Exporter(param.ParameterizedFunction): Pickling: Native Python, supported by HoloViews. Rendering: Any plotting backend may be used (default uses matplotlib) - Storage: Saving to a database (e.g SQL), HDF5 etc. + Storage: Saving to a database (e.g. SQL), HDF5 etc. """ # Mime-types that need encoding as utf-8 upon export @@ -177,7 +175,7 @@ class Importer(param.ParameterizedFunction): Unpickling: Native Python, supported by HoloViews. Servers: Loading data over a network connection. - Storage: Loading from a database (e.g SQL), HDF5 etc. + Storage: Loading from a database (e.g. SQL), HDF5 etc. """ def __call__(self, data): @@ -575,7 +573,7 @@ class FileArchive(Archive): export_name = param.String(default='{timestamp}', doc=""" The name assigned to the overall export. 
If an archive file is - used, this is the correspond filename (e.g of the exporter zip + used, this is the correspond filename (e.g. of the exporter zip file). Alternatively, if unpack=False, this is the name of the output directory. Lastly, for archives of a single file, this is the basename of the output file. @@ -612,7 +610,7 @@ def parse_fields(cls, formatter): raise SyntaxError("Could not parse formatter %r" % formatter) def __init__(self, **params): - super(FileArchive, self).__init__(**params) + super().__init__(**params) # Items with key: (basename,ext) and value: (data, info) self._files = OrderedDict() self._validate_formatters() @@ -859,5 +857,5 @@ def clear(self): "Clears the file archive" self._files.clear() - + diff --git a/holoviews/core/layout.py b/holoviews/core/layout.py index aa24b2335c..39d5ae77e9 100644 --- a/holoviews/core/layout.py +++ b/holoviews/core/layout.py @@ -23,6 +23,13 @@ def __add__(self, obj): "Compose objects into a Layout" return Layout([self, obj]) + def __radd__(self, other): + if isinstance(other, int): + raise TypeError("unsupported operand type(s) for +: 'int' and 'Overlay'. 
" + "If you are trying to use a reduction like `sum(elements)` " + "to combine a list of elements, we recommend you use " + "`Layout(elements)` (and similarly `Overlay(elements)` for " + "making an overlay from a list) instead.") def __lshift__(self, other): "Compose objects into an AdjointLayout" @@ -46,7 +53,7 @@ class Empty(Dimensioned, Composable): group = param.String(default='Empty') def __init__(self): - super(Empty, self).__init__(None) + super().__init__(None) @@ -91,7 +98,7 @@ def __init__(self, data, **params): else: data = OrderedDict() - super(AdjointLayout, self).__init__(data, **params) + super().__init__(data, **params) def __mul__(self, other, reverse=False): @@ -180,7 +187,7 @@ def relabel(self, label=None, group=None, depth=1): Returns: Returns relabelled object """ - return super(AdjointLayout, self).relabel(label=label, group=group, depth=depth) + return super().relabel(label=label, group=group, depth=depth) def get(self, key, default=None): @@ -309,6 +316,14 @@ def __add__(self, obj): "Composes plot into a Layout with another object." return Layout([self, obj]) + def __radd__(self, other): + if isinstance(other, int): + raise TypeError("unsupported operand type(s) for +: 'int' and 'Overlay'. 
" + "If you are trying to use a reduction like `sum(elements)` " + "to combine a list of elements, we recommend you use " + "`Layout(elements)` (and similarly `Overlay(elements)` for " + "making an overlay from a list) instead.") + return super().__radd__(self, other) def __len__(self): "Number of items in the AdjointLayout" @@ -329,8 +344,8 @@ class NdLayout(UniformNdMapping): def __init__(self, initial_items=None, kdims=None, **params): self._max_cols = 4 self._style = None - super(NdLayout, self).__init__(initial_items=initial_items, kdims=kdims, - **params) + super().__init__(initial_items=initial_items, kdims=kdims, + **params) @property @@ -379,6 +394,15 @@ def __add__(self, obj): return Layout([self, obj]) + def __radd__(self, other): + if isinstance(other, int): + raise TypeError("unsupported operand type(s) for +: 'int' and 'Overlay'. " + "If you are trying to use a reduction like `sum(elements)` " + "to combine a list of elements, we recommend you use " + "`Layout(elements)` (and similarly `Overlay(elements)` for " + "making an overlay from a list) instead.") + return super().__radd__(self, other) + @property def last(self): """ @@ -410,7 +434,7 @@ def clone(self, *args, **overrides): Returns: Cloned NdLayout object """ - clone = super(NdLayout, self).clone(*args, **overrides) + clone = super().clone(*args, **overrides) clone._max_cols = self._max_cols clone.id = self.id return clone @@ -434,7 +458,7 @@ class Layout(ViewableTree): def __init__(self, items=None, identifier=None, parent=None, **kwargs): self.__dict__['_max_cols'] = 4 - super(Layout, self).__init__(items, identifier, parent, **kwargs) + super().__init__(items, identifier, parent, **kwargs) def decollate(self): """Packs Layout of DynamicMaps into a single DynamicMap that returns a Layout @@ -482,7 +506,7 @@ def __getitem__(self, key): if idx >= len(keys) or col >= self._max_cols: raise KeyError('Index %s is outside available item range' % str(key)) key = keys[idx] - return super(Layout, 
self).__getitem__(key) + return super().__getitem__(key) def clone(self, *args, **overrides): @@ -498,7 +522,7 @@ def clone(self, *args, **overrides): Returns: Cloned Layout object """ - clone = super(Layout, self).clone(*args, **overrides) + clone = super().clone(*args, **overrides) clone._max_cols = self._max_cols return clone @@ -531,7 +555,7 @@ def relabel(self, label=None, group=None, depth=1): Returns: Returns relabelled object """ - return super(Layout, self).relabel(label, group, depth) + return super().relabel(label, group, depth) def grid_items(self): return {tuple(np.unravel_index(idx, self.shape)): (path, item) diff --git a/holoviews/core/ndmapping.py b/holoviews/core/ndmapping.py index 55455f21e6..56d26d669d 100644 --- a/holoviews/core/ndmapping.py +++ b/holoviews/core/ndmapping.py @@ -12,8 +12,10 @@ from . import util from .dimension import OrderedDict, Dimension, Dimensioned, ViewableElement, asdim -from .util import (unique_iterator, sanitize_identifier, dimension_sort, - basestring, wrap_tuple, process_ellipses, get_ndmapping_label) +from .util import ( + unique_iterator, sanitize_identifier, dimension_sort, wrap_tuple, + process_ellipses, get_ndmapping_label +) class item_check(object): """ @@ -99,7 +101,7 @@ def __init__(self, initial_items=None, kdims=None, **params): params = dict(util.get_param_values(initial_items), **dict(params)) if kdims is not None: params['kdims'] = kdims - super(MultiDimensionalMapping, self).__init__(OrderedDict(), **dict(params)) + super().__init__(OrderedDict(), **dict(params)) if type(initial_items) is dict and not self.sort: raise ValueError('If sort=False the data must define a fixed ' 'ordering, please supply a list of items or ' @@ -266,8 +268,7 @@ def clone(self, data=None, shared_data=True, *args, **overrides): Cloned object """ with item_check(not shared_data and self._check_items): - return super(MultiDimensionalMapping, self).clone(data, shared_data, - *args, **overrides) + return super().clone(data, 
shared_data, *args, **overrides) def groupby(self, dimensions, container_type=None, group_type=None, **kwargs): @@ -337,7 +338,7 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): dims.insert(dim_pos, dimension) dimensions = dict(kdims=dims) - if isinstance(dim_val, basestring) or not hasattr(dim_val, '__iter__'): + if isinstance(dim_val, str) or not hasattr(dim_val, '__iter__'): dim_val = cycle([dim_val]) else: if not len(dim_val) == len(self): @@ -579,47 +580,6 @@ def __contains__(self, key): def __len__(self): return len(self.data) - ###################### - # Deprecations # - ###################### - - def table(self, datatype=None, **kwargs): - """ - Deprecated method to convert an MultiDimensionalMapping of - Elements to a Table. - """ - self.param.warning("The table method is deprecated and should no " - "longer be used. If using a HoloMap use " - "HoloMap.collapse() instead to return a Dataset.") - - from .data.interface import Interface - from ..element.tabular import Table - new_data = [(key, value.table(datatype=datatype, **kwargs)) - for key, value in self.data.items()] - tables = self.clone(new_data) - return Interface.concatenate(tables, new_type=Table) - - - def dframe(self): - """ - Deprecated method to convert a MultiDimensionalMapping to - a pandas DataFrame. Conversion to a dataframe now only - supported by specific subclasses such as UniformNdMapping - types. - """ - self.param.warning("The MultiDimensionalMapping.dframe method is " - "deprecated and should no longer be used. " - "Use a more specific subclass which does support " - "the dframe method instead, e.g. 
a HoloMap.") - try: - import pandas - except ImportError: - raise Exception("Cannot build a DataFrame without the pandas library.") - labels = self.dimensions('key', True) + [self.group] - return pandas.DataFrame( - [dict(zip(labels, k + (v,))) for (k, v) in self.data.items()]) - - class NdMapping(MultiDimensionalMapping): """ @@ -646,9 +606,9 @@ def __getitem__(self, indexslice): raise IndexError("Boolean index must match length of sliced object") selection = zip(indexslice, self.data.items()) return self.clone([item for c, item in selection if c]) - elif indexslice == () and not self.kdims: + elif isinstance(indexslice, tuple) and indexslice == () and not self.kdims: return self.data[()] - elif indexslice in [Ellipsis, ()]: + elif (isinstance(indexslice, tuple) and indexslice == ()) or indexslice is Ellipsis: return self elif any(Ellipsis is sl for sl in wrap_tuple(indexslice)): indexslice = process_ellipses(self, indexslice) @@ -808,7 +768,7 @@ def __init__(self, initial_items=None, kdims=None, group=None, label=None, **par self._type = None self._group_check, self.group = None, group self._label_check, self.label = None, label - super(UniformNdMapping, self).__init__(initial_items, kdims=kdims, **params) + super().__init__(initial_items, kdims=kdims, **params) def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): @@ -1015,7 +975,7 @@ def _item_check(self, dim_vals, data): elif self.type is not None and (type(data) != self.type): raise AssertionError("%s must only contain one type of object, not both %s and %s." 
% (self.__class__.__name__, type(data).__name__, self.type.__name__)) - super(UniformNdMapping, self)._item_check(dim_vals, data) + super()._item_check(dim_vals, data) def __mul__(self, other, reverse=False): diff --git a/holoviews/core/operation.py b/holoviews/core/operation.py index 9622b83024..e8b63e7edc 100644 --- a/holoviews/core/operation.py +++ b/holoviews/core/operation.py @@ -3,9 +3,11 @@ the purposes of analysis or visualization. """ import param + from .dimension import ViewableElement from .element import Element from .layout import Layout +from .options import Store from .overlay import NdOverlay, Overlay from .spaces import Callable, HoloMap from . import util, Dataset @@ -57,7 +59,7 @@ class Operation(param.ParameterizedFunction): visualization should update this stream with range changes originating from the newly generated axes.""") - streams = param.List(default=[], doc=""" + streams = param.ClassSelector(default=[], class_=(dict, list), doc=""" List of streams that are applied if dynamic=True, allowing for dynamic interaction with the plot.""") @@ -77,6 +79,9 @@ class Operation(param.ParameterizedFunction): # the input of the operation to the result _propagate_dataset = True + # Options to transfer from the input element to the transformed element + _transfer_options = [] + @classmethod def search(cls, element, pattern): """ @@ -127,7 +132,6 @@ def _apply(self, element, key=None): for hook in self._preprocess_hooks: kwargs.update(hook(self, element)) - element_pipeline = getattr(element, '_pipeline', None) if hasattr(element, '_in_method'): @@ -138,6 +142,12 @@ def _apply(self, element, key=None): if hasattr(element, '_in_method') and not in_method: element._in_method = in_method + if self._transfer_options: + for backend in Store.loaded_backends(): + Store.transfer_options( + element, ret, backend, self._transfer_options, level=1 + ) + for hook in self._postprocess_hooks: ret = hook(self, ret, **kwargs) @@ -168,6 +178,9 @@ def 
process_element(self, element, key, **params): The process_element method allows a single element to be operated on given an externally supplied key. """ + if self._per_element and not isinstance(element, Element): + return element.clone({k: self.process_element(el, key, **params) + for k, el in element.items()}) if hasattr(self, 'p'): if self._allow_extra_keywords: extras = self.p._extract_extra_keywords(params) @@ -203,6 +216,7 @@ def __call__(self, element, **kwargs): kwargs['streams'] = self.p.streams kwargs['per_element'] = self._per_element kwargs['link_dataset'] = self._propagate_dataset + kwargs['link_inputs'] = self.p.link_inputs return element.apply(self, **kwargs) @@ -220,4 +234,4 @@ class OperationCallable(Callable): def __init__(self, callable, **kwargs): if 'operation' not in kwargs: raise ValueError('An OperationCallable must have an operation specified') - super(OperationCallable, self).__init__(callable, **kwargs) + super().__init__(callable, **kwargs) diff --git a/holoviews/core/options.py b/holoviews/core/options.py index 8dde88f9d9..57d5435efd 100644 --- a/holoviews/core/options.py +++ b/holoviews/core/options.py @@ -9,7 +9,7 @@ Cycle: Used to define infinite cycles over a finite set of elements, using - either an explicit list or some pre-defined collection (e.g from + either an explicit list or some pre-defined collection (e.g. from matplotlib rcParams). For instance, a Cycle object can be used loop a set of display colors for multiple curves on a single axis. @@ -32,19 +32,22 @@ extension together. 
""" -import pickle -import traceback import difflib import inspect +import pickle +import traceback + from contextlib import contextmanager from collections import defaultdict import numpy as np - import param + from .accessors import Opts # noqa (clean up in 2.0) from .tree import AttrTree -from .util import sanitize_identifier, group_sanitizer,label_sanitizer, basestring, OrderedDict +from .util import ( + OrderedDict, group_sanitizer, label_sanitizer, sanitize_identifier +) from .pprint import InfoPrinter @@ -74,14 +77,16 @@ def cleanup_custom_options(id, weakref=None): if not weakrefs: Store._weakrefs.pop(id, None) except Exception as e: - raise Exception('Cleanup of custom options tree with id %s ' - 'failed with the following exception: %s, ' - 'an unreferenced orphan tree may persist in ' - 'memory' % (e, id)) + raise Exception( + f"Cleanup of custom options tree with id '{id}' failed " + f"with the following exception: {e}, an unreferenced " + "orphan tree may persist in memory." + ) + def lookup_options(obj, group, backend): """ - Given a HoloViews object, a plot option group (e.g 'style') and + Given a HoloViews object, a plot option group (e.g. 'style') and backend, return the corresponding Options object. """ plot_class = None @@ -100,7 +105,6 @@ def lookup_options(obj, group, backend): return node - class CallbackError(RuntimeError): """ An error raised during a callback. @@ -115,8 +119,7 @@ class SkipRendering(Exception): """ def __init__(self, message="", warn=True): self.warn = warn - super(SkipRendering, self).__init__(message) - + super().__init__(message) class OptionError(Exception): @@ -126,23 +129,24 @@ class OptionError(Exception): readable message for the user if caught and processed appropriately. 
""" + def __init__(self, invalid_keyword, allowed_keywords, group_name=None, path=None): - super(OptionError, self).__init__(self.message(invalid_keyword, - allowed_keywords, - group_name, path)) + super().__init__(self.message(invalid_keyword, + allowed_keywords, + group_name, path)) self.invalid_keyword = invalid_keyword self.allowed_keywords = allowed_keywords self.group_name =group_name self.path = path - def message(self, invalid_keyword, allowed_keywords, group_name, path): - msg = ("Invalid option %s, valid options are: %s" - % (repr(invalid_keyword), str(allowed_keywords))) + msg = ( + f"Invalid option {invalid_keyword!r}, valid options are: " + f"{allowed_keywords}." + ) if path and group_name: - msg = ("Invalid key for group %r on path %r;\n" - % (group_name, path)) + msg + msg = f"Invalid key for group {group_name!r} on path {path};\n{msg}" return msg def format_options_error(self): @@ -159,30 +163,23 @@ def format_options_error(self): similarity = 'Similar' loaded_backends = Store.loaded_backends() - target = 'for {0}'.format(target) if target else '' + target = f'for {target}' if target else '' if len(loaded_backends) == 1: - loaded=' in loaded backend {0!r}'.format(loaded_backends[0]) + loaded = f' in loaded backend {loaded_backends[0]!r}' else: - backend_list = ', '.join(['%r'% b for b in loaded_backends[:-1]]) - loaded=' in loaded backends {0} and {1!r}'.format(backend_list, - loaded_backends[-1]) - - suggestion = ("If you believe this keyword is correct, please make sure " - "the backend has been imported or loaded with the " - "hv.extension.") - - group = '{0} option'.format(self.group_name) if self.group_name else 'keyword' - msg=('Unexpected {group} {kw} {target}{loaded}.\n\n' - '{similarity} keywords in the currently active ' - '{current_backend} renderer are: {matches}\n\n{suggestion}') - return msg.format(kw="'%s'" % self.invalid_keyword, - target=target, - group=group, - loaded=loaded, similarity=similarity, - 
current_backend=repr(Store.current_backend), - matches=matches, - suggestion=suggestion) + backend_list = ', '.join([repr(b) for b in loaded_backends[:-1]]) + loaded = f' in loaded backends {backend_list} and {loaded_backends[-1]!r}' + + group = f'{self.group_name} option' if self.group_name else 'keyword' + return ( + f"Unexpected {group} '{self.invalid_keyword}' {target}" + f"{loaded}.\n\n{similarity} keywords in the currently " + f"active '{Store.current_backend}' renderer are: " + f"{matches}\n\nIf you believe this keyword is correct, " + "please make sure the backend has been imported or loaded " + "with the hv.extension." + ) class AbbreviatedException(Exception): @@ -201,10 +198,11 @@ def __init__(self, etype, value, traceback): self.msg = str(value) def __str__(self): - abbrev = '%s: %s' % (self.etype.__name__, self.msg) - msg = ('To view the original traceback, catch this exception ' - 'and call print_traceback() method.') - return '%s\n\n%s' % (abbrev, msg) + return ( + f'{self.etype.__name__}: {self.msg}\n\n' + 'To view the original traceback, catch this exception and ' + 'call print_traceback() method.' + ) def print_traceback(self): """ @@ -235,8 +233,10 @@ class parameters on Options. 
""" settings = (Options.skip_invalid, Options.warn_on_skip) (Options.skip_invalid, Options.warn_on_skip) = (skip_invalid, warn_on_skip) - yield - (Options.skip_invalid, Options.warn_on_skip) = settings + try: + yield + finally: + (Options.skip_invalid, Options.warn_on_skip) = settings class Keywords(param.Parameterized): @@ -260,10 +260,10 @@ class Keywords(param.Parameterized): def __init__(self, values=[], target=None): - strings = [isinstance(v, (str,basestring)) for v in values] + strings = [isinstance(v, str) for v in values] if False in strings: - raise ValueError('All keywords must be strings: {0}'.format(values)) - super(Keywords, self).__init__(values=sorted(values), + raise ValueError(f'All keywords must be strings: {values}') + super().__init__(values=sorted(values), target=target) def __add__(self, other): @@ -316,12 +316,12 @@ class Cycle(param.Parameterized): def __init__(self, cycle=None, **params): if cycle is not None: - if isinstance(cycle, basestring): + if isinstance(cycle, str): params['key'] = cycle else: params['values'] = cycle params['key'] = None - super(Cycle, self).__init__(**params) + super().__init__(**params) self.values = self._get_values() @@ -428,7 +428,7 @@ def _get_values(self): -class Options(param.Parameterized): +class Options: """ An Options object holds a collection of keyword options. In addition, Options support (optional) keyword validation as well as @@ -439,25 +439,14 @@ class Options(param.Parameterized): can create a new Options object inheriting the parent options. """ - allowed_keywords = param.ClassSelector(class_=Keywords, doc=""" - Optional list of strings corresponding to the allowed keywords.""") - - key = param.String(default=None, allow_None=True, doc=""" - Optional specification of the options key name. 
For instance, - key could be 'plot' or 'style'.""") - - merge_keywords = param.Boolean(default=True, doc=""" - Whether to merge with the existing keywords if the corresponding - node already exists""") - - skip_invalid = param.Boolean(default=True, doc=""" - Whether all Options instances should skip invalid keywords or - raise and exception. May only be specified at the class level.""") + # Whether all Options instances should skip invalid keywords or + # raise an exception. + skip_invalid = True - warn_on_skip = param.Boolean(default=True, doc=""" - Whether all Options instances should generate warnings when - skipping over invalid keywords or not. May only be specified at - the class level.""") + # Whether all Options instances should generate warnings when + # skipping over invalid keywords or not. May only be specified at + # the class level. + warn_on_skip = True _option_groups = ['style', 'plot', 'norm', 'output'] @@ -491,8 +480,9 @@ def __init__(self, key=None, allowed_keywords=[], merge_keywords=True, allowed_keywords = (allowed_keywords if isinstance(allowed_keywords, Keywords) else Keywords(allowed_keywords)) - super(Options, self).__init__(allowed_keywords=allowed_keywords, - merge_keywords=merge_keywords, key=key) + self.allowed_keywords = allowed_keywords + self.merge_keywords = merge_keywords + self.key = key def keywords_target(self, target): """ @@ -523,12 +513,10 @@ def __call__(self, allowed_keywords=None, **kwargs): inherited_style = dict(allowed_keywords=allowed_keywords, **kwargs) return self.__class__(**dict(self.kwargs, **inherited_style)) - def keys(self): "The keyword names across the supplied options."
return sorted(list(self.kwargs.keys())) - def max_cycles(self, num): """ Truncates all contained Palette objects to a maximum number @@ -539,7 +527,6 @@ def max_cycles(self, num): for kw, arg in self.kwargs.items()} return self(max_cycles=num, **kwargs) - @property def cyclic(self): "Returns True if the options cycle, otherwise False" @@ -561,7 +548,6 @@ def __getitem__(self, index): static = {k:v for k,v in self.kwargs.items() if not isinstance(v, Cycle)} return dict(static, **options) - @property def options(self): "Access of the options keywords when no cycles are defined." @@ -571,16 +557,15 @@ def options(self): raise Exception("The options property may only be used" " with non-cyclic Options.") - def __repr__(self): - kws = ', '.join("%s=%r" % (k,self.kwargs[k]) for k in sorted(self.kwargs.keys())) - + kws = ', '.join(f"{k}={self.kwargs[k]!r}" for k in sorted(self.kwargs.keys())) + cls_name = type(self).__name__ if self.key and self.key[0].isupper() and kws: - return "%s(%s, %s)" % (self.__class__.__name__, repr(self.key), kws) + return f"{cls_name}({self.key!r}, {kws})" elif self.key and self.key[0].isupper(): - return "%s(%s)" % (self.__class__.__name__, repr(self.key)) + return f"{cls_name}({self.key!r})" else: - return "%s(%s)" % (self.__class__.__name__, kws) + return f"{cls_name}({kws})" def __str__(self): return repr(self) @@ -601,8 +586,8 @@ class OptionTree(AttrTree): inheritance for a given group up to the root of the tree. When constructing an OptionTree, you can specify the option groups - as a list (i.e empty initial option groups at the root) or as a - dictionary (e.g groups={'style':Option()}). You can also + as a list (i.e. empty initial option groups at the root) or as a + dictionary (e.g. groups={'style':Option()}). You can also initialize the OptionTree with the options argument together with the **kwargs - see StoreOptions.merge_options for more information on the options specification syntax. 
@@ -614,12 +599,13 @@ class OptionTree(AttrTree): """ def __init__(self, items=None, identifier=None, parent=None, - groups=None, options=None, **kwargs): + groups=None, options=None, backend=None, **kwargs): if groups is None: raise ValueError('Please supply groups list or dictionary') _groups = {g:Options() for g in groups} if isinstance(groups, list) else groups + self.__dict__['backend'] = backend self.__dict__['groups'] = _groups self.__dict__['_instantiated'] = False AttrTree.__init__(self, items, identifier, parent) @@ -630,12 +616,13 @@ def __init__(self, items=None, identifier=None, parent=None, if root_groups and isinstance(groups, list): self.__dict__['groups'] = {g:Options(**root_groups.get(g,{})) for g in _groups.keys()} elif root_groups: - raise Exception("Group specification as a dictionary only supported if " - "the root node '.' syntax not used in the options.") + raise Exception( + "Group specification as a dictionary only supported if " + "the root node '.' syntax not used in the options." + ) if options: StoreOptions.apply_customizations(options, self) - def _merge_options(self, identifier, group_name, options): """ Computes a merged Options object for the given group @@ -643,23 +630,26 @@ def _merge_options(self, identifier, group_name, options): new Options which are passed in. 
""" if group_name not in self.groups: - raise KeyError("Group %s not defined on SettingTree" % group_name) + raise KeyError(f"Group {group_name} not defined on SettingTree.") if identifier in self.children: current_node = self[identifier] group_options = current_node.groups[group_name] else: - #When creating a node (nothing to merge with) ensure it is empty - group_options = Options(group_name, - allowed_keywords=self.groups[group_name].allowed_keywords) + # When creating a node (nothing to merge with) ensure it is empty + group_options = Options( + group_name, allowed_keywords=self.groups[group_name].allowed_keywords + ) override_kwargs = dict(options.kwargs) old_allowed = group_options.allowed_keywords override_kwargs['allowed_keywords'] = options.allowed_keywords + old_allowed try: - return (group_options(**override_kwargs) - if options.merge_keywords else Options(group_name, **override_kwargs)) + if options.merge_keywords: + return group_options(**override_kwargs) + else: + return Options(group_name, **override_kwargs) except OptionError as e: raise OptionError(e.invalid_keyword, e.allowed_keywords, @@ -669,8 +659,7 @@ def _merge_options(self, identifier, group_name, options): def __getitem__(self, item): if item in self.groups: return self.groups[item] - return super(OptionTree, self).__getitem__(item) - + return super().__getitem__(item) def __getattr__(self, identifier): """ @@ -679,10 +668,13 @@ def __getattr__(self, identifier): """ try: return super(AttrTree, self).__getattr__(identifier) - except AttributeError: pass + except AttributeError: + pass - if identifier.startswith('_'): raise AttributeError(str(identifier)) - elif self.fixed==True: raise AttributeError(self._fixed_error % identifier) + if identifier.startswith('_'): + raise AttributeError(str(identifier)) + elif self.fixed==True: + raise AttributeError(self._fixed_error % identifier) valid_id = sanitize_identifier(identifier, escape=False) if valid_id in self.children: @@ -695,15 +687,22 @@ 
def __getattr__(self, identifier): def __setattr__(self, identifier, val): + # Invalidate the lookup cache whenever an option is changed + Store._lookup_cache[self.backend] = {} + identifier = sanitize_identifier(identifier, escape=False) new_groups = {} if isinstance(val, dict): group_items = val elif isinstance(val, Options) and val.key is None: - raise AttributeError("Options object needs to have a group name specified.") + raise AttributeError( + "Options object needs to have a group name specified." + ) elif isinstance(val, Options) and val.key[0].isupper(): - raise AttributeError("OptionTree only accepts Options using keys that are one of %s." % - ', '.join(repr(el) for el in Options._option_groups)) + groups = ', '.join(repr(el) for el in Options._option_groups) + raise AttributeError( + f"OptionTree only accepts Options using keys that are one of {groups}." + ) elif isinstance(val, Options): group_items = {val.key: val} elif isinstance(val, OptionTree): @@ -719,17 +718,16 @@ def __setattr__(self, identifier, val): if new_groups: data = self[identifier].items() if identifier in self.children else None - new_node = OptionTree(data, identifier=identifier, parent=self, groups=new_groups) + new_node = OptionTree(data, identifier=identifier, parent=self, groups=new_groups, backend=self.backend) else: raise ValueError('OptionTree only accepts a dictionary of Options.') - super(OptionTree, self).__setattr__(identifier, new_node) + super().__setattr__(identifier, new_node) if isinstance(val, OptionTree): for subtree in val: self[identifier].__setattr__(subtree.identifier, subtree) - def find(self, path, mode='node'): """ Find the closest node or path to an the arbitrary path that is @@ -751,7 +749,6 @@ def find(self, path, mode='node'): continue return item if mode == 'node' else item.path - def closest(self, obj, group, defaults=True, backend=None): """ This method is designed to be called from the root of the @@ -761,13 +758,23 @@ def closest(self, obj, group, 
defaults=True, backend=None): In addition, closest supports custom options by checking the object """ - components = (obj.__class__.__name__, - group_sanitizer(obj.group), - label_sanitizer(obj.label)) - target = '.'.join((c for c in components if c)) - return self.find(components).options( + opts_spec = ( + obj.__class__.__name__, + group_sanitizer(obj.group), + label_sanitizer(obj.label) + ) + # Try to get a cache hit in the backend lookup cache + backend = backend or Store.current_backend + cache = Store._lookup_cache.get(backend, {}) + cache_key = opts_spec+(group, defaults, id(self.root)) + if cache_key in cache: + return cache[cache_key] + + target = '.'.join((c for c in opts_spec if c)) + options = self.find(opts_spec).options( group, target=target, defaults=defaults, backend=backend) - + cache[cache_key] = options + return options def options(self, group, target=None, defaults=True, backend=None): """ @@ -778,19 +785,20 @@ def options(self, group, target=None, defaults=True, backend=None): target = self.path if self.groups.get(group, None) is None: return None - if self.parent is None and target and (self is not Store.options(backend=backend)) and defaults: + options = Store.options(backend=backend) + if self.parent is None and target and (self is not options) and defaults: root_name = self.__class__.__name__ replacement = root_name + ('' if len(target) == len(root_name) else '.') - option_key = target.replace(replacement,'') - match = Store.options(backend=backend).find(option_key) - if match is not Store.options(backend=backend): + option_key = target.replace(replacement, '') + match = options.find(option_key) + if match is not options: return match.options(group) else: - return Options() + return EMPTY_OPTIONS elif self.parent is None: return self.groups[group] - parent_opts = self.parent.options(group,target, defaults, backend=backend) + parent_opts = self.parent.options(group, target, defaults, backend=backend) return Options(**dict(parent_opts.kwargs, 
**self.groups[group].kwargs)) def __repr__(self): @@ -819,17 +827,20 @@ def __repr__(self): especs.append((t, kws)) if especs: - format_kws = [(t,'dict(%s)' - % ', '.join('%s=%r' % (k,v) for k,v in sorted(kws.items()))) - for t,kws in especs] + format_kws = [ + (t, 'dict(%s)' % ', '.join(f'{k}={v}' for k, v in sorted(kws.items()))) + for t, kws in especs + ] ljust = max(len(t) for t,_ in format_kws) sep = (tab*2) if len(format_kws) >1 else '' - entries = sep + esep.join([sep+'%r : %s' % (t.ljust(ljust),v) for t,v in format_kws]) - gspecs.append(('%s%s={\n%s}' if len(format_kws)>1 else '%s%s={%s}') % (tab,group, entries)) + entries = sep + esep.join([f'{sep}{t.ljust(ljust)} : {v}' for t,v in format_kws]) + gspecs.append(('%s%s={\n%s}' if len(format_kws)>1 else '%s%s={%s}') % (tab, group, entries)) - return 'OptionTree(groups=%s,\n%s\n)' % (groups.keys(), gsep.join(gspecs)) + return f'OptionTree(groups={groups.keys()},\n{gsep.join(gspecs)}\n)' +EMPTY_OPTIONS = Options() + class Compositor(param.Parameterized): """ @@ -892,12 +903,17 @@ def strongest_match(cls, overlay, mode, backend=None): The best match is defined as the compositor operation with the highest match value as returned by the match_level method. 
""" - match_strength = [(op.match_level(overlay), op) for op in cls.definitions - if op.mode == mode and (not op.backends or backend in op.backends)] - matches = [(match[0], op, match[1]) for (match, op) in match_strength if match is not None] - if matches == []: return None - else: return sorted(matches)[0] - + match_strength = [ + (op.match_level(overlay), op) for op in cls.definitions + if op.mode == mode and (not op.backends or backend in op.backends) + ] + matches = [ + (match[0], op, match[1]) for (match, op) in match_strength + if match is not None + ] + if matches == []: + return None + return sorted(matches)[0] @classmethod def collapse_element(cls, overlay, ranges=None, mode='data', backend=None): @@ -928,6 +944,8 @@ def collapse_element(cls, overlay, ranges=None, mode='data', backend=None): sliced = overlay.clone(values[start:stop]) items = sliced.traverse(lambda x: x, [Element]) if applicable_op and all(el in processed[applicable_op] for el in items): + if unpack and len(overlay) == 1: + return overlay.values()[0] return overlay result = applicable_op.apply(sliced, ranges, backend) if applicable_op.group: @@ -946,7 +964,6 @@ def collapse_element(cls, overlay, ranges=None, mode='data', backend=None): return overlay prev_ids = new_ids - @classmethod def collapse(cls, holomap, ranges=None, mode='data'): """ @@ -963,7 +980,6 @@ def collapse(cls, holomap, ranges=None, mode='data'): clone[key] = cls.collapse_element(overlay, ranges, mode) return clone - @classmethod def map(cls, obj, mode='data', backend=None): """ @@ -982,7 +998,6 @@ def map(cls, obj, mode='data', backend=None): element_patterns) return obj - @classmethod def register(cls, compositor): defined_patterns = [op.pattern for op in cls.definitions] @@ -992,7 +1007,6 @@ def register(cls, compositor): if compositor.operation not in cls.operations: cls.operations.append(compositor.operation) - def __init__(self, pattern, operation, group, mode, transfer_options=False, transfer_parameters=False, 
output_type=None, backends=None, **kwargs): self._pattern_spec, labels = [], [] @@ -1012,15 +1026,14 @@ def __init__(self, pattern, operation, group, mode, transfer_options=False, self.label = '' self._output_type = output_type - super(Compositor, self).__init__(group=group, - pattern=pattern, - operation=operation, - mode=mode, - backends=backends or [], - kwargs=kwargs, - transfer_options=transfer_options, - transfer_parameters=transfer_parameters) - + super().__init__(group=group, + pattern=pattern, + operation=operation, + mode=mode, + backends=backends or [], + kwargs=kwargs, + transfer_options=transfer_options, + transfer_parameters=transfer_parameters) @property def output_type(self): @@ -1030,7 +1043,6 @@ def output_type(self): """ return self._output_type or self.operation.output_type - def _slice_match_level(self, overlay_items): """ Find the match strength for a list of overlay items that must @@ -1055,7 +1067,6 @@ def _slice_match_level(self, overlay_items): return None return level - def match_level(self, overlay): """ Given an overlay, return the match level and applicable slice @@ -1080,7 +1091,6 @@ def match_level(self, overlay): return (best_lvl, match_slice) if best_lvl != 0 else None - def apply(self, value, input_ranges, backend=None): """ Apply the compositor on the input with the given input ranges. @@ -1096,7 +1106,7 @@ def apply(self, value, input_ranges, backend=None): if k in self.operation.param}) transformed = self.operation(value, input_ranges=input_ranges, **kwargs) - if self.transfer_options: + if self.transfer_options and value is not transformed: Store.transfer_options(value, transformed, backend) return transformed @@ -1105,7 +1115,7 @@ class Store(object): """ The Store is what links up HoloViews objects to their corresponding options and to the appropriate classes of the chosen - backend (e.g for rendering). + backend (e.g. for rendering). 
In addition, Store supports pickle operations that automatically pickle and unpickle the corresponding options for a HoloViews @@ -1133,11 +1143,14 @@ class Store(object): _weakrefs = {} _options_context = False + # Backend option caches + _lookup_cache = {} + # A list of hooks to call after registering the plot and style options option_setters = [] # A dictionary of custom OptionTree by custom object id by backend - _custom_options = {'matplotlib':{}} + _custom_options = {'matplotlib': {}} load_counter_offset = None save_option_state = False @@ -1158,6 +1171,7 @@ def options(cls, backend=None, val=None): if val is None: return cls._options[backend] else: + cls._lookup_cache[backend] = {} cls._options[backend] = val @classmethod @@ -1249,7 +1263,6 @@ def info(cls, obj, ansi=True, backend='matplotlib', visualization=True, listed.append(c) return info - @classmethod def lookup_options(cls, backend, obj, group, defaults=True): # Current custom_options dict may not have entry for obj.id @@ -1276,30 +1289,37 @@ def lookup(cls, backend, obj): "multiple custom trees (ids %s)" % idlist) return cls._custom_options[backend][list(ids)[0]] - @classmethod - def transfer_options(cls, obj, new_obj, backend=None): + def transfer_options(cls, obj, new_obj, backend=None, names=None, level=3): """ Transfers options for all backends from one object to another. Drops any options defined in the supplied drop list. 
""" + if obj is new_obj: + return backend = cls.current_backend if backend is None else backend type_name = type(new_obj).__name__ group = type_name if obj.group == type(obj).__name__ else obj.group - spec = '.'.join([s for s in (type_name, group, obj.label) if s]) + spec = '.'.join([s for s in (type_name, group, obj.label)[:level] if s]) options = [] for group in Options._option_groups: opts = cls.lookup_options(backend, obj, group) - if opts and opts.kwargs: options.append(Options(group, **opts.kwargs)) + if not opts: + continue + new_opts = cls.lookup_options(backend, new_obj, group, defaults=False) + existing = new_opts.kwargs if new_opts else {} + filtered = {k: v for k, v in opts.kwargs.items() + if (names is None or k in names) and k not in existing} + if filtered: + options.append(Options(group, **filtered)) if options: StoreOptions.set_options(new_obj, {spec: options}, backend) - @classmethod def add_style_opts(cls, component, new_options, backend=None): """ Given a component such as an Element (e.g. Image, Curve) or a - container (e.g Layout) specify new style options to be + container (e.g. Layout) specify new style options to be accepted by the corresponding plotting class. Note: This is supplied for advanced users who know which @@ -1308,18 +1328,23 @@ def add_style_opts(cls, component, new_options, backend=None): """ backend = cls.current_backend if backend is None else backend if component not in cls.registry[backend]: - raise ValueError("Component %r not registered to a plotting class" % component) + raise ValueError( + f"Component {component!r} not registered to a plotting class." 
+ ) if not isinstance(new_options, list) or not all(isinstance(el, str) for el in new_options): - raise ValueError("Please supply a list of style option keyword strings") + raise ValueError( + "Please supply a list of style option keyword strings" + ) with param.logging_level('CRITICAL'): for option in new_options: if option not in cls.registry[backend][component].style_opts: plot_class = cls.registry[backend][component] plot_class.style_opts = sorted(plot_class.style_opts+[option]) - cls._options[backend][component.name] = Options('style', merge_keywords=True, allowed_keywords=new_options) - + cls._options[backend][component.name] = Options( + 'style', merge_keywords=True, allowed_keywords=new_options + ) @classmethod def register(cls, associations, backend, style_aliases={}): @@ -1333,7 +1358,7 @@ def register(cls, associations, backend, style_aliases={}): groups = Options._option_groups if backend not in cls._options: - cls._options[backend] = OptionTree([], groups=groups) + cls._options[backend] = OptionTree([], groups=groups, backend=backend) if backend not in cls._custom_options: cls._custom_options[backend] = {} @@ -1353,17 +1378,17 @@ def register(cls, associations, backend, style_aliases={}): plot_opts = Keywords(plot_opts, target=view_class.__name__) style_opts = Keywords(style_opts, target=view_class.__name__) - opt_groups = {'plot': Options(allowed_keywords=plot_opts), - 'output': Options(allowed_keywords=Options._output_allowed_kws), - 'style': Options(allowed_keywords=style_opts), - 'norm': Options(framewise=False, axiswise=False, - allowed_keywords=['framewise', - 'axiswise'])} + opt_groups = { + 'plot': Options(allowed_keywords=plot_opts), + 'output': Options(allowed_keywords=Options._output_allowed_kws), + 'style': Options(allowed_keywords=style_opts), + 'norm': Options(framewise=False, axiswise=False, + allowed_keywords=['framewise', 'axiswise']) + } name = view_class.__name__ cls._options[backend][name] = opt_groups - @classmethod def 
set_display_hook(cls, group, objtype, hook): """ @@ -1374,7 +1399,6 @@ def set_display_hook(cls, group, objtype, hook): """ cls._display_hooks[group][objtype] = hook - @classmethod def render(cls, obj): """ @@ -1400,7 +1424,6 @@ def render(cls, obj): return data, metadata - class StoreOptions(object): """ A collection of utilities for advanced users for creating and @@ -1463,7 +1486,6 @@ def get_object_ids(cls, obj): return set(el for el in obj.traverse(lambda x: getattr(x, 'id', None))) - @classmethod def tree_to_dict(cls, tree): """ @@ -1518,7 +1540,6 @@ def restore_ids(cls, obj, ids): ids = iter(ids) obj.traverse(lambda o: setattr(o, 'id', next(ids))) - @classmethod def apply_customizations(cls, spec, options): """ @@ -1526,13 +1547,13 @@ def apply_customizations(cls, spec, options): """ for key in sorted(spec.keys()): if isinstance(spec[key], (list, tuple)): - customization = {v.key:v for v in spec[key]} + customization = {v.key: v for v in spec[key]} else: - customization = {k:(Options(**v) if isinstance(v, dict) else v) + customization = {k: (Options(**v) if isinstance(v, dict) else v) for k,v in spec[key].items()} # Set the Keywords target on Options from the {type} part of the key. - customization = {k:v.keywords_target(key.split('.')[0]) + customization = {k: v.keywords_target(key.split('.')[0]) for k,v in customization.items()} options[str(key)] = customization return options @@ -1546,17 +1567,20 @@ def validate_spec(cls, spec, backends=None): currently loaded backend. Only useful when invalid keywords generate exceptions instead of - skipping i.e Options.skip_invalid is False. + skipping, i.e. Options.skip_invalid is False. 
""" - loaded_backends = Store.loaded_backends() if backends is None else backends + loaded_backends = Store.loaded_backends() if backends is None else backends error_info = {} backend_errors = defaultdict(set) for backend in loaded_backends: cls.start_recording_skipped() with options_policy(skip_invalid=True, warn_on_skip=False): - options = OptionTree(items=Store.options(backend).data.items(), - groups=Store.options(backend).groups) + options = OptionTree( + items=Store.options(backend).data.items(), + groups=Store.options(backend).groups, + backend=backend + ) cls.apply_customizations(spec, options) for error in cls.stop_recording_skipped(): @@ -1566,7 +1590,6 @@ def validate_spec(cls, spec, backends=None): error_info[error_key+(backend,)] = error.allowed_keywords backend_errors[error_key].add(backend) - for ((keyword, target, group_name), backends) in backend_errors.items(): # If the keyword failed for the target across all loaded backends... if set(backends) == set(loaded_backends): @@ -1594,10 +1617,11 @@ def expand_compositor_keys(cls, spec): keys. For instance a compositor operation returning a group string 'Image' of element type RGB expands to 'RGB.Image'. 
""" - expanded_spec={} + expanded_spec = {} applied_keys = [] - compositor_defs = {el.group:el.output_type.__name__ - for el in Compositor.definitions} + compositor_defs = { + el.group: el.output_type.__name__ for el in Compositor.definitions + } for key, val in spec.items(): if key not in compositor_defs: expanded_spec[key] = val @@ -1608,7 +1632,6 @@ def expand_compositor_keys(cls, spec): expanded_spec[str(type_name+'.'+key)] = val return expanded_spec, applied_keys - @classmethod def create_custom_trees(cls, obj, options=None): """ @@ -1623,10 +1646,11 @@ def create_custom_trees(cls, obj, options=None): clones, id_mapping = {}, [] obj_ids = cls.get_object_ids(obj) offset = cls.id_offset() - obj_ids = [None] if len(obj_ids)==0 else obj_ids + obj_ids = [None] if len(obj_ids) == 0 else obj_ids used_obj_types = [(opt.split('.')[0],) for opt in options] available_options = Store.options() + backend = Store.current_backend used_options = {} for obj_type in available_options: if obj_type in used_obj_types: @@ -1640,12 +1664,16 @@ def create_custom_trees(cls, obj, options=None): for tree_id in obj_ids: if tree_id is not None and tree_id in custom_options: original = custom_options[tree_id] - clone = OptionTree(items = original.items(), - groups = original.groups) + clone = OptionTree( + items=original.items(), + groups=original.groups, + backend=original.backend + ) clones[tree_id + offset + 1] = clone id_mapping.append((tree_id, tree_id + offset + 1)) else: - clone = OptionTree(groups=available_options.groups) + clone = OptionTree(groups=available_options.groups, + backend=backend) clones[offset] = clone id_mapping.append((tree_id, offset)) @@ -1656,7 +1684,6 @@ def create_custom_trees(cls, obj, options=None): return {k: cls.apply_customizations(options, t) if options else t for k,t in clones.items()}, id_mapping - @classmethod def merge_options(cls, groups, options=None,**kwargs): """ @@ -1673,8 +1700,9 @@ def merge_options(cls, groups, options=None,**kwargs): if 
(options is not None and set(options.keys()) <= groups): kwargs, options = options, None elif (options is not None and any(k in groups for k in options)): - raise Exception("All keys must be a subset of %s" - % ', '.join(groups)) + raise Exception( + f"All keys must be a subset of {', '.join(groups)}." + ) options = {} if (options is None) else dict(**options) all_keys = set(k for d in kwargs.values() for k in d) @@ -1726,18 +1754,19 @@ def options(cls, obj, options=None, **kwargs): See holoviews.core.options.set_options function for more information on the options specification format. """ - if (options is None) and kwargs == {}: yield - else: + if (options is not None) or kwargs: Store._options_context = True optstate = cls.state(obj) groups = Store.options().groups.keys() options = cls.merge_options(groups, options, **kwargs) cls.set_options(obj, options) - yield - if options is not None: - Store._options_context = True - cls.state(obj, state=optstate) + try: + yield + finally: + if options is not None: + Store._options_context = True + cls.state(obj, state=optstate) @classmethod def id_offset(cls): @@ -1750,10 +1779,9 @@ def id_offset(cls): store_ids = Store.custom_options(backend=backend).keys() max_id = max(store_ids)+1 if len(store_ids) > 0 else 0 max_ids.append(max_id) - # If no backends defined (e.g plotting not imported) return zero + # If no backends defined (e.g. 
plotting not imported) return zero return max(max_ids) if len(max_ids) else 0 - @classmethod def update_backends(cls, id_mapping, custom_trees, backend=None): """ @@ -1765,6 +1793,7 @@ def update_backends(cls, id_mapping, custom_trees, backend=None): backend = Store.current_backend if backend is None else backend # Update the custom option entries for the current backend Store.custom_options(backend=backend).update(custom_trees) + Store._lookup_cache[backend] = {} # Propagate option ids for non-selected backends for b in Store.loaded_backends(): @@ -1776,7 +1805,6 @@ def update_backends(cls, id_mapping, custom_trees, backend=None): if tree is not None: backend_trees[new_id] = tree - @classmethod def set_options(cls, obj, options=None, backend=None, **kwargs): """ @@ -1821,7 +1849,8 @@ def set_options(cls, obj, options=None, backend=None, **kwargs): # {'Image.Channel:{'plot': Options(size=50), # 'style': Options('style', cmap='Blues')]} - options = cls.merge_options(Store.options(backend=backend).groups.keys(), options, **kwargs) + groups = Store.options(backend=backend).groups.keys() + options = cls.merge_options(groups, options, **kwargs) spec, compositor_applied = cls.expand_compositor_keys(options) custom_trees, id_mapping = cls.create_custom_trees(obj, spec) cls.update_backends(id_mapping, custom_trees, backend=backend) @@ -1829,7 +1858,10 @@ def set_options(cls, obj, options=None, backend=None, **kwargs): # Propagate ids to the objects not_used = [] for (match_id, new_id) in id_mapping: - applied = cls.propagate_ids(obj, match_id, new_id, compositor_applied+list(spec.keys()), backend=backend) + key = compositor_applied+list(spec.keys()) + applied = cls.propagate_ids( + obj, match_id, new_id, key, backend=backend + ) if not applied: not_used.append(new_id) diff --git a/holoviews/core/overlay.py b/holoviews/core/overlay.py index 50e6cecb5c..86f358f3ea 100644 --- a/holoviews/core/overlay.py +++ b/holoviews/core/overlay.py @@ -116,7 +116,7 @@ def 
dimension_values(self, dimension, expanded=True, flat=True): values.append(el.dimension_values(dimension)) found = True if not found: - return super(CompositeOverlay, self).dimension_values(dimension, expanded, flat) + return super().dimension_values(dimension, expanded, flat) values = [v for v in values if v is not None and len(v)] if not values: return np.array() @@ -140,7 +140,7 @@ def __init__(self, items=None, group=None, label=None, **params): self.__dict__['_fixed'] = False self.__dict__['_group'] = group self.__dict__['_label'] = label - super(Overlay, self).__init__(items, **params) + super().__init__(items, **params) def __getitem__(self, key): """ @@ -169,7 +169,7 @@ def get(self, identifier, default=None): return values[identifier] else: return default - return super(Overlay, self).get(identifier, default) + return super().get(identifier, default) def __add__(self, other): @@ -276,25 +276,6 @@ def ddims(self): def shape(self): raise NotImplementedError - # Deprecated methods - - def collapse(self, function): - "Deprecated method to collapse layers in the Overlay." 
- self.param.warning('Overlay.collapse is deprecated, to' - 'collapse multiple elements use a HoloMap.') - - elements = list(self) - types = [type(el) for el in elements] - values = [el.group for el in elements] - if not len(set(types)) == 1 and len(set(values)) == 1: - raise Exception("Overlay is not homogeneous in type or group " - "and cannot be collapsed.") - else: - return elements[0].clone(types[0].collapse_data([el.data for el in elements], - function, self.kdims)) - - - class NdOverlay(Overlayable, UniformNdMapping, CompositeOverlay): """ @@ -309,7 +290,7 @@ class NdOverlay(Overlayable, UniformNdMapping, CompositeOverlay): _deep_indexable = True def __init__(self, overlays=None, kdims=None, **params): - super(NdOverlay, self).__init__(overlays, kdims=kdims, **params) + super().__init__(overlays, kdims=kdims, **params) def decollate(self): """Packs NdOverlay of DynamicMaps into a single DynamicMap that returns an diff --git a/holoviews/core/pprint.py b/holoviews/core/pprint.py index c0ebb8e277..7b50bfb53a 100644 --- a/holoviews/core/pprint.py +++ b/holoviews/core/pprint.py @@ -9,17 +9,17 @@ far too large to be practical. Instead, all HoloViews objects can be represented as tree structures, showing how to access and index into your data. 
- -In addition, there are several different ways of """ -import sys, re +import re import textwrap + import param -# IPython not required to import ParamPager + from param.ipython import ParamPager from param.parameterized import bothmethod -from holoviews.core.util import group_sanitizer, label_sanitizer + +from .util import group_sanitizer, label_sanitizer @@ -277,12 +277,7 @@ class PrettyPrinter(param.Parameterized): @bothmethod def pprint(cls_or_slf, node): - reprval = cls_or_slf.serialize(cls_or_slf.recurse(node)) - if sys.version_info.major == 2: - return str(reprval.encode("utf8")) - else: - return str(reprval) - + return cls_or_slf.serialize(cls_or_slf.recurse(node)) @bothmethod def serialize(cls_or_slf, lines): diff --git a/holoviews/core/spaces.py b/holoviews/core/spaces.py index 118504038e..0b00b66b62 100644 --- a/holoviews/core/spaces.py +++ b/holoviews/core/spaces.py @@ -19,7 +19,7 @@ from .ndmapping import UniformNdMapping, NdMapping, item_check from .overlay import Overlay, CompositeOverlay, NdOverlay, Overlayable from .options import Store, StoreOptions -from ..streams import Stream +from ..streams import Stream, Params, streams_list_from_dict @@ -38,7 +38,7 @@ class HoloMap(UniformNdMapping, Overlayable): data_type = (ViewableElement, NdMapping, Layout) def __init__(self, initial_items=None, kdims=None, group=None, label=None, **params): - super(HoloMap, self).__init__(initial_items, kdims, group, label, **params) + super().__init__(initial_items, kdims, group, label, **params) @property def opts(self): @@ -143,14 +143,6 @@ def options(self, *args, **kwargs): for k, v in self.data.items()]) return self.clone(data) - - def split_overlays(self): - "Deprecated method to split overlays inside the HoloMap." 
- self.param.warning("split_overlays is deprecated and is now " - "a private method.") - return self._split_overlays() - - def _split_overlays(self): "Splits overlays inside the HoloMap into list of HoloMaps" if not issubclass(self.type, CompositeOverlay): @@ -170,7 +162,6 @@ def _split_overlays(self): keys.append(k) return keys, maps - def _dimension_keys(self): """ Helper for __mul__ that returns the list of keys together with @@ -179,7 +170,6 @@ def _dimension_keys(self): return [tuple(zip([d.name for d in self.kdims], [k] if self.ndims == 1 else k)) for k in self.keys()] - def _dynamic_mul(self, dimensions, other, keys): """ Implements dynamic version of overlaying operation overlaying @@ -369,121 +359,6 @@ def decollate(self): from .decollate import decollate return decollate(self) - - def sample(self, samples=[], bounds=None, **sample_values): - """Samples element values at supplied coordinates. - - Allows sampling of element with a list of coordinates matching - the key dimensions, returning a new object containing just the - selected samples. Supports multiple signatures: - - Sampling with a list of coordinates, e.g.: - - ds.sample([(0, 0), (0.1, 0.2), ...]) - - Sampling a range or grid of coordinates, e.g.: - - 1D: ds.sample(3) - 2D: ds.sample((3, 3)) - - Sampling by keyword, e.g.: - - ds.sample(x=0) - - Args: - samples: List of nd-coordinates to sample - bounds: Bounds of the region to sample - Defined as two-tuple for 1D sampling and four-tuple - for 2D sampling. 
- closest: Whether to snap to closest coordinates - **kwargs: Coordinates specified as keyword pairs - Keywords of dimensions and scalar coordinates - - Returns: - A Table containing the sampled coordinates - """ - self.param.warning('The HoloMap.sample method is deprecated, ' - 'for equivalent functionality use ' - 'HoloMap.apply.sample().collapse().') - - dims = self.last.ndims - if isinstance(samples, tuple) or np.isscalar(samples): - if dims == 1: - xlim = self.last.range(0) - lower, upper = (xlim[0], xlim[1]) if bounds is None else bounds - edges = np.linspace(lower, upper, samples+1) - linsamples = [(l+u)/2.0 for l,u in zip(edges[:-1], edges[1:])] - elif dims == 2: - (rows, cols) = samples - if bounds: - (l,b,r,t) = bounds - else: - l, r = self.last.range(0) - b, t = self.last.range(1) - - xedges = np.linspace(l, r, cols+1) - yedges = np.linspace(b, t, rows+1) - xsamples = [(lx+ux)/2.0 for lx,ux in zip(xedges[:-1], xedges[1:])] - ysamples = [(ly+uy)/2.0 for ly,uy in zip(yedges[:-1], yedges[1:])] - - Y,X = np.meshgrid(ysamples, xsamples) - linsamples = list(zip(X.flat, Y.flat)) - else: - raise NotImplementedError("Regular sampling not implemented " - "for elements with more than two dimensions.") - - samples = list(util.unique_iterator(self.last.closest(linsamples))) - - sampled = self.clone([(k, view.sample(samples, closest=False, - **sample_values)) - for k, view in self.data.items()]) - - from ..element import Table - return Table(sampled.collapse()) - - - def reduce(self, dimensions=None, function=None, spread_fn=None, **reduce_map): - """Applies reduction to elements along the specified dimension(s). - - Allows reducing the values along one or more key dimension - with the supplied function. 
Supports two signatures: - - Reducing with a list of dimensions, e.g.: - - ds.reduce(['x'], np.mean) - - Defining a reduction using keywords, e.g.: - - ds.reduce(x=np.mean) - - Args: - dimensions: Dimension(s) to apply reduction on - Defaults to all key dimensions - function: Reduction operation to apply, e.g. numpy.mean - spreadfn: Secondary reduction to compute value spread - Useful for computing a confidence interval, spread, or - standard deviation. - **reductions: Keyword argument defining reduction - Allows reduction to be defined as keyword pair of - dimension and function - - Returns: - The Dataset after reductions have been applied. - """ - self.param.warning('The HoloMap.reduce method is deprecated, ' - 'for equivalent functionality use ' - 'HoloMap.apply.reduce().collapse().') - - from ..element import Table - reduced_items = [(k, v.reduce(dimensions, function, spread_fn, **reduce_map)) - for k, v in self.items()] - if not isinstance(reduced_items[0][1], Table): - params = dict(util.get_param_values(self.last), - kdims=self.kdims, vdims=self.last.vdims) - return Table(reduced_items, **params) - return Table(self.clone(reduced_items).collapse()) - - def relabel(self, label=None, group=None, depth=1): """Clone object and apply new group and/or label. @@ -499,8 +374,7 @@ def relabel(self, label=None, group=None, depth=1): Returns: Returns relabelled object """ - return super(HoloMap, self).relabel(label=label, group=group, depth=depth) - + return super().relabel(label=label, group=group, depth=depth) def hist(self, dimension=None, num_bins=20, bin_range=None, adjoin=True, individually=True, **kwargs): @@ -575,7 +449,7 @@ class Callable(param.Parameterized): stream requires it and is triggering. A Callable may also specify a stream_mapping which specifies the - objects that are associated with interactive (i.e linked) streams + objects that are associated with interactive (i.e. 
linked) streams when composite objects such as Layouts are returned from the callback. This is required for building interactive, linked visualizations (for the backends that support them) when returning @@ -628,8 +502,8 @@ class Callable(param.Parameterized): the Callable, e.g. when it returns a Layout.""") def __init__(self, callable, **params): - super(Callable, self).__init__(callable=callable, - **dict(params, name=util.callable_name(callable))) + super().__init__(callable=callable, + **dict(params, name=util.callable_name(callable))) self._memoized = {} self._is_overlay = False self.args = None @@ -797,8 +671,6 @@ def dynamicmap_memoization(callable_obj, streams): callable_obj._stream_memoization &= not any(s.transient and s._triggering for s in streams) try: yield - except: - raise finally: callable_obj._stream_memoization = memoization_state @@ -885,7 +757,10 @@ class DynamicMap(HoloMap): List of Stream instances to associate with the DynamicMap. The set of parameter values across these streams will be supplied as keyword arguments to the callback when the events are received, - updating the streams.""" ) + updating the streams. Can also be supplied as a dictionary that + maps parameters or panel widgets to callback argument names that + will then be automatically converted to the equivalent list + format.""") cache_size = param.Integer(default=500, doc=""" The number of entries to cache for fast access. This is an LRU @@ -897,11 +772,13 @@ class DynamicMap(HoloMap): If True, stream parameters are passed to callback as positional arguments. Each positional argument is a dict containing the contents of a stream. The positional stream arguments follow the positional arguments for each kdim, - and they are ordered to match the order of the DynamicMap's streams list. + and they are ordered to match the order of the DynamicMap's streams list. 
""") def __init__(self, callback, initial_items=None, streams=None, **params): streams = (streams or []) + if isinstance(streams, dict): + streams = streams_list_from_dict(streams) # If callback is a parameterized method and watch is disabled add as stream if (params.get('watch', True) and (util.is_param_method(callback, has_deps=True) or @@ -919,7 +796,7 @@ def __init__(self, callback, initial_items=None, streams=None, **params): 'are not Stream instances: {objs}') raise TypeError(msg.format(objs = ', '.join('%r' % el for el in invalid))) - super(DynamicMap, self).__init__(initial_items, callback=callback, streams=valid, **params) + super().__init__(initial_items, callback=callback, streams=valid, **params) if self.callback.noargs: prefix = 'DynamicMaps using generators (or callables without arguments)' @@ -942,6 +819,11 @@ def __init__(self, callback, initial_items=None, streams=None, **params): for stream in self.streams: if stream.source is None: stream.source = self + if isinstance(stream, Params): + for p in stream.parameters: + if isinstance(p.owner, Stream) and p.owner.source is None: + p.owner.source = self + self.periodic = periodic(self) @property @@ -1317,7 +1199,7 @@ def __getitem__(self, key): empty = self._stream_parameters() == [] and self.kdims==[] if dimensionless or empty: raise KeyError('Using dimensionless streams disables DynamicMap cache') - cache = super(DynamicMap,self).__getitem__(key) + cache = super().__getitem__(key) except KeyError: cache = None @@ -1375,7 +1257,7 @@ def select(self, selection_specs=None, **kwargs): """ if selection_specs is not None and not isinstance(selection_specs, (list, tuple)): selection_specs = [selection_specs] - selection = super(DynamicMap, self).select(selection_specs=selection_specs, **kwargs) + selection = super().select(selection_specs=selection_specs, **kwargs) def dynamic_select(obj, **dynkwargs): if selection_specs is not None: matches = any(obj.matches(spec) for spec in selection_specs) @@ -1427,7 
+1309,7 @@ def map(self, map_fn, specs=None, clone=True, link_inputs=True): Returns: Returns the object after the map_fn has been applied """ - deep_mapped = super(DynamicMap, self).map(map_fn, specs, clone) + deep_mapped = super().map(map_fn, specs, clone) if isinstance(deep_mapped, type(self)): from ..util import Dynamic def apply_map(obj, **dynkwargs): @@ -1454,7 +1336,7 @@ def relabel(self, label=None, group=None, depth=1): Returns: Returns relabelled object """ - relabelled = super(DynamicMap, self).relabel(label, group, depth) + relabelled = super().relabel(label, group, depth) if depth > 0: from ..util import Dynamic def dynamic_relabel(obj, **dynkwargs): @@ -1467,14 +1349,6 @@ def dynamic_relabel(obj, **dynkwargs): return dmap return relabelled - - def split_overlays(self): - "Deprecated method to split overlays inside the DynamicMap." - self.param.warning("split_overlays is deprecated and is now " - "a private method.") - return self._split_overlays() - - def _split_overlays(self): """ Splits a DynamicMap into its components. 
Only well defined for @@ -1791,8 +1665,13 @@ def dynamic_hist(obj, **dynkwargs): if isinstance(obj, (NdOverlay, Overlay)): index = kwargs.get('index', 0) obj = obj.get(index) - return obj.hist(num_bins=num_bins, bin_range=bin_range, - adjoin=False, **kwargs) + return obj.hist( + dimension=dimension, + num_bins=num_bins, + bin_range=bin_range, + adjoin=False, + **kwargs + ) from ..util import Dynamic hist = Dynamic(self, streams=self.streams, link_inputs=False, @@ -1823,7 +1702,7 @@ def reindex(self, kdims=[], force=False): if dropped: raise ValueError("DynamicMap does not allow dropping dimensions, " "reindex may only be used to reorder dimensions.") - return super(DynamicMap, self).reindex(kdims, force) + return super().reindex(kdims, force) def drop_dimension(self, dimensions): @@ -1859,7 +1738,7 @@ class GridSpace(UniformNdMapping): kdims = param.List(default=[Dimension("X"), Dimension("Y")], bounds=(1,2)) def __init__(self, initial_items=None, kdims=None, **params): - super(GridSpace, self).__init__(initial_items, kdims=kdims, **params) + super().__init__(initial_items, kdims=kdims, **params) if self.ndims > 2: raise Exception('Grids can have no more than two dimensions.') @@ -1927,7 +1806,7 @@ def keys(self, full_grid=False): Returns: List of keys """ - keys = super(GridSpace, self).keys() + keys = super().keys() if self.ndims == 1 or not full_grid: return keys dim1_keys = list(OrderedDict.fromkeys(k[0] for k in keys)) diff --git a/holoviews/core/tree.py b/holoviews/core/tree.py index bcd095b7e1..d70befd38b 100644 --- a/holoviews/core/tree.py +++ b/holoviews/core/tree.py @@ -1,7 +1,4 @@ -try: - from cyordereddict import OrderedDict -except: - from collections import OrderedDict +from collections import OrderedDict from . 
import util from .pprint import PrettyPrinter @@ -70,6 +67,13 @@ def __init__(self, items=None, identifier=None, parent=None, dir_mode='default') for path, item in items: self.set_path(path, item) + @property + def root(self): + root = self + while root.parent is not None: + root = root.parent + return root + @property def path(self): "Returns the path up to the root for the current node." @@ -223,7 +227,7 @@ def __setattr__(self, identifier, val): if util.tree_attribute(identifier) and self.fixed and shallow: raise AttributeError(self._fixed_error % identifier) - super(AttrTree, self).__setattr__(identifier, val) + super().__setattr__(identifier, val) if util.tree_attribute(identifier): if not identifier in self.children: @@ -237,8 +241,9 @@ def __getattr__(self, identifier): with the chosen attribute path. """ try: - return super(AttrTree, self).__getattr__(identifier) - except AttributeError: pass + return super().__getattr__(identifier) + except AttributeError: + pass # Attributes starting with __ get name mangled if identifier.startswith('_' + type(self).__name__) or identifier.startswith('__'): diff --git a/holoviews/core/util.py b/holoviews/core/util.py index 5d92bddeb1..a485c148ef 100644 --- a/holoviews/core/util.py +++ b/holoviews/core/util.py @@ -1,4 +1,5 @@ import sys, warnings, operator +import builtins as builtins # noqa (compatibility) import json import time import types @@ -9,9 +10,10 @@ import unicodedata import datetime as dt -from collections import defaultdict +from collections.abc import Iterable # noqa +from collections import defaultdict, OrderedDict # noqa (compatibility) from contextlib import contextmanager -from distutils.version import LooseVersion as _LooseVersion +from distutils.version import LooseVersion from functools import partial from threading import Thread, Event from types import FunctionType @@ -19,64 +21,20 @@ import numpy as np import param -try: - from cyordereddict import OrderedDict -except: - from collections import 
OrderedDict - -# Python3 compatibility -if sys.version_info.major >= 3: - import builtins as builtins # noqa (compatibility) - - if sys.version_info.minor > 3: - from collections.abc import Iterable # noqa (compatibility) - else: - from collections import Iterable # noqa (compatibility) - - basestring = str - unicode = str - long = int - cmp = lambda a, b: (a>b)-(a 4 else RuntimeError # noqa - _getargspec = inspect.getfullargspec - get_keywords = operator.attrgetter('varkw') - LooseVersion = _LooseVersion -else: - import __builtin__ as builtins # noqa (compatibility) - from collections import Iterable # noqa (compatibility) - - basestring = basestring - unicode = unicode - from itertools import izip - generator_types = (izip, xrange, types.GeneratorType) # noqa - RecursionError = RuntimeError - _getargspec = inspect.getargspec - get_keywords = operator.attrgetter('keywords') - - class LooseVersion(_LooseVersion): - """ - Subclassed to avoid unicode issues in python2 - """ - - def __init__ (self, vstring=None): - if isinstance(vstring, unicode): - vstring = str(vstring) - self.parse(vstring) - - def __cmp__(self, other): - if isinstance(other, unicode): - other = str(other) - if isinstance(other, basestring): - other = LooseVersion(other) - return cmp(self.version, other.version) +# Python 2 builtins +long = int +unicode = str +cmp = lambda a, b: (a>b)-(a= '0.24.0': + if pandas_version >= '1.3.0': + from pandas.core.dtypes.dtypes import DatetimeTZDtype as DatetimeTZDtypeType + from pandas.core.dtypes.generic import ABCSeries, ABCIndex as ABCIndexClass + elif pandas_version >= '0.24.0': from pandas.core.dtypes.dtypes import DatetimeTZDtype as DatetimeTZDtypeType from pandas.core.dtypes.generic import ABCSeries, ABCIndexClass elif pandas_version > '0.20.0': @@ -103,9 +64,12 @@ def __cmp__(self, other): if pandas_version > '0.23.0': from pandas.core.dtypes.generic import ABCExtensionArray arraylike_types = arraylike_types + (ABCExtensionArray,) + if pandas_version > 
'1.0': + from pandas.core.arrays.masked import BaseMaskedArray + masked_types = (BaseMaskedArray,) except Exception as e: - param.main.warning('pandas could not register all extension types ' - 'imports failed with the following error: %s' % e) + param.main.param.warning('pandas could not register all extension types ' + 'imports failed with the following error: %s' % e) try: import cftime @@ -121,7 +85,7 @@ class VersionError(Exception): def __init__(self, msg, version=None, min_version=None, **kwargs): self.version = version self.min_version = min_version - super(VersionError, self).__init__(msg, **kwargs) + super().__init__(msg, **kwargs) class Config(param.ParameterizedFunction): @@ -149,6 +113,24 @@ class Config(param.ParameterizedFunction): recommended that users switch this on to update any uses of __call__ as it will be deprecated in future.""") + default_cmap = param.String(default='kbc_r', doc=""" + Global default colormap. Prior to HoloViews 1.14.0, the default + value was 'fire' which can be set for backwards compatibility.""") + + default_gridded_cmap = param.String(default='kbc_r', doc=""" + Global default colormap for gridded elements (i.e. Image, Raster + and QuadMesh). Can be set to 'fire' to match raster defaults + prior to HoloViews 1.14.0 while allowing the default_cmap to be + the value of 'kbc_r' used in HoloViews >= 1.14.0""") + + default_heatmap_cmap = param.String(default='kbc_r', doc=""" + Global default colormap for HeatMap elements. Prior to HoloViews + 1.14.0, the default value was the 'RdYlBu_r' colormap.""") + + raise_deprecated_tilesource_exception = param.Boolean(default=False, + doc=""" Whether deprecated tile sources should raise a + deprecation exception instead of issuing warnings.""") + def __call__(self, **params): self.param.set_param(**params) return self @@ -175,7 +157,7 @@ class HashableJSON(json.JSONEncoder): their id. 
One limitation of this approach is that dictionaries with composite - keys (e.g tuples) are not supported due to the JSON spec. + keys (e.g. tuples) are not supported due to the JSON spec. """ string_hashable = (dt.datetime,) repr_hashable = () @@ -289,7 +271,7 @@ def __init__(self, period, count, callback, timeout=None, block=False): raise ValueError('When using a non-blocking thread, please specify ' 'either a count or a timeout') - super(periodic, self).__init__() + super().__init__() self.period = period self.callback = callback self.count = count @@ -306,7 +288,7 @@ def completed(self): def start(self): self._start_time = time.time() if self.block is False: - super(periodic,self).start() + super().start() else: self.run() @@ -378,20 +360,24 @@ def argspec(callable_obj): if (isinstance(callable_obj, type) and issubclass(callable_obj, param.ParameterizedFunction)): # Parameterized function.__call__ considered function in py3 but not py2 - spec = _getargspec(callable_obj.__call__) + spec = inspect.getfullargspec(callable_obj.__call__) args = spec.args[1:] - elif inspect.isfunction(callable_obj): # functions and staticmethods - spec = _getargspec(callable_obj) + elif inspect.isfunction(callable_obj): # functions and staticmethods + spec = inspect.getfullargspec(callable_obj) args = spec.args elif isinstance(callable_obj, partial): # partials arglen = len(callable_obj.args) - spec = _getargspec(callable_obj.func) + spec = inspect.getfullargspec(callable_obj.func) args = [arg for arg in spec.args[arglen:] if arg not in callable_obj.keywords] elif inspect.ismethod(callable_obj): # instance and class methods - spec = _getargspec(callable_obj) + spec = inspect.getfullargspec(callable_obj) args = spec.args[1:] - else: # callable objects + elif isinstance(callable_obj, type) and issubclass(callable_obj, param.Parameterized): + return argspec(callable_obj.__init__) + elif callable(callable_obj): # callable objects return argspec(callable_obj.__call__) + else: + raise 
ValueError("Cannot determine argspec for non-callable type.") return inspect.ArgSpec(args=args, varargs=spec.varargs, @@ -399,14 +385,13 @@ def argspec(callable_obj): defaults=spec.defaults) - def validate_dynamic_argspec(callback, kdims, streams): """ Utility used by DynamicMap to ensure the supplied callback has an appropriate signature. If validation succeeds, returns a list of strings to be zipped with - the positional arguments i.e kdim values. The zipped values can then + the positional arguments, i.e. kdim values. The zipped values can then be merged with the stream values to pass everything to the Callable as keywords. @@ -486,14 +471,7 @@ def callable_name(callable_obj): elif inspect.isfunction(callable_obj): # functions and staticmethods return callable_obj.__name__ elif inspect.ismethod(callable_obj): # instance and class methods - meth = callable_obj - if sys.version_info < (3,0): - owner = meth.im_class if meth.im_self is None else meth.im_self - if meth.__name__ == '__call__': - return type(owner).__name__ - return '.'.join([owner.__name__, meth.__name__]) - else: - return meth.__func__.__qualname__.replace('.__call__', '') + return callable_obj.__func__.__qualname__.replace('.__call__', '') elif isinstance(callable_obj, types.GeneratorType): return callable_obj.__name__ else: @@ -505,11 +483,11 @@ def callable_name(callable_obj): def process_ellipses(obj, key, vdim_selection=False): """ Helper function to pad a __getitem__ key with the right number of - empty slices (i.e :) when the key contains an Ellipsis (...). + empty slices (i.e. :) when the key contains an Ellipsis (...). If the vdim_selection flag is true, check if the end of the key contains strings or Dimension objects in obj. If so, extra padding - will not be applied for the value dimensions (i.e the resulting key + will not be applied for the value dimensions (i.e. the resulting key will be exactly one longer than the number of kdims). 
Note: this flag should not be used for composite types. """ @@ -528,7 +506,7 @@ def process_ellipses(obj, key, vdim_selection=False): padlen = dim_count - (len(head) + len(tail)) if vdim_selection: - # If the end of the key (i.e the tail) is in vdims, pad to len(kdims)+1 + # If the end of the key (i.e. the tail) is in vdims, pad to len(kdims)+1 if wrapped_key[-1] in obj.vdims: padlen = (len(obj.kdims) +1 ) - len(head+tail) return head + ((slice(None),) * padlen) + tail @@ -539,7 +517,7 @@ def bytes_to_unicode(value): Safely casts bytestring to unicode """ if isinstance(value, bytes): - return unicode(value.decode('utf-8')) + return value.decode('utf-8') return value @@ -549,7 +527,7 @@ def get_method_owner(method): """ if isinstance(method, partial): method = method.func - return method.__self__ if sys.version_info.major >= 3 else method.im_self + return method.__self__ def capitalize_unicode_name(s): @@ -568,13 +546,7 @@ def capitalize_unicode_name(s): class sanitize_identifier_fn(param.ParameterizedFunction): """ Sanitizes group/label values for use in AttrTree attribute - access. Depending on the version parameter, either sanitization - appropriate for Python 2 (no unicode gn identifiers allowed) or - Python 3 (some unicode allowed) is used. - - Note that if you are using Python 3, you can switch to version 2 - for compatibility but you cannot enable relaxed sanitization if - you are using Python 2. + access. Special characters are sanitized using their (lowercase) unicode name using the unicodedata module. For instance: @@ -587,11 +559,6 @@ class sanitize_identifier_fn(param.ParameterizedFunction): names appropriately. """ - version = param.ObjectSelector(sys.version_info.major, objects=[2,3], doc=""" - The sanitization version. If set to 2, more aggressive - sanitization appropriate for Python 2 is applied. 
Otherwise, - if set to 3, more relaxed, Python 3 sanitization is used.""") - capitalize = param.Boolean(default=True, doc=""" Whether the first letter should be converted to uppercase. Note, this will only be applied to ASCII characters @@ -673,15 +640,14 @@ def allowable(self_or_cls, name, disable_leading_underscore=None): return (name not in self_or_cls.disallowed) and not isrepr @param.parameterized.bothmethod - def prefixed(self, identifier, version): + def prefixed(self, identifier): """ Whether or not the identifier will be prefixed. Strings that require the prefix are generally not recommended. """ invalid_starting = ['Mn', 'Mc', 'Nd', 'Pc'] if identifier.startswith('_'): return True - return((identifier[0] in string.digits) if version==2 - else (unicodedata.category(identifier[0]) in invalid_starting)) + return unicodedata.category(identifier[0]) in invalid_starting @param.parameterized.bothmethod def remove_diacritics(self_or_cls, identifier): @@ -716,7 +682,7 @@ def shortened_character_name(self_or_cls, c, eliminations=[], substitutions={}, return ' '.join(name.strip().split()).replace(' ','_').replace('-','_') - def __call__(self, name, escape=True, version=None): + def __call__(self, name, escape=True): if name in [None, '']: return name elif name in self.aliases: @@ -724,17 +690,14 @@ def __call__(self, name, escape=True, version=None): elif name in self._lookup_table: return self._lookup_table[name] name = bytes_to_unicode(name) - version = self.version if version is None else version if not self.allowable(name): raise AttributeError("String %r is in the disallowed list of attribute names: %r" % (name, self.disallowed)) - if version == 2: - name = self.remove_diacritics(name) if self.capitalize and name and name[0] in string.ascii_lowercase: name = name[0].upper()+name[1:] - sanitized = (self.sanitize_py2(name) if version==2 else self.sanitize_py3(name)) - if self.prefixed(name, version): + sanitized = self.sanitize_py3(name) + if self.prefixed(name): 
sanitized = self.prefix + sanitized self._lookup_table[name] = sanitized return sanitized @@ -755,12 +718,6 @@ def _process_underscores(self, tokens): processed.append(token) return processed - def sanitize_py2(self, name): - # This fix works but masks an issue in self.sanitize (py2) - prefix = '_' if name.startswith('_') else '' - valid_chars = string.ascii_letters+string.digits+'_' - return prefix + str('_'.join(self.sanitize(name, lambda c: c in valid_chars))) - def sanitize_py3(self, name): if not name.isidentifier(): @@ -806,7 +763,7 @@ def isscalar(val): def isnumeric(val): - if isinstance(val, (basestring, bool, np.bool_)): + if isinstance(val, (str, bool, np.bool_)): return False try: float(val) @@ -860,6 +817,10 @@ def isfinite(val): """ is_dask = is_dask_array(val) if not np.isscalar(val) and not is_dask: + if isinstance(val, np.ma.core.MaskedArray): + return ~val.mask & isfinite(val.data) + elif isinstance(val, masked_types): + return ~val.isna() & isfinite(val._data) val = asarray(val, strict=False) if val is None: @@ -880,7 +841,7 @@ def isfinite(val): return finite elif isinstance(val, datetime_types+timedelta_types): return not isnat(val) - elif isinstance(val, (basestring, bytes)): + elif isinstance(val, (str, bytes)): return True finite = np.isfinite(val) if pd and pandas_version >= '1.0.0': @@ -1065,13 +1026,13 @@ def max_extents(extents, zrange=False): upper = [v for v in arr[uidx] if v is not None and not is_nan(v)] if lower and isinstance(lower[0], datetime_types): extents[lidx] = np.min(lower) - elif any(isinstance(l, basestring) for l in lower): + elif any(isinstance(l, str) for l in lower): extents[lidx] = np.sort(lower)[0] elif lower: extents[lidx] = np.nanmin(lower) if upper and isinstance(upper[0], datetime_types): extents[uidx] = np.max(upper) - elif any(isinstance(u, basestring) for u in upper): + elif any(isinstance(u, str) for u in upper): extents[uidx] = np.sort(upper)[-1] elif upper: extents[uidx] = np.nanmax(upper) @@ -1251,10 
+1212,7 @@ def dimension_sort(odict, kdims, vdims, key_index): sortkws['key'] = lambda x: tuple(cached_values[dim.name].index(x[t][d]) if dim.values else x[t][d] for i, (dim, t, d) in enumerate(indexes)) - if sys.version_info.major == 3: - return python2sort(odict.items(), **sortkws) - else: - return sorted(odict.items(), **sortkws) + return python2sort(odict.items(), **sortkws) # Copied from param should make param version public @@ -1605,8 +1563,6 @@ def disable_constant(parameterized): p.constant = False try: yield - except: - raise finally: for (p, const) in zip(params, constants): p.constant = const @@ -1618,7 +1574,7 @@ def get_ndmapping_label(ndmapping, attr): label attribute from an NdMapping. """ label = None - els = itervalues(ndmapping.data) + els = iter(ndmapping.data.values()) while label is None: try: el = next(els) @@ -1647,10 +1603,19 @@ def stream_name_mapping(stream, exclude_params=['name'], reverse=False): If reverse is True, the mapping is from the renamed strings to the original stream parameter names. 
""" - filtered = [k for k in stream.param if k not in exclude_params] - mapping = {k:stream._rename.get(k,k) for k in filtered} + from ..streams import Params + if isinstance(stream, Params): + mapping = {} + for p in stream.parameters: + if isinstance(p, str): + mapping[p] = stream._rename.get(p, p) + else: + mapping[p.name] = stream._rename.get((p.owner, p.name), p.name) + else: + filtered = [k for k in stream.param if k not in exclude_params] + mapping = {k: stream._rename.get(k, k) for k in filtered} if reverse: - return {v:k for k,v in mapping.items()} + return {v: k for k,v in mapping.items()} else: return mapping @@ -1761,16 +1726,6 @@ def drop_streams(streams, kdims, keys): return dims, ([wrap_tuple(k) for k in keys] if len(inds) == 1 else list(keys)) -def itervalues(obj): - "Get value iterator from dictionary for Python 2 and 3" - return iter(obj.values()) if sys.version_info.major == 3 else obj.itervalues() - - -def iterkeys(obj): - "Get key iterator from dictionary for Python 2 and 3" - return iter(obj.keys()) if sys.version_info.major == 3 else obj.iterkeys() - - def get_unique_keys(ndmapping, dimensions): inds = [ndmapping.get_dimension_index(dim) for dim in dimensions] getter = operator.itemgetter(*inds) @@ -1943,7 +1898,7 @@ def arglexsort(arrays): def dimensioned_streams(dmap): """ Given a DynamicMap return all streams that have any dimensioned - parameters i.e parameters also listed in the key dimensions. + parameters, i.e. parameters also listed in the key dimensions. """ dimensioned = [] for stream in dmap.streams: @@ -2070,15 +2025,15 @@ def parse_datetime_selection(sel): """ Parses string selection specs as datetimes. 
""" - if isinstance(sel, basestring) or isdatetime(sel): + if isinstance(sel, str) or isdatetime(sel): sel = parse_datetime(sel) if isinstance(sel, slice): - if isinstance(sel.start, basestring) or isdatetime(sel.start): + if isinstance(sel.start, str) or isdatetime(sel.start): sel = slice(parse_datetime(sel.start), sel.stop) - if isinstance(sel.stop, basestring) or isdatetime(sel.stop): + if isinstance(sel.stop, str) or isdatetime(sel.stop): sel = slice(sel.start, parse_datetime(sel.stop)) if isinstance(sel, (set, list)): - sel = [parse_datetime(v) if isinstance(v, basestring) else v for v in sel] + sel = [parse_datetime(v) if isinstance(v, str) else v for v in sel] return sel @@ -2092,12 +2047,13 @@ def dt_to_int(value, time_unit='us'): if isinstance(value, pd.Timestamp): try: value = value.to_datetime64() - except: + except Exception: value = np.datetime64(value.to_pydatetime()) elif isinstance(value, cftime_types): return cftime_to_timestamp(value, time_unit) - if isinstance(value, dt.date): + # date class is a parent for datetime class + if isinstance(value, dt.date) and not isinstance(value, dt.datetime): value = dt.datetime(*value.timetuple()[:6]) # Handle datetime64 separately @@ -2106,7 +2062,7 @@ def dt_to_int(value, time_unit='us'): value = np.datetime64(value, 'ns') tscale = (np.timedelta64(1, time_unit)/np.timedelta64(1, 'ns')) return value.tolist()/tscale - except: + except Exception: # If it can't handle ns precision fall back to datetime value = value.tolist() @@ -2117,8 +2073,12 @@ def dt_to_int(value, time_unit='us'): try: # Handle python3 - return int(value.timestamp() * tscale) - except: + if value.tzinfo is None: + _epoch = dt.datetime(1970, 1, 1) + else: + _epoch = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc) + return int((value - _epoch).total_seconds() * tscale) + except Exception: # Handle python2 return (time.mktime(value.timetuple()) + value.microsecond / 1e6) * tscale @@ -2139,13 +2099,13 @@ def cftime_to_timestamp(date, 
time_unit='us'): time_unit since 1970-01-01 00:00:00 """ import cftime - utime = cftime.utime('microseconds since 1970-01-01 00:00:00') if time_unit == 'us': tscale = 1 else: tscale = (np.timedelta64(1, 'us')/np.timedelta64(1, time_unit)) - return utime.date2num(date)*tscale + return cftime.date2num(date,'microseconds since 1970-01-01 00:00:00', + calendar='standard')*tscale def search_indices(values, source): """ @@ -2208,11 +2168,13 @@ def closest_match(match, specs, depth=0): if spec[0] == match[0]: new_specs.append((i, spec[1:])) else: - if all(isinstance(s[0], basestring) for s in [spec, match]): + if all(isinstance(s[0], str) for s in [spec, match]): match_length = max(i for i in range(len(match[0])) if match[0].startswith(spec[0][:i])) elif is_number(match[0]) and is_number(spec[0]): - match_length = -abs(match[0]-spec[0]) + m = bool(match[0]) if isinstance(match[0], np.bool_) else match[0] + s = bool(spec[0]) if isinstance(spec[0], np.bool_) else spec[0] + match_length = -abs(m-s) else: match_length = 0 match_lengths.append((i, match_length, spec[0])) diff --git a/holoviews/element/annotation.py b/holoviews/element/annotation.py index ca6be2a222..90a37d64ee 100644 --- a/holoviews/element/annotation.py +++ b/holoviews/element/annotation.py @@ -2,7 +2,7 @@ import numpy as np import param -from ..core.util import datetime_types, basestring +from ..core.util import datetime_types from ..core import Dimension, Element2D, Element from ..core.data import Dataset @@ -30,7 +30,7 @@ class Annotation(Element2D): _auxiliary_component = True def __init__(self, data, **params): - super(Annotation, self).__init__(data, **params) + super().__init__(data, **params) def __len__(self): return 1 @@ -68,7 +68,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): elif index == 1: return [] if np.isscalar(self.data) else np.array([self.data[1]]) else: - return super(Annotation, self).dimension_values(dimension) + return super().dimension_values(dimension) # Note: 
This version of clone is identical in path.BaseShape # Consider implementing a mix-in class if it is needed again. @@ -95,7 +95,9 @@ class VLine(Annotation): __pos_params = ['x'] def __init__(self, x, **params): - super(VLine, self).__init__(x, x=x, **params) + if isinstance(x, np.ndarray) and x.size == 1: + x = np.atleast_1d(x)[0] + super().__init__(x, x=x, **params) def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. @@ -114,7 +116,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): elif index == 1: return np.array([np.nan]) else: - return super(VLine, self).dimension_values(dimension) + return super().dimension_values(dimension) class HLine(Annotation): @@ -128,7 +130,9 @@ class HLine(Annotation): __pos_params = ['y'] def __init__(self, y, **params): - super(HLine, self).__init__(y, y=y, **params) + if isinstance(y, np.ndarray) and y.size == 1: + y = np.atleast_1d(y)[0] + super().__init__(y, y=y, **params) def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. 
@@ -147,7 +151,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): elif index == 1: return np.array([self.data]) else: - return super(HLine, self).dimension_values(dimension) + return super().dimension_values(dimension) class Slope(Annotation): @@ -160,7 +164,7 @@ class Slope(Annotation): __pos_params = ['slope', 'y_intercept'] def __init__(self, slope, y_intercept, kdims=None, vdims=None, **params): - super(Slope, self).__init__( + super().__init__( (slope, y_intercept), slope=slope, y_intercept=y_intercept, kdims=kdims, vdims=vdims, **params) @@ -200,7 +204,7 @@ class VSpan(Annotation): __pos_params = ['x1', 'x2'] def __init__(self, x1=None, x2=None, **params): - super(VSpan, self).__init__([x1, x2], x1=x1, x2=x2, **params) + super().__init__([x1, x2], x1=x1, x2=x2, **params) def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. @@ -219,7 +223,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): elif index == 1: return np.array([np.nan, np.nan]) else: - return super(VSpan, self).dimension_values(dimension) + return super().dimension_values(dimension) class HSpan(Annotation): @@ -236,7 +240,7 @@ class HSpan(Annotation): __pos_params = ['y1', 'y2'] def __init__(self, y1=None, y2=None, **params): - super(HSpan, self).__init__([y1, y2], y1=y1, y2=y2, **params) + super().__init__([y1, y2], y1=y1, y2=y2, **params) def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. 
@@ -255,7 +259,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): elif index == 1: return np.array(self.data) else: - return super(HSpan, self).dimension_values(dimension) + return super().dimension_values(dimension) @@ -278,8 +282,7 @@ class Spline(Annotation): group = param.String(default='Spline', constant=True) def __init__(self, spline_points, **params): - super(Spline, self).__init__(spline_points, **params) - + super().__init__(spline_points, **params) def clone(self, data=None, shared_data=True, new_type=None, *args, **overrides): """Clones the object, overriding data and parameters. @@ -312,7 +315,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): if index in [0, 1]: return np.array([point[index] for point in self.data[0]]) else: - return super(Spline, self).dimension_values(dimension) + return super().dimension_values(dimension) @@ -352,22 +355,21 @@ def __init__(self, x, y, text='', direction='<', points=40, arrowstyle='->', **params): info = (x, y, text, direction, points, arrowstyle) - super(Arrow, self).__init__(info, x=x, y=y, - text=text, direction=direction, - points=points, arrowstyle=arrowstyle, - **params) + super().__init__(info, x=x, y=y, + text=text, direction=direction, + points=points, arrowstyle=arrowstyle, + **params) def __setstate__(self, d): """ Add compatibility for unpickling old Arrow types with different .data format. """ - super(Arrow, self).__setstate__(d) + super().__setstate__(d) if len(self.data) == 5: direction, text, (x, y), points, arrowstyle = self.data self.data = (x, y, text, direction, points, arrowstyle) - def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. 
@@ -385,7 +387,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): elif index == 1: return np.array([self.y]) else: - return super(Arrow, self).dimension_values(dimension) + return super().dimension_values(dimension) @@ -394,10 +396,10 @@ class Text(Annotation): Draw a text annotation at the specified position with custom fontsize, alignment and rotation. """ - x = param.ClassSelector(default=0, class_=(Number, basestring) + datetime_types, doc=""" + x = param.ClassSelector(default=0, class_=(Number, str) + datetime_types, doc=""" The x-position of the arrow which make be numeric or a timestamp.""") - y = param.ClassSelector(default=0, class_=(Number, basestring) + datetime_types, doc=""" + y = param.ClassSelector(default=0, class_=(Number, str) + datetime_types, doc=""" The y-position of the arrow which make be numeric or a timestamp.""") text = param.String(default='', doc="The text to be displayed.") @@ -423,9 +425,9 @@ class Text(Annotation): def __init__(self, x, y, text, fontsize=12, halign='center', valign='center', rotation=0, **params): info = (x, y, text, fontsize, halign, valign, rotation) - super(Text, self).__init__(info, x=x, y=y, text=text, - fontsize=fontsize, rotation=rotation, - halign=halign, valign=valign, **params) + super().__init__(info, x=x, y=y, text=text, + fontsize=fontsize, rotation=rotation, + halign=halign, valign=valign, **params) @@ -440,10 +442,10 @@ class Div(Element): def __init__(self, data, **params): if data is None: data = '' - if not isinstance(data, basestring): + if not isinstance(data, str): raise ValueError("Div element html data must be a string " "type, found %s type." 
% type(data).__name__) - super(Div, self).__init__(data, **params) + super().__init__(data, **params) diff --git a/holoviews/element/chart.py b/holoviews/element/chart.py index f7fc40b73c..aca7f264d4 100644 --- a/holoviews/element/chart.py +++ b/holoviews/element/chart.py @@ -2,8 +2,8 @@ import param from ..core import util -from ..core import Dimension, Dataset, Element2D -from ..core.data import GridInterface +from ..core import Dimension, Dataset, Element2D, NdOverlay, Overlay +from ..core.dimension import process_dimensions from .geom import Rectangles, Points, VectorField # noqa: backward compatible import from .selection import Selection1DExpr, Selection2DExpr @@ -41,8 +41,15 @@ class Chart(Dataset, Element2D): # Enables adding index if 1D array like data is supplied _auto_indexable_1d = True + _max_kdim_count = 1 # Remove once kdims has bounds=(1,1) instead of warning __abstract = True + def __init__(self, data, kdims=None, vdims=None, **params): + params.update(process_dimensions(kdims, vdims)) + if len(params.get('kdims', [])) == self._max_kdim_count + 1: + self.param.warning('Chart elements should only be supplied a single kdim') + super().__init__(data, **params) + def __getitem__(self, index): return super(Chart, self).__getitem__(index) @@ -126,8 +133,7 @@ def range(self, dim, data_range=True, dimension_range=True): if not dimension_range: return (lower, upper) return util.dimension_range(lower, upper, dim.range, dim.soft_range) - return super(ErrorBars, self).range(dim, data_range) - + return super().range(dim, data_range) class Spread(ErrorBars): @@ -142,7 +148,6 @@ class Spread(ErrorBars): group = param.String(default='Spread', constant=True) - class Bars(Selection1DExpr, Chart): """ Bars is a Chart element representing categorical observations @@ -156,6 +161,7 @@ class Bars(Selection1DExpr, Chart): kdims = param.List(default=[Dimension('x')], bounds=(1,3)) + _max_kdim_count = 3 class Histogram(Selection1DExpr, Chart): @@ -179,39 +185,14 @@ class 
Histogram(Selection1DExpr, Chart): _binned = True - def __init__(self, data, edges=None, **params): + def __init__(self, data, **params): if data is None: data = [] - if edges is not None: - self.param.warning( - "Histogram edges should be supplied as a tuple " - "along with the values, passing the edges will " - "be deprecated in holoviews 2.0.") - data = (edges, data) - elif isinstance(data, tuple) and len(data) == 2 and len(data[0])+1 == len(data[1]): + if (isinstance(data, tuple) and len(data) == 2 and + len(data[0])+1 == len(data[1])): data = data[::-1] - super(Histogram, self).__init__(data, **params) - def __setstate__(self, state): - """ - Ensures old-style Histogram types without an interface can be unpickled. - - Note: Deprecate as part of 2.0 - """ - if 'interface' not in state: - self.interface = GridInterface - x, y = state['_kdims_param_value'][0], state['_vdims_param_value'][0] - state['data'] = {x.name: state['data'][1], y.name: state['data'][0]} - super(Dataset, self).__setstate__(state) - - - @property - def values(self): - "Property to access the Histogram values provided for backward compatibility" - self.param.warning('Histogram.values is deprecated in favor of ' - 'common dimension_values method.') - return self.dimension_values(1) - + super().__init__(data, **params) @property def edges(self): @@ -239,7 +220,6 @@ class Spikes(Selection1DExpr, Chart): _auto_indexable_1d = False - class Area(Curve): """ Area is a Chart element representing the area under a curve or @@ -255,7 +235,7 @@ class Area(Curve): group = param.String(default='Area', constant=True) @classmethod - def stack(cls, areas): + def stack(cls, areas, baseline_name='Baseline'): """ Stacks an (Nd)Overlay of Area or Curve Elements by offsetting their baselines. 
To stack a HoloMap or DynamicMap use the map @@ -263,12 +243,22 @@ def stack(cls, areas): """ if not len(areas): return areas - baseline = np.zeros(len(areas.values()[0])) + is_overlay = isinstance(areas, Overlay) + if is_overlay: + areas = NdOverlay({i: el for i, el in enumerate(areas)}) + df = areas.dframe(multi_index=True) + levels = list(range(areas.ndims)) + vdim = areas.last.vdims[0] + vdims = [vdim, baseline_name] + baseline = None stacked = areas.clone(shared_data=False) - vdims = [areas.values()[0].vdims[0], 'Baseline'] - for k, area in areas.items(): - x, y = (area.dimension_values(i) for i in range(2)) - stacked[k] = area.clone((x, y+baseline, baseline), vdims=vdims, - new_type=Area) - baseline = baseline + y - return stacked + for key, sdf in df.groupby(level=levels): + sdf = sdf.droplevel(levels).reindex(index=df.index.levels[-1], fill_value=0) + if baseline is None: + sdf[baseline_name] = 0 + else: + sdf[vdim.name] = sdf[vdim.name] + baseline + sdf[baseline_name] = baseline + baseline = sdf[vdim.name] + stacked[key] = areas[key].clone(sdf, vdims=vdims) + return Overlay(stacked.values()) if is_overlay else stacked diff --git a/holoviews/element/chart3d.py b/holoviews/element/chart3d.py index 233852de5a..c52a97715a 100644 --- a/holoviews/element/chart3d.py +++ b/holoviews/element/chart3d.py @@ -38,7 +38,7 @@ def __init__(self, data, kdims=None, vdims=None, extents=None, **params): Image.__init__(self, data, kdims=kdims, vdims=vdims, extents=extents, **params) def _get_selection_expr_for_stream_value(self, **kwargs): - expr, bbox, _ = super(Surface, self)._get_selection_expr_for_stream_value(**kwargs) + expr, bbox, _ = super()._get_selection_expr_for_stream_value(**kwargs) return expr, bbox, None @@ -66,23 +66,10 @@ def __getitem__(self, slc): return Points.__getitem__(self, slc) -class Trisurface(TriSurface): - """ - Old name for TriSurface. Retaining for backwards compatibility - until holoviews 2.0. 
- """ - - group = param.String(default='Trisurface', constant=True) - - def __init__(self, *args, **kwargs): - self.param.warning('Please use TriSurface element instead') - super(TriSurface, self).__init__(*args, **kwargs) - - class Scatter3D(Element3D, Points): """ Scatter3D is a 3D element representing the position of a collection - of coordinates in a 3D space. The key dimensions represent the + of coordinates in a 3D space. The key dimensions represent the position of each coordinate along the x-, y- and z-axis while the value dimensions can optionally supply additional information. """ @@ -101,7 +88,6 @@ def __getitem__(self, slc): return Points.__getitem__(self, slc) - class Path3D(Element3D, Path): """ Path3D is a 3D element representing a line through 3D space. The diff --git a/holoviews/element/comparison.py b/holoviews/element/comparison.py index 4677d9253e..4753efd108 100644 --- a/holoviews/element/comparison.py +++ b/holoviews/element/comparison.py @@ -65,11 +65,8 @@ def assertEqual(cls, first, second, msg=None): if type(first) is type(second): asserter = cls.equality_type_funcs.get(type(first)) - try: basestring = basestring # Python 2 - except NameError: basestring = str # Python 3 - if asserter is not None: - if isinstance(asserter, basestring): + if isinstance(asserter, str): asserter = getattr(cls, asserter) if asserter is None: @@ -101,7 +98,6 @@ def register(cls): # Float comparisons cls.equality_type_funcs[float] = cls.compare_floats - cls.equality_type_funcs[np.float] = cls.compare_floats cls.equality_type_funcs[np.float32] = cls.compare_floats cls.equality_type_funcs[np.float64] = cls.compare_floats @@ -171,7 +167,6 @@ def register(cls): cls.equality_type_funcs[Scatter] = cls.compare_scatter cls.equality_type_funcs[Scatter3D] = cls.compare_scatter3d cls.equality_type_funcs[TriSurface] = cls.compare_trisurface - cls.equality_type_funcs[Trisurface] = cls.compare_trisurface cls.equality_type_funcs[Histogram] = cls.compare_histogram 
cls.equality_type_funcs[Bars] = cls.compare_bars cls.equality_type_funcs[Spikes] = cls.compare_spikes @@ -303,10 +298,6 @@ def compare_dimensions(cls, dim1, dim2, msg=None): dim1_params = dict(dim1.param.get_param_values()) dim2_params = dict(dim2.param.get_param_values()) - # Special handling of deprecated 'initial' values argument - dim1_params['values'] = [] if dim1.values=='initial' else dim1.values - dim2_params['values'] = [] if dim2.values=='initial' else dim2.values - if set(dim1_params.keys()) != set(dim2_params.keys()): raise cls.failureException("Dimension parameter sets mismatched: %s != %s" % (set(dim1_params.keys()), set(dim2_params.keys()))) @@ -610,7 +601,7 @@ def compare_segments(cls, el1, el2, msg='Segments'): @classmethod def compare_boxes(cls, el1, el2, msg='Rectangles'): cls.compare_dataset(el1, el2, msg) - + #=========# # Graphs # #=========# @@ -700,7 +691,7 @@ def compare_tables(cls, el1, el2, msg='Table'): @classmethod def compare_dataframe(cls, df1, df2, msg='DFrame'): - from pandas.util.testing import assert_frame_equal + from pandas.testing import assert_frame_equal try: assert_frame_equal(df1, df2) except AssertionError as e: diff --git a/holoviews/element/graphs.py b/holoviews/element/graphs.py index fe6f3ccb9f..40fbcc1c23 100644 --- a/holoviews/element/graphs.py +++ b/holoviews/element/graphs.py @@ -6,7 +6,7 @@ from ..core import Dimension, Dataset, Element2D from ..core.accessors import Redim -from ..core.util import max_range, search_indices +from ..core.util import is_dataframe, max_range, search_indices from ..core.operation import Operation from .chart import Points from .path import Path @@ -21,7 +21,7 @@ class RedimGraph(Redim): """ def __call__(self, specs=None, **dimensions): - redimmed = super(RedimGraph, self).__call__(specs, **dimensions) + redimmed = super().__call__(specs, **dimensions) new_data = (redimmed.data,) if self._obj.nodes: new_data = new_data + (self._obj.nodes.redim(specs, **dimensions),) @@ -29,8 +29,6 @@ 
def __call__(self, specs=None, **dimensions): new_data = new_data + (self._obj.edgepaths.redim(specs, **dimensions),) return redimmed.clone(new_data) -redim_graph = RedimGraph # pickle compatibility - remove in 2.0 - class layout_nodes(Operation): """ @@ -152,17 +150,15 @@ def __init__(self, data, kdims=None, vdims=None, **params): self._nodes = nodes self._edgepaths = edgepaths - super(Graph, self).__init__(edges, kdims=kdims, vdims=vdims, **params) + super().__init__(edges, kdims=kdims, vdims=vdims, **params) if node_info is not None: self._add_node_info(node_info) self._validate() - @property def redim(self): return RedimGraph(self, mode='dataset') - def _add_node_info(self, node_info): nodes = self.nodes.clone(datatype=['pandas', 'dictionary']) if isinstance(node_info, self.node_type): @@ -240,7 +236,6 @@ def _validate(self): 'to the Graph (%d) matches the number of ' 'edgepaths (%d)' % (nedges, npaths)) - def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): if data is None: @@ -252,9 +247,8 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, data = (data, self.nodes) if self._edgepaths: data = data + (self.edgepaths,) - return super(Graph, self).clone(data, shared_data, new_type, link, - *args, **overrides) - + return super().clone(data, shared_data, new_type, link, + *args, **overrides) def select(self, selection_expr=None, selection_specs=None, selection_mode='edges', **selection): """ @@ -276,7 +270,7 @@ def select(self, selection_expr=None, selection_specs=None, selection_mode='edge The first positional argument to the Dataset.select method is expected to be a holoviews.util.transform.dim expression. 
Use the selection_specs keyword argument to specify a selection specification""") - + selection = {dim: sel for dim, sel in selection.items() if dim in self.dimensions('ranges')+['selection_mask']} if (selection_specs and not any(self.matches(sp) for sp in selection_specs) @@ -340,7 +334,6 @@ def select(self, selection_expr=None, selection_specs=None, selection_mode='edge paths = self._edgepaths return self.clone((data, nodes, paths)) - @property def _split_edgepaths(self): if len(self) == len(self.edgepaths.data): @@ -348,7 +341,6 @@ def _split_edgepaths(self): else: return self.edgepaths.clone(split_path(self.edgepaths)) - def range(self, dimension, data_range=True, dimension_range=True): if self.nodes and dimension in self.nodes.dimensions(): node_range = self.nodes.range(dimension, data_range, dimension_range) @@ -356,11 +348,10 @@ def range(self, dimension, data_range=True, dimension_range=True): path_range = self._edgepaths.range(dimension, data_range, dimension_range) return max_range([node_range, path_range]) return node_range - return super(Graph, self).range(dimension, data_range, dimension_range) - + return super().range(dimension, data_range, dimension_range) def dimensions(self, selection='all', label=False): - dimensions = super(Graph, self).dimensions(selection, label) + dimensions = super().dimensions(selection, label) if selection == 'ranges': if self._nodes is not None: node_dims = self.nodes.dimensions(selection, label) @@ -373,14 +364,13 @@ def dimensions(self, selection='all', label=False): return dimensions+node_dims return dimensions - @property def nodes(self): """ Computes the node positions the first time they are requested if no explicit node information was supplied. 
""" - + if self._nodes is None: from ..operation.element import chain self._nodes = layout_nodes(self, only_nodes=True) @@ -388,7 +378,6 @@ def nodes(self): self._nodes._pipeline = chain.instance() return self._nodes - @property def edgepaths(self): """ @@ -403,7 +392,6 @@ def edgepaths(self): paths = connect_edges_pd(self) return self.edge_type(paths, kdims=self.nodes.kdims[:2]) - @classmethod def from_networkx(cls, G, positions, nodes=None, **kwargs): """ @@ -499,7 +487,6 @@ def from_networkx(cls, G, positions, nodes=None, **kwargs): return cls((edge_data, nodes), vdims=edge_vdims) - class TriMesh(Graph): """ A TriMesh represents a mesh of triangles represented as the @@ -531,7 +518,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): else: edges, nodes, edgepaths = data, None, None - super(TriMesh, self).__init__(edges, kdims=kdims, vdims=vdims, **params) + super().__init__(edges, kdims=kdims, vdims=vdims, **params) if nodes is None: if len(self) == 0: nodes = [] @@ -545,15 +532,20 @@ def __init__(self, data, kdims=None, vdims=None, **params): # Add index to make it a valid Nodes object nodes = self.node_type(Dataset(nodes).add_dimension('index', 2, np.arange(len(nodes)))) elif not isinstance(nodes, Dataset) or nodes.ndims in [2, 3]: - # Try assuming data contains just coordinates (2 columns) - try: - points = self.point_type(nodes) - ds = Dataset(points).add_dimension('index', 2, np.arange(len(points))) - nodes = self.node_type(ds) - except: - raise ValueError("Nodes argument could not be interpreted, expected " - "data with two or three columns representing the " - "x/y positions and optionally the node indices.") + if is_dataframe(nodes): + coords = list(nodes.columns)[:2] + index = nodes.index.name or 'index' + nodes = self.node_type(nodes, coords+[index]) + else: + try: + points = self.point_type(nodes) + ds = Dataset(points).add_dimension('index', 2, np.arange(len(points))) + nodes = self.node_type(ds) + except Exception: + raise ValueError( + 
"Nodes argument could not be interpreted, expected " + "data with two or three columns representing the " + "x/y positions and optionally the node indices.") if edgepaths is not None and not isinstance(edgepaths, self.edge_type): edgepaths = self.edge_type(edgepaths) @@ -569,7 +561,7 @@ def from_vertices(cls, data): try: from scipy.spatial import Delaunay except: - raise ImportError("Generating triangles from points requires, " + raise ImportError("Generating triangles from points requires " "SciPy to be installed.") if not isinstance(data, Points): data = Points(data) @@ -614,9 +606,9 @@ def select(self, selection_specs=None, **selection): """ # Ensure that edgepaths are initialized so they can be selected on self.edgepaths - return super(TriMesh, self).select(selection_specs=None, - selection_mode='nodes', - **selection) + return super().select(selection_specs=None, + selection_mode='nodes', + **selection) @@ -802,12 +794,10 @@ def __init__(self, data, kdims=None, vdims=None, compute=True, **params): self._edgepaths = edgepaths self._validate() - @property def edgepaths(self): return self._edgepaths - @property def nodes(self): return self._nodes diff --git a/holoviews/element/path.py b/holoviews/element/path.py index f80c5bdb8a..ede8e66dc8 100644 --- a/holoviews/element/path.py +++ b/holoviews/element/path.py @@ -10,8 +10,7 @@ import param from ..core import Dataset from ..core.data import MultiInterface -from ..core.dimension import Dimension, asdim -from ..core.util import OrderedDict, disable_constant +from ..core.util import OrderedDict from .geom import Geometry from .selection import SelectionPolyExpr @@ -56,7 +55,9 @@ class Path(SelectionPolyExpr, Geometry): group = param.String(default="Path", constant=True) - datatype = param.ObjectSelector(default=['multitabular', 'spatialpandas']) + datatype = param.ObjectSelector(default=[ + 'multitabular', 'spatialpandas', 'dask_spatialpandas'] + ) def __init__(self, data, kdims=None, vdims=None, **params): if 
isinstance(data, tuple) and len(data) == 2: @@ -80,8 +81,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): paths.append(path.data) data = paths - super(Path, self).__init__(data, kdims=kdims, vdims=vdims, **params) - + super().__init__(data, kdims=kdims, vdims=vdims, **params) def __getitem__(self, key): if isinstance(key, np.ndarray): @@ -98,7 +98,6 @@ def __getitem__(self, key): ystart, ystop = ykey.start, ykey.stop return self.clone(extents=(xstart, ystart, xstop, ystop)) - def select(self, selection_expr=None, selection_specs=None, **selection): """Applies selection by dimension name @@ -147,8 +146,8 @@ def select(self, selection_expr=None, selection_specs=None, **selection): xdim, ydim = self.kdims[:2] x_range = selection.pop(xdim.name, None) y_range = selection.pop(ydim.name, None) - sel = super(Path, self).select(selection_expr, selection_specs, - **selection) + sel = super().select(selection_expr, selection_specs, + **selection) if x_range is None and y_range is None: return sel x_range = x_range if isinstance(x_range, slice) else slice(None) @@ -177,33 +176,6 @@ def split(self, start=None, end=None, datatype=None, **kwargs): return [obj] return self.interface.split(self, start, end, datatype, **kwargs) - # Deprecated methods - - @classmethod - def collapse_data(cls, data_list, function=None, kdims=None, **kwargs): - param.main.param.warning( - 'Path.collapse_data is deprecated, collapsing may now ' - 'be performed through concatenation and aggregation.') - if function is None: - return [path for paths in data_list for path in paths] - else: - raise Exception("Path types are not uniformly sampled and" - "therefore cannot be collapsed with a function.") - - - def __setstate__(self, state): - """ - Ensures old-style unpickled Path types without an interface - use the MultiInterface. 
- - Note: Deprecate as part of 2.0 - """ - self.__dict__ = state - if 'interface' not in state: - self.interface = MultiInterface - super(Dataset, self).__setstate__(state) - - class Contours(Path): """ @@ -237,40 +209,18 @@ class Contours(Path): representation where all paths are separated by NaN values. """ - level = param.Number(default=None, doc=""" - Optional level associated with the set of Contours.""") - vdims = param.List(default=[], constant=True, doc=""" Contours optionally accept a value dimension, corresponding to the supplied values.""") group = param.String(default='Contours', constant=True) - _level_vdim = Dimension('Level') # For backward compatibility - def __init__(self, data, kdims=None, vdims=None, **params): data = [] if data is None else data - if params.get('level') is not None: - self.param.warning( - "The level parameter on %s elements is deprecated, " - "supply the value dimension(s) as columns in the data.", - type(self).__name__) - vdims = vdims or [self._level_vdim] - params['vdims'] = [] - else: - params['vdims'] = vdims - super(Contours, self).__init__(data, kdims=kdims, **params) - if params.get('level') is not None: - with disable_constant(self): - self.vdims = [asdim(d) for d in vdims] - - def dimension_values(self, dim, expanded=True, flat=True): - dimension = self.get_dimension(dim, strict=True) - if dimension in self.vdims and self.level is not None: - if expanded: - return np.full(len(self), self.level) - return np.array([self.level]) - return super(Contours, self).dimension_values(dim, expanded, flat) + super(Contours, self).__init__(data, kdims=kdims, vdims=vdims, **params) + all_scalar = all(self.interface.isunique(self, vdim, per_geom=True) for vdim in self.vdims) + if not all_scalar: + raise ValueError("All value dimensions on a Contours element must be scalar") @@ -326,8 +276,6 @@ class Polygons(Contours): Polygons optionally accept a value dimension, corresponding to the supplied value.""") - _level_vdim = 
Dimension('Value') - # Defines which key the DictInterface uses to look for holes _hole_key = 'holes' @@ -365,7 +313,7 @@ def __new__(cls, *args, **kwargs): return super(Dataset, cls).__new__(cls) def __init__(self, **params): - super(BaseShape, self).__init__([], **params) + super().__init__([], **params) self.interface = MultiInterface def clone(self, *args, **overrides): @@ -415,7 +363,6 @@ class Box(BaseShape): __pos_params = ['x','y', 'height'] def __init__(self, x, y, spec, **params): - if isinstance(spec, tuple): if 'aspect' in params: raise ValueError('Aspect parameter not supported when supplying ' @@ -425,7 +372,7 @@ def __init__(self, x, y, spec, **params): width, height = params.get('width', spec), spec params['width']=params.get('width',width) - super(Box, self).__init__(x=x, y=y, height=height, **params) + super().__init__(x=x, y=y, height=height, **params) half_width = (self.width * self.aspect)/ 2.0 half_height = self.height / 2.0 @@ -494,7 +441,7 @@ def __init__(self, x, y, spec, **params): width, height = params.get('width', spec), spec params['width']=params.get('width',width) - super(Ellipse, self).__init__(x=x, y=y, height=height, **params) + super().__init__(x=x, y=y, height=height, **params) angles = np.linspace(0, 2*np.pi, self.samples) half_width = (self.width * self.aspect)/ 2.0 half_height = self.height / 2.0 @@ -529,7 +476,7 @@ def __init__(self, lbrt, **params): if not isinstance(lbrt, tuple): lbrt = (-lbrt, -lbrt, lbrt, lbrt) - super(Bounds, self).__init__(lbrt=lbrt, **params) + super().__init__(lbrt=lbrt, **params) (l,b,r,t) = self.lbrt xdim, ydim = self.kdims self.data = [OrderedDict([(xdim.name, np.array([l, l, r, r, l])), diff --git a/holoviews/element/raster.py b/holoviews/element/raster.py index 136ecaf048..16e596b92e 100644 --- a/holoviews/element/raster.py +++ b/holoviews/element/raster.py @@ -5,7 +5,7 @@ import param from ..core import util, config, Dimension, Element2D, Overlay, Dataset -from ..core.data import 
ImageInterface, GridInterface +from ..core.data import ImageInterface from ..core.data.interface import DataError from ..core.dimension import dimension_name from ..core.boundingregion import BoundingRegion, BoundingBox @@ -48,8 +48,7 @@ def __init__(self, data, kdims=None, vdims=None, extents=None, **params): if extents is None: (d1, d2) = data.shape[:2] extents = (0, 0, d2, d1) - super(Raster, self).__init__(data, kdims=kdims, vdims=vdims, extents=extents, **params) - + super().__init__(data, kdims=kdims, vdims=vdims, extents=extents, **params) def __getitem__(self, slices): if slices in self.dimensions(): return self.dimension_values(slices) @@ -71,7 +70,6 @@ def __getitem__(self, slices): return self.clone(np.expand_dims(data, axis=slc_types.index(True)), extents=None) - def range(self, dim, data_range=True, dimension_range=True): idx = self.get_dimension_index(dim) if data_range and idx == 2: @@ -82,8 +80,7 @@ def range(self, dim, data_range=True, dimension_range=True): if not dimension_range: return lower, upper return util.dimension_range(lower, upper, dimension.range, dimension.soft_range) - return super(Raster, self).range(dim, data_range, dimension_range) - + return super().range(dim, data_range, dimension_range) def dimension_values(self, dim, expanded=True, flat=True): """ @@ -101,20 +98,7 @@ def dimension_values(self, dim, expanded=True, flat=True): arr = self.data.T return arr.flatten() if flat else arr else: - return super(Raster, self).dimension_values(dim) - - - @classmethod - def collapse_data(cls, data_list, function, kdims=None, **kwargs): - param.main.param.warning( - 'Raster.collapse_data is deprecated, collapsing ' - 'may now be performed through concatenation ' - 'and aggregation.') - if isinstance(function, np.ufunc): - return function.reduce(data_list) - else: - return function(np.dstack(data_list), axis=-1, **kwargs) - + return super().dimension_values(dim) def sample(self, samples=[], bounds=None, **sample_values): """ @@ -398,20 +382,6 
@@ def _validate(self, data_bounds, supplied_bounds): 'are supplied, otherwise they must match the data. To change ' 'the displayed extents set the range on the x- and y-dimensions.') - - def __setstate__(self, state): - """ - Ensures old-style unpickled Image types without an interface - use the ImageInterface. - - Note: Deprecate as part of 2.0 - """ - self.__dict__ = state - if isinstance(self.data, np.ndarray): - self.interface = ImageInterface - super(Dataset, self).__setstate__(state) - - def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): """ @@ -426,15 +396,13 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, sheet_params = dict(bounds=self.bounds, xdensity=self.xdensity, ydensity=self.ydensity) overrides = dict(sheet_params, **overrides) - return super(Image, self).clone(data, shared_data, new_type, link, + return super().clone(data, shared_data, new_type, link, *args, **overrides) - def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): - agg = super(Image, self).aggregate(dimensions, function, spreadfn, **kwargs) + agg = super().aggregate(dimensions, function, spreadfn, **kwargs) return Curve(agg) if isinstance(agg, Dataset) and len(self.vdims) == 1 else agg - def select(self, selection_specs=None, **selection): """ Allows selecting data by the slices, sets and scalar values @@ -529,32 +497,12 @@ def range(self, dim, data_range=True, dimension_range=True): l, b, r, t = self.bounds.lbrt() return (b, t) if idx else (l, r) else: - return super(Image, self).range(dim, data_range, dimension_range) - - - def table(self, datatype=None): - """ - Converts the data Element to a Table, optionally may - specify a supported data type. The default data types - are 'numpy' (for homogeneous data), 'dataframe', and - 'dictionary'. - """ - self.param.warning( - "The table method is deprecated and should no longer " - "be used. Instead cast the %s to a a Table directly." 
- % type(self).__name__) - if datatype and not isinstance(datatype, list): - datatype = [datatype] - from ..element import Table - return self.clone(self.columns(), new_type=Table, - **(dict(datatype=datatype) if datatype else {})) - + return super().range(dim, data_range, dimension_range) def _coord2matrix(self, coord): return self.sheet2matrixidx(*coord) - class RGB(Image): """ RGB represents a regularly spaced 2D grid of an underlying @@ -612,34 +560,33 @@ def rgb(self): """ return self - @classmethod def load_image(cls, filename, height=1, array=False, bounds=None, bare=False, **kwargs): - """ - Returns an raster element or raw numpy array from a PNG image - file, using matplotlib. - - The specified height determines the bounds of the raster - object in sheet coordinates: by default the height is 1 unit - with the width scaled appropriately by the image aspect ratio. - - Note that as PNG images are encoded as RGBA, the red component - maps to the first channel, the green component maps to the - second component etc. For RGB elements, this mapping is - trivial but may be important for subclasses e.g. for HSV - elements. - - Setting bare=True will apply options disabling axis labels - displaying just the bare image. Any additional keyword - arguments will be passed to the Image object. + """Load an image from a file and return an RGB element or array + + Args: + filename: Filename of the image to be loaded + height: Determines the bounds of the image where the width + is scaled relative to the aspect ratio of the image. 
+ array: Whether to return an array (rather than RGB default) + bounds: Bounds for the returned RGB (overrides height) + bare: Whether to hide the axes + kwargs: Additional kwargs to the RGB constructor + + Returns: + RGB element or array """ try: - from matplotlib import pyplot as plt + from PIL import Image except: - raise ImportError("RGB.load_image requires matplotlib.") + raise ImportError("RGB.load_image requires PIL (or Pillow).") - data = plt.imread(filename) - if array: return data + with open(filename, 'rb') as f: + data = np.array(Image.open(f)) + data = data / 255. + + if array: + return data (h, w, _) = data.shape if bounds is None: @@ -647,10 +594,10 @@ def load_image(cls, filename, height=1, array=False, bounds=None, bare=False, ** xoffset, yoffset = w*f/2, h*f/2 bounds=(-xoffset, -yoffset, xoffset, yoffset) rgb = cls(data, bounds=bounds, **kwargs) - if bare: rgb = rgb(plot=dict(xaxis=None, yaxis=None)) + if bare: + rgb.opts(xaxis=None, yaxis=None) return rgb - def __init__(self, data, kdims=None, vdims=None, **params): if isinstance(data, Overlay): images = data.values() @@ -676,8 +623,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): (isinstance(data, dict) and tuple(dimension_name(vd) for vd in vdims)+(alpha.name,) in data)): # Handle all forms of packed value dimensions vdims.append(alpha) - super(RGB, self).__init__(data, kdims=kdims, vdims=vdims, **params) - + super().__init__(data, kdims=kdims, vdims=vdims, **params) class HSV(RGB): @@ -782,7 +728,7 @@ class QuadMesh(Selection2DExpr, Dataset, Element2D): def __init__(self, data, kdims=None, vdims=None, **params): if data is None or isinstance(data, list) and data == []: data = ([], [], np.zeros((0, 0))) - super(QuadMesh, self).__init__(data, kdims, vdims, **params) + super().__init__(data, kdims, vdims, **params) if not self.interface.gridded: raise DataError("%s type expects gridded data, %s is columnar. 
" "To display columnar data as gridded use the HeatMap " @@ -790,22 +736,6 @@ def __init__(self, data, kdims=None, vdims=None, **params): "np.histogram2d)." % (type(self).__name__, self.interface.__name__)) - - def __setstate__(self, state): - """ - Ensures old-style QuadMesh types without an interface can be unpickled. - - Note: Deprecate as part of 2.0 - """ - if 'interface' not in state: - self.interface = GridInterface - x, y = state['_kdims_param_value'] - z = state['_vdims_param_value'][0] - data = state['data'] - state['data'] = {x.name: data[0], y.name: data[1], z.name: data[2]} - super(Dataset, self).__setstate__(state) - - def trimesh(self): """ Converts a QuadMesh into a TriMesh. @@ -880,7 +810,7 @@ class HeatMap(Selection2DExpr, Dataset, Element2D): vdims = param.List(default=[Dimension('z')], constant=True) def __init__(self, data, kdims=None, vdims=None, **params): - super(HeatMap, self).__init__(data, kdims=kdims, vdims=vdims, **params) + super().__init__(data, kdims=kdims, vdims=vdims, **params) self._gridded = None @property @@ -914,7 +844,7 @@ def range(self, dim, data_range=True, dimension_range=True): try: self.gridded._binned = True if self.gridded is self: - return super(HeatMap, self).range(dim, data_range, dimension_range) + return super().range(dim, data_range, dimension_range) else: drange = self.gridded.range(dim, data_range, dimension_range) except: @@ -923,4 +853,4 @@ def range(self, dim, data_range=True, dimension_range=True): self.gridded._binned = False if drange is not None: return drange - return super(HeatMap, self).range(dim, data_range, dimension_range) + return super().range(dim, data_range, dimension_range) diff --git a/holoviews/element/sankey.py b/holoviews/element/sankey.py index 10c21c90d7..07f351c195 100644 --- a/holoviews/element/sankey.py +++ b/holoviews/element/sankey.py @@ -1,5 +1,3 @@ -from __future__ import division - from collections import Counter from functools import cmp_to_key from itertools import cycle @@ 
-10,7 +8,7 @@ from ..core.dimension import Dimension from ..core.data import Dataset from ..core.operation import Operation -from ..core.util import OrderedDict, unique_array, RecursionError, get_param_values +from ..core.util import OrderedDict, get_param_values, unique_array from .graphs import Graph, Nodes, EdgePaths from .util import quadratic_bezier @@ -366,5 +364,5 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): if data is None: overrides['sankey'] = self._sankey - return super(Sankey, self).clone(data, shared_data, new_type, link, - *args, **overrides) + return super().clone(data, shared_data, new_type, link, + *args, **overrides) diff --git a/holoviews/element/selection.py b/holoviews/element/selection.py index 8d9ae07442..5b83ba1176 100644 --- a/holoviews/element/selection.py +++ b/holoviews/element/selection.py @@ -5,7 +5,7 @@ import numpy as np -from ..core import util, NdOverlay +from ..core import Dataset, NdOverlay, util from ..streams import SelectionXY, Selection1D, Lasso from ..util.transform import dim from .annotation import HSpan, VSpan @@ -18,7 +18,7 @@ class SelectionIndexExpr(object): _selection_streams = (Selection1D,) def __init__(self, *args, **kwargs): - super(SelectionIndexExpr, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._index_skip = False def _empty_region(self): @@ -26,15 +26,20 @@ def _empty_region(self): def _get_index_selection(self, index, index_cols): self._index_skip = True + if not index: + return None, None, None + ds = self.clone(kdims=index_cols, new_type=Dataset) if len(index_cols) == 1: index_dim = index_cols[0] - vals = dim(index_dim).apply(self.iloc[index], expanded=False) + vals = dim(index_dim).apply(ds.iloc[index], expanded=False) + if vals.dtype.kind == 'O' and all(isinstance(v, np.ndarray) for v in vals): + vals = [v for arr in vals for v in util.unique_iterator(arr)] expr = dim(index_dim).isin(list(util.unique_iterator(vals))) else: 
get_shape = dim(self.dataset.get_dimension(index_cols[0]), np.shape) index_cols = [dim(self.dataset.get_dimension(c), np.ravel) for c in index_cols] vals = dim(index_cols[0], util.unique_zip, *index_cols[1:]).apply( - self.iloc[index], expanded=True, flat=True + ds.iloc[index], expanded=True, flat=True ) contains = dim(index_cols[0], util.lzip, *index_cols[1:]).isin(vals, object=True) expr = dim(contains, np.reshape, get_shape) diff --git a/holoviews/element/stats.py b/holoviews/element/stats.py index cd0a309860..1b5d9a1d80 100644 --- a/holoviews/element/stats.py +++ b/holoviews/element/stats.py @@ -28,7 +28,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): kdims = kdims or data.dimensions()[:len(self.kdims)] data = tuple(data.dimension_values(d) for d in kdims) params.update(dict(kdims=kdims, vdims=[], _validate_vdims=False)) - super(StatisticsElement, self).__init__(data, **params) + super().__init__(data, **params) if not vdims: self.vdims = [Dimension('Density')] elif len(vdims) > 1: @@ -52,7 +52,6 @@ def dataset(self): return Dataset(self, _validate_vdims=False, **self._dataset) return self._dataset - def range(self, dim, data_range=True, dimension_range=True): """Return the lower and upper bounds of values along dimension. @@ -67,8 +66,7 @@ def range(self, dim, data_range=True, dimension_range=True): Tuple containing the lower and upper bound """ iskdim = self.get_dimension(dim) not in self.vdims - return super(StatisticsElement, self).range(dim, iskdim, dimension_range) - + return super().range(dim, iskdim, dimension_range) def dimension_values(self, dim, expanded=True, flat=True): """Return the values along the requested dimension. @@ -91,7 +89,6 @@ def dimension_values(self, dim, expanded=True, flat=True): return np.full(len(self), np.NaN) return self.interface.values(self, dim, expanded, flat) - def get_dimension_type(self, dim): """Get the type of the requested dimension. 
@@ -113,7 +110,6 @@ def get_dimension_type(self, dim): return np.float64 return self.interface.dimension_type(self, dim) - def dframe(self, dimensions=None, multi_index=False): """Convert dimension values to DataFrame. @@ -137,8 +133,7 @@ def dframe(self, dimensions=None, multi_index=False): 'dimensions. Could not return data for %s ' 'dimension(s).' % (type(self).__name__, ', '.join([d.name for d in vdims]))) - return super(StatisticsElement, self).dframe(dimensions, False) - + return super().dframe(dimensions, False) def columns(self, dimensions=None): """Convert dimension values to a dictionary. @@ -165,7 +160,6 @@ def columns(self, dimensions=None): return OrderedDict([(d.name, self.dimension_values(d)) for d in dimensions]) - class Bivariate(Selection2DExpr, StatisticsElement): """ Bivariate elements are containers for two dimensional data, which @@ -181,7 +175,6 @@ class Bivariate(Selection2DExpr, StatisticsElement): vdims = param.List(default=[Dimension('Density')], bounds=(0,1)) - class Distribution(Selection1DExpr, StatisticsElement): """ Distribution elements provides a representation for a @@ -197,7 +190,6 @@ class Distribution(Selection1DExpr, StatisticsElement): vdims = param.List(default=[Dimension('Density')], bounds=(0, 1)) - class BoxWhisker(Selection1DExpr, Dataset, Element2D): """ BoxWhisker represent data as a distributions highlighting the @@ -239,5 +231,3 @@ class HexTiles(Selection2DExpr, Dataset, Element2D): kdims = param.List(default=[Dimension('x'), Dimension('y')], bounds=(2, 2)) - - diff --git a/holoviews/element/tabular.py b/holoviews/element/tabular.py index a4cf236f40..5ebe99b6d9 100644 --- a/holoviews/element/tabular.py +++ b/holoviews/element/tabular.py @@ -16,7 +16,7 @@ class ItemTable(Element): order. Tables store heterogeneous data with different labels. Dimension objects are also accepted as keys, allowing dimensional - information (e.g type and units) to be associated per heading. + information (e.g. 
type and units) to be associated per heading. """ kdims = param.List(default=[], bounds=(0, 0), doc=""" @@ -50,8 +50,7 @@ def __init__(self, data, **params): if not 'vdims' in params: params['vdims'] = list(data.keys()) str_keys = OrderedDict((dimension_name(k), v) for (k,v) in data.items()) - super(ItemTable, self).__init__(str_keys, **params) - + super().__init__(str_keys, **params) def __getitem__(self, heading): """ @@ -63,23 +62,12 @@ def __getitem__(self, heading): raise KeyError("%r not in available headings." % heading) return np.array(self.data.get(heading, np.NaN)) - - @classmethod - def collapse_data(cls, data, function, **kwargs): - param.main.param.warning( - 'ItemTable.collapse_data is deprecated and ' - 'should no longer be used.') - groups = np.vstack([np.array(odict.values()) for odict in data]).T - return OrderedDict(zip(data[0].keys(), function(groups, axis=-1, **kwargs))) - - def dimension_values(self, dimension, expanded=True, flat=True): dimension = self.get_dimension(dimension, strict=True).name if dimension in self.dimensions('value', label=True): return np.array([self.data.get(dimension, np.NaN)]) else: - return super(ItemTable, self).dimension_values(dimension) - + return super().dimension_values(dimension) def sample(self, samples=[]): if callable(samples): @@ -94,7 +82,6 @@ def reduce(self, dimensions=None, function=None, **reduce_map): raise NotImplementedError('ItemTables are for heterogeneous data, which' 'cannot be reduced.') - def pprint_cell(self, row, col): """ Get the formatted cell value for the given row and column indices. @@ -110,12 +97,10 @@ def pprint_cell(self, row, col): heading = self.vdims[row] return dim.pprint_value(self.data.get(heading.name, np.NaN)) - def hist(self, *args, **kwargs): raise NotImplementedError("ItemTables are not homogeneous and " "don't support histograms.") - def cell_type(self, row, col): """ Returns the cell type given a row and column index. 
The common diff --git a/holoviews/element/tiles.py b/holoviews/element/tiles.py index 5a66b26222..c7fde4774a 100644 --- a/holoviews/element/tiles.py +++ b/holoviews/element/tiles.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from types import FunctionType import param @@ -37,17 +35,17 @@ class Tiles(Element2D): group = param.String(default='Tiles', constant=True) - def __init__(self, data, kdims=None, vdims=None, **params): + def __init__(self, data=None, kdims=None, vdims=None, **params): try: from bokeh.models import MercatorTileSource except: MercatorTileSource = None if MercatorTileSource and isinstance(data, MercatorTileSource): data = data.url - elif not isinstance(data, util.basestring): + elif data is not None and not isinstance(data, str): raise TypeError('%s data should be a tile service URL not a %s type.' % (type(self).__name__, type(data).__name__) ) - super(Tiles, self).__init__(data, kdims=kdims, vdims=vdims, **params) + super().__init__(data, kdims=kdims, vdims=vdims, **params) def range(self, dim, data_range=True, dimension_range=True): return np.nan, np.nan @@ -91,7 +89,7 @@ def easting_northing_to_lon_lat(easting, northing): '© OpenStreetMap contributors, ' '© CartoDB' ), - ('stamen', 'com/t') : ( # to match both 'toner' and 'terrain' + ('stamen', 'net/t') : ( # to match both 'toner' and 'terrain' 'Map tiles by Stamen Design, ' 'under CC BY 3.0. 
' 'Data by OpenStreetMap, ' @@ -125,33 +123,69 @@ def easting_northing_to_lon_lat(easting, northing): ('arcgis','USA_Topo') : ( '© Esri, ' 'NatGeo, i-cubed' + ), + ('arcgis', 'World_Street_Map') : ( + '© Esri — Source: Esri, DeLorme, NAVTEQ, USGS, Intermap, iPC, NRCAN, Esri Japan, METI, Esri China (Hong Kong), Esri (Thailand), TomTom, 2012' ) } +def deprecation_warning(name, url, reason): + def deprecated_tilesource_warning(): + if util.config.raise_deprecated_tilesource_exception: + raise DeprecationWarning('%s tile source is deprecated: %s' % (name, reason)) + param.main.param.warning('%s tile source is deprecated and is likely to be unusable: %s' % (name, reason)) + return Tiles(url, name=name) + return deprecated_tilesource_warning + + # CartoDB basemaps CartoDark = lambda: Tiles('https://cartodb-basemaps-4.global.ssl.fastly.net/dark_all/{Z}/{X}/{Y}.png', name="CartoDark") -CartoEco = lambda: Tiles('http://3.api.cartocdn.com/base-eco/{Z}/{X}/{Y}.png', name="CartoEco") CartoLight = lambda: Tiles('https://cartodb-basemaps-4.global.ssl.fastly.net/light_all/{Z}/{X}/{Y}.png', name="CartoLight") -CartoMidnight = lambda: Tiles('http://3.api.cartocdn.com/base-midnight/{Z}/{X}/{Y}.png', name="CartoMidnight") +CartoMidnight = deprecation_warning('CartoMidnight', + 'https://3.api.cartocdn.com/base-midnight/{Z}/{X}/{Y}.png', + 'no longer publicly available.') +CartoEco = deprecation_warning('CartoEco', + 'https://3.api.cartocdn.com/base-eco/{Z}/{X}/{Y}.png', + 'no longer publicly available.') + # Stamen basemaps -StamenTerrain = lambda: Tiles('http://tile.stamen.com/terrain/{Z}/{X}/{Y}.png', name="StamenTerrain") -StamenTerrainRetina = lambda: Tiles('http://tile.stamen.com/terrain/{Z}/{X}/{Y}@2x.png', name="StamenTerrainRetina") -StamenWatercolor = lambda: Tiles('http://tile.stamen.com/watercolor/{Z}/{X}/{Y}.jpg', name="StamenWatercolor") -StamenToner = lambda: Tiles('http://tile.stamen.com/toner/{Z}/{X}/{Y}.png', name="StamenToner") -StamenTonerBackground = lambda: 
Tiles('http://tile.stamen.com/toner-background/{Z}/{X}/{Y}.png', name="StamenTonerBackground") -StamenLabels = lambda: Tiles('http://tile.stamen.com/toner-labels/{Z}/{X}/{Y}.png', name="StamenLabels") +StamenTerrain = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/terrain/{Z}/{X}/{Y}.png', name="StamenTerrain") +StamenTerrainRetina = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/terrain/{Z}/{X}/{Y}@2x.png', name="StamenTerrainRetina") +StamenWatercolor = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/watercolor/{Z}/{X}/{Y}.jpg', name="StamenWatercolor") +StamenToner = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/toner/{Z}/{X}/{Y}.png', name="StamenToner") +StamenTonerRetina = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/toner/{Z}/{X}/{Y}@2x.png', name="StamenTonerRetina") +StamenTonerBackground = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/toner-background/{Z}/{X}/{Y}.png', name="StamenTonerBackground") +StamenTonerBackgroundRetina = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/toner-background/{Z}/{X}/{Y}@2x.png', name="StamenTonerBackgroundRetina") +StamenLabels = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/toner-labels/{Z}/{X}/{Y}.png', name="StamenLabels") +StamenLabelsRetina = lambda: Tiles('https://stamen-tiles.a.ssl.fastly.net/toner-labels/{Z}/{X}/{Y}@2x.png', name="StamenLabelsRetina") # Esri maps (see https://server.arcgisonline.com/arcgis/rest/services for the full list) EsriImagery = lambda: Tiles('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{Z}/{Y}/{X}.jpg', name="EsriImagery") EsriNatGeo = lambda: Tiles('https://server.arcgisonline.com/ArcGIS/rest/services/NatGeo_World_Map/MapServer/tile/{Z}/{Y}/{X}', name="EsriNatGeo") EsriUSATopo = lambda: Tiles('https://server.arcgisonline.com/ArcGIS/rest/services/USA_Topo_Maps/MapServer/tile/{Z}/{Y}/{X}', name="EsriUSATopo") EsriTerrain = lambda: 
Tiles('https://server.arcgisonline.com/ArcGIS/rest/services/World_Terrain_Base/MapServer/tile/{Z}/{Y}/{X}', name="EsriTerrain") -EsriReference = lambda: Tiles('http://server.arcgisonline.com/ArcGIS/rest/services/Reference/World_Reference_Overlay/MapServer/tile/{Z}/{Y}/{X}', name="EsriReference") +EsriStreet = lambda: Tiles('https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{Z}/{Y}/{X}') +EsriReference = lambda: Tiles('https://server.arcgisonline.com/ArcGIS/rest/services/Reference/World_Reference_Overlay/MapServer/tile/{Z}/{Y}/{X}', name="EsriReference") ESRI = EsriImagery # For backwards compatibility with gv 1.5 + +def wikimedia_replacement(): + if util.config.raise_deprecated_tilesource_exception: + raise DeprecationWarning('Wikipedia tile source no longer available outside ' + 'wikimedia domain as of April 2021.') + + param.main.param.warning('Wikipedia tile source no longer available outside ' + 'wikimedia domain as of April 2021; switching ' + 'to OpenStreetMap (OSM) tile source. 
' + 'See release notes for HoloViews' + ' 1.14.4 for more details') + return Tiles('https://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png', name="OSM") + # Miscellaneous -OSM = lambda: Tiles('http://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png', name="OSM") -Wikipedia = lambda: Tiles('https://maps.wikimedia.org/osm-intl/{Z}/{X}/{Y}@2x.png', name="Wikipedia") +OSM = lambda: Tiles('https://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png', name="OSM") +Wikipedia = wikimedia_replacement -tile_sources = {k: v for k, v in locals().items() if isinstance(v, FunctionType) and k != 'ESRI'} +tile_sources = {k: v for k, v in locals().items() if isinstance(v, FunctionType) and k not in + ['ESRI', 'lon_lat_to_easting_northing', 'easting_northing_to_lon_lat', + 'deprecation_warning', 'wikimedia_replacement']} diff --git a/holoviews/ipython/__init__.py b/holoviews/ipython/__init__.py index da2297083b..22f4b9b711 100644 --- a/holoviews/ipython/__init__.py +++ b/holoviews/ipython/__init__.py @@ -37,7 +37,7 @@ class IPTestCase(ComparisonTestCase): """ def setUp(self): - super(IPTestCase, self).setUp() + super().setUp() try: import IPython from IPython.display import HTML, SVG @@ -84,7 +84,7 @@ class notebook_extension(extension): logo = param.Boolean(default=True, doc="Toggles display of HoloViews logo") inline = param.Boolean(default=True, doc=""" - Whether to inline JS and CSS resources. + Whether to inline JS and CSS resources. If disabled, resources are loaded from CDN if one is available.""") width = param.Number(default=None, bounds=(0, 100), doc=""" @@ -96,9 +96,9 @@ class notebook_extension(extension): format will be displayed). Although the 'html' format is supported across backends, other - formats supported by the current backend (e.g 'png' and 'svg' + formats supported by the current backend (e.g. 'png' and 'svg' using the matplotlib backend) may be used. This may be useful to - export figures to other formats such as PDF with nbconvert. 
""") + export figures to other formats such as PDF with nbconvert.""") allow_jedi_completion = param.Boolean(default=False, doc=""" Whether to allow jedi tab-completion to be enabled in IPython. @@ -113,7 +113,7 @@ class notebook_extension(extension): def __call__(self, *args, **params): comms = params.pop('comms', None) - super(notebook_extension, self).__call__(*args, **params) + super().__call__(*args, **params) # Abort if IPython not found try: ip = params.pop('ip', None) or get_ipython() # noqa (get_ipython) diff --git a/holoviews/ipython/archive.py b/holoviews/ipython/archive.py index 9fca5bd6dc..a6ea96d86c 100644 --- a/holoviews/ipython/archive.py +++ b/holoviews/ipython/archive.py @@ -83,7 +83,7 @@ class NotebookArchive(FileArchive): efields = FileArchive.efields.union({'notebook'}) def __init__(self, **params): - super(NotebookArchive, self).__init__(**params) + super().__init__(**params) self.nbversion = None self.notebook_name = None self.export_success = None @@ -159,8 +159,8 @@ def export(self, timestamp=None): if self._timestamp is None: raise Exception("No timestamp set. Has the archive been initialized?") if self.skip_notebook_export: - super(NotebookArchive, self).export(timestamp=self._timestamp, - info={'notebook':self.notebook_name}) + super().export(timestamp=self._timestamp, + info={'notebook':self.notebook_name}) return self.export_success = None @@ -191,9 +191,8 @@ def add(self, obj=None, filename=None, data=None, info={}, html=None): # Can only associate html for one exporter at a time for exporter in exporters: self.exporters = [exporter] - super(NotebookArchive, self).add(obj, filename, data, - info=dict(info, - notebook=self.notebook_name)) + info = dict(info, notebook=self.notebook_name) + super().add(obj, filename, data, info=info) # Only add substitution if file successfully added to archive. 
new_last_key = list(self._files.keys())[-1] if len(self) else None if new_last_key != initial_last_key: @@ -258,19 +257,19 @@ def _export_with_html(self): # pragma: no cover export_filename = self.snapshot_name # Add the html snapshot - super(NotebookArchive, self).add(filename=export_filename, - data=html, info={'file-ext':'html', - 'mime_type':'text/html', - 'notebook':self.notebook_name}) + info = {'file-ext': 'html', + 'mime_type':'text/html', + 'notebook':self.notebook_name} + super().add(filename=export_filename, data=html, info=info) # Add cleared notebook cleared = self._clear_notebook(node) - super(NotebookArchive, self).add(filename=export_filename, - data=cleared, info={'file-ext':'ipynb', - 'mime_type':'text/json', - 'notebook':self.notebook_name}) + info = {'file-ext':'ipynb', + 'mime_type':'text/json', + 'notebook':self.notebook_name} + super().add(filename=export_filename, data=cleared, info=info) # If store cleared_notebook... save here - super(NotebookArchive, self).export(timestamp=self._timestamp, - info={'notebook':self.notebook_name}) + super().export(timestamp=self._timestamp, + info={'notebook':self.notebook_name}) except: self.traceback = traceback.format_exc() else: diff --git a/holoviews/ipython/display_hooks.py b/holoviews/ipython/display_hooks.py index c4a1c6a85f..d6ecfd118d 100644 --- a/holoviews/ipython/display_hooks.py +++ b/holoviews/ipython/display_hooks.py @@ -117,16 +117,11 @@ def dynamic_optstate(element, state=None): @contextmanager def option_state(element): optstate = dynamic_optstate(element) - raised_exception = False try: yield except Exception: - raised_exception = True + dynamic_optstate(element, state=optstate) raise - finally: - if raised_exception: - dynamic_optstate(element, state=optstate) - def display_hook(fn): @@ -209,7 +204,7 @@ def map_display(vmap, max_frames): @display_hook def layout_display(layout, max_frames): if isinstance(layout, AdjointLayout): - layout = Layout(layout).opts(layout.opts.get('plot')) + 
layout = Layout(layout).opts(layout.opts.get('plot')) if not isinstance(layout, (Layout, NdLayout)): return None nframes = len(unique_dimkeys(layout)[1]) diff --git a/holoviews/ipython/magics.py b/holoviews/ipython/magics.py index 0be3d4bb8f..d6b6378311 100644 --- a/holoviews/ipython/magics.py +++ b/holoviews/ipython/magics.py @@ -113,7 +113,7 @@ class CompositorMagic(Magics): """ def __init__(self, *args, **kwargs): - super(CompositorMagic, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) lines = ['The %compositor line magic is used to define compositors.'] self.compositor.__func__.__doc__ = '\n'.join(lines + [CompositorSpec.__doc__]) @@ -415,10 +415,7 @@ def load_magics(ip): ip.register_magics(OutputMagic) docstring = Store.output_settings._generate_docstring() - if sys.version_info.major==2: - OutputMagic.output.__func__.__doc__ = docstring - else: - OutputMagic.output.__doc__ = docstring + OutputMagic.output.__doc__ = docstring if pyparsing is None: print("%opts magic unavailable (pyparsing cannot be imported)") else: ip.register_magics(OptsMagic) diff --git a/holoviews/ipython/preprocessors.py b/holoviews/ipython/preprocessors.py index 58a1e96958..164015559c 100644 --- a/holoviews/ipython/preprocessors.py +++ b/holoviews/ipython/preprocessors.py @@ -58,7 +58,7 @@ def filter_magic(source, magic, strip=True): Given the source of a cell, filter out the given magic and collect the lines using the magic into a list. - If strip is True, the IPython syntax part of the magic (e.g %magic + If strip is True, the IPython syntax part of the magic (e.g. %magic or %%magic) is stripped from the returned lines. """ filtered, magic_lines=[],[] @@ -111,7 +111,7 @@ def preprocess_cell(self, cell, resources, index): template='hv.util.opts({line!r})') source, opts_lines = filter_magic(source, '%%opts') if opts_lines: - # Escape braces e.g normalization options as they pass through format + # Escape braces (e.g. 
normalization options) as they pass through format template = 'hv.util.opts({options!r}, {{expr}})'.format( options=' '.join(opts_lines).replace('{','{{').replace('}','}}')) source = wrap_cell_expression(source, template) diff --git a/holoviews/ipython/widgets.py b/holoviews/ipython/widgets.py index cecad104fd..b8c054f35d 100644 --- a/holoviews/ipython/widgets.py +++ b/holoviews/ipython/widgets.py @@ -29,7 +29,7 @@ class ProgressBar(ProgressIndicator): Parameter to control display of the progress bar. By default, progress is shown on stdout but this may be disabled e.g. for jobs that log standard output to file. - + If the output mode is set to 'broadcast', a socket is opened on a stated port to broadcast the completion percentage. The RemoteProgress class may then be used to view the progress from @@ -56,7 +56,7 @@ def __init__(self, **params): self.start_time = None self._stdout_display(0, False) ProgressBar.current_progress.append(self) - super(ProgressBar,self).__init__(**params) + super().__init__(**params) def __call__(self, percentage): " Update the progress bar within the specified percent_range" @@ -136,7 +136,7 @@ class RemoteProgress(ProgressBar): port = param.Integer(default=8080, doc="Target port on hostname.") def __init__(self, port, **params): - super(RemoteProgress, self).__init__(port=port, **params) + super().__init__(port=port, **params) def __call__(self): import zmq @@ -188,7 +188,7 @@ class RunProgress(ProgressBar): def __init__(self, **params): - super(RunProgress,self).__init__(**params) + super().__init__(**params) def __call__(self, value): """ @@ -199,11 +199,11 @@ def __call__(self, value): while (value - completed) >= self.interval: self.run_hook(self.interval) completed += self.interval - super(RunProgress, self).__call__(100 * (completed / float(value))) + super().__call__(100 * (completed / float(value))) remaining = value - completed if remaining != 0: self.run_hook(remaining) - super(RunProgress, self).__call__(100) + 
super().__call__(100) def progress(iterator, enum=False, length=None): diff --git a/holoviews/operation/datashader.py b/holoviews/operation/datashader.py index 7b56bfdd2c..808edb9b9d 100644 --- a/holoviews/operation/datashader.py +++ b/holoviews/operation/datashader.py @@ -1,8 +1,6 @@ -from __future__ import absolute_import, division - import warnings -from collections import Callable +from collections.abc import Callable from functools import partial import param @@ -14,6 +12,7 @@ import datashader.transfer_functions as tf import dask.dataframe as dd +from datashader.colors import color_lookup from param.parameterized import bothmethod try: @@ -23,16 +22,17 @@ hammer_bundle, connect_edges = object, object from ..core import (Operation, Element, Dimension, NdOverlay, - CompositeOverlay, Dataset, Overlay, OrderedDict) + CompositeOverlay, Dataset, Overlay, OrderedDict, Store) from ..core.data import PandasInterface, XArrayInterface, DaskInterface, cuDFInterface from ..core.util import ( - Iterable, LooseVersion, basestring, cftime_types, cftime_to_timestamp, - datetime_types, dt_to_int, isfinite, get_param_values, max_range) + Iterable, LooseVersion, cftime_types, cftime_to_timestamp, + datetime_types, dt_to_int, isfinite, get_param_values, max_range +) from ..element import (Image, Path, Curve, RGB, Graph, TriMesh, QuadMesh, Contours, Spikes, Area, Rectangles, Spread, Segments, Scatter, Points, Polygons) from ..element.util import connect_tri_edges_pd -from ..streams import RangeXY, PlotSize +from ..streams import RangeXY, PlotSize, PointerXY ds_version = LooseVersion(ds.__version__) @@ -95,9 +95,9 @@ class ResamplingOperation(LinkableOperation): width and height. 
""") - streams = param.List(default=[PlotSize, RangeXY], doc=""" - List of streams that are applied if dynamic=True, allowing - for dynamic interaction with the plot.""") + streams = param.ClassSelector(default=[PlotSize, RangeXY], class_=(dict, list), doc=""" + List or dictionary of streams that are applied if dynamic=True, + allowing for dynamic interaction with the plot.""") element_type = param.ClassSelector(class_=(Dataset,), instantiate=False, is_instance=False, default=Image, @@ -112,6 +112,8 @@ class ResamplingOperation(LinkableOperation): used to represent this internal state is not freed between calls.""") + _transfer_options = [] + @bothmethod def instance(self_or_cls,**params): filtered = {k:v for k,v in params.items() if k in self_or_cls.param} @@ -220,13 +222,17 @@ class AggregationOperation(ResamplingOperation): aggregator parameter used to define a datashader Reduction. """ - aggregator = param.ClassSelector(class_=(ds.reductions.Reduction, basestring), + aggregator = param.ClassSelector(class_=(ds.reductions.Reduction, str), default=ds.count(), doc=""" Datashader reduction function used for aggregating the data. The aggregator may also define a column to aggregate; if no column is defined the first value dimension of the element will be used. 
May also be defined as a string.""") + vdim_prefix = param.String(default='{kdims} ', allow_None=True, doc=""" + Prefix to prepend to value dimension name where {kdims} + templates in the names of the input element key dimensions.""") + _agg_methods = { 'any': rd.any, 'count': rd.count, @@ -243,7 +249,7 @@ class AggregationOperation(ResamplingOperation): def _get_aggregator(self, element, add_field=True): agg = self.p.aggregator - if isinstance(agg, basestring): + if isinstance(agg, str): if agg not in self._agg_methods: agg_methods = sorted(self._agg_methods) raise ValueError("Aggregation method '%r' is not known; " @@ -291,6 +297,12 @@ def _get_agg_params(self, element, x, y, agg_fn, bounds): params = dict(get_param_values(element), kdims=[x, y], datatype=['xarray'], bounds=bounds) + if self.vdim_prefix: + kdim_list = '_'.join(str(kd) for kd in params['kdims']) + vdim_prefix = self.vdim_prefix.format(kdims=kdim_list) + else: + vdim_prefix = '' + category = None if hasattr(agg_fn, 'reduction'): category = agg_fn.cat_column @@ -302,12 +314,23 @@ def _get_agg_params(self, element, x, y, agg_fn, bounds): raise ValueError("Aggregation column '%s' not found on '%s' element. " "Ensure the aggregator references an existing " "dimension." 
% (column,element)) - name = '%s Count' % column if isinstance(agg_fn, ds.count_cat) else column - vdims = [dims[0].clone(name)] + if isinstance(agg_fn, (ds.count, ds.count_cat)): + if vdim_prefix: + vdim_name = '%s%s Count' % (vdim_prefix, column) + else: + vdim_name = '%s Count' % column + vdims = dims[0].clone(vdim_name, nodata=0) + else: + vdims = dims[0].clone(vdim_prefix + column) elif category: - vdims = Dimension('%s Count' % category) + agg_name = type(agg_fn).__name__.title() + agg_label = '%s %s' % (category, agg_name) + vdims = Dimension('%s%s' % (vdim_prefix, agg_label), label=agg_label) + if agg_name in ('Count', 'Any'): + vdims.nodata = 0 else: - vdims = Dimension('Count') + agg_name = type(agg_fn).__name__.title() + vdims = Dimension('%s%s' % (vdim_prefix, agg_name), label=agg_name, nodata=0) params['vdims'] = vdims return params @@ -606,16 +629,10 @@ def _process(self, element, key=None): df = PandasInterface.as_dframe(element) - if isinstance(agg_fn, (ds.count, ds.any)): - vdim = type(agg_fn).__name__ - else: - vdim = element.get_dimension(agg_fn.column) - cvs = ds.Canvas(plot_width=width, plot_height=height, x_range=x_range, y_range=y_range) - params = dict(get_param_values(element), kdims=[x, y], vdims=vdim, - datatype=['xarray'], bounds=(x0, y0, x1, y1)) + params = self._get_agg_params(element, x, y, agg_fn, (x0, y0, x1, y1)) if width == 0 or height == 0: return self._empty_agg(element, x, y, width, height, xs, ys, agg_fn, **params) @@ -645,7 +662,7 @@ def _process(self, element, key=None): df[y.name] = yvals+df[pos.name] df['_lower'] = yvals-df[neg.name] area = element.clone(df, vdims=[y, '_lower']+element.vdims[3:], new_type=Area) - return super(spread_aggregate, self)._process(area, key=None) + return super()._process(area, key=None) @@ -704,13 +721,7 @@ def _process(self, element, key=None): if xtype == 'datetime': df[x.name] = df[x.name].astype('datetime64[us]').astype('int64') - if isinstance(agg_fn, (ds.count, ds.any)): - vdim = 
type(agg_fn).__name__ - else: - vdim = element.get_dimension(agg_fn.column) - - params = dict(get_param_values(element), kdims=[x, y], vdims=vdim, - datatype=['xarray'], bounds=(x0, y0, x1, y1)) + params = self._get_agg_params(element, x, y, agg_fn, (x0, y0, x1, y1)) if width == 0 or height == 0: return self._empty_agg(element, x, y, width, height, xs, ys, agg_fn, **params) @@ -751,18 +762,10 @@ def _process(self, element, key=None): df[y0d.name] = df[y0d.name].astype('datetime64[us]').astype('int64') df[y1d.name] = df[y1d.name].astype('datetime64[us]').astype('int64') - if isinstance(agg_fn, (ds.count, ds.any)): - vdim = type(agg_fn).__name__ - elif isinstance(agg_fn, ds.count_cat): - vdim = '%s Count' % agg_fn.column - else: - vdim = element.get_dimension(agg_fn.column) - - if isinstance(agg_fn, ds.count_cat): + if isinstance(agg_fn, ds.count_cat) and df[agg_fn.column].dtype.name != 'category': df[agg_fn.column] = df[agg_fn.column].astype('category') - params = dict(get_param_values(element), kdims=[x0d, y0d], vdims=vdim, - datatype=['xarray'], bounds=(x0, y0, x1, y1)) + params = self._get_agg_params(element, x0d, y0d, agg_fn, (x0, y0, x1, y1)) if width == 0 or height == 0: return self._empty_agg(element, x0d, y0d, width, height, xs, ys, agg_fn, **params) @@ -824,7 +827,7 @@ class regrid(AggregationOperation): """ aggregator = param.ClassSelector(default=ds.mean(), - class_=(ds.reductions.Reduction, basestring)) + class_=(ds.reductions.Reduction, str)) expand = param.Boolean(default=False, doc=""" Whether the x_range and y_range should be allowed to expand @@ -864,7 +867,7 @@ def _get_xarrays(self, element, coords, xtype, ytype): for i, vd in enumerate(element.vdims): if element.interface is XArrayInterface: if element.interface.packed(element): - xarr = element.data[..., i] + xarr = element.data[..., i] else: xarr = element.data[vd.name] if 'datetime' in (xtype, ytype): @@ -962,13 +965,13 @@ class contours_rasterize(aggregate): """ aggregator = 
param.ClassSelector(default=ds.mean(), - class_=(ds.reductions.Reduction, basestring)) + class_=(ds.reductions.Reduction, str)) def _get_aggregator(self, element, add_field=True): agg = self.p.aggregator if not element.vdims and agg.column is None and not isinstance(agg, (rd.count, rd.any)): return ds.any() - return super(contours_rasterize, self)._get_aggregator(element, add_field) + return super()._get_aggregator(element, add_field) @@ -981,7 +984,7 @@ class trimesh_rasterize(aggregate): """ aggregator = param.ClassSelector(default=ds.mean(), - class_=(ds.reductions.Reduction, basestring)) + class_=(ds.reductions.Reduction, str)) interpolation = param.ObjectSelector(default='bilinear', objects=['bilinear', 'linear', None, False], doc=""" @@ -990,16 +993,42 @@ class trimesh_rasterize(aggregate): def _precompute(self, element, agg): from datashader.utils import mesh if element.vdims and getattr(agg, 'column', None) not in element.nodes.vdims: - simplices = element.dframe([0, 1, 2, 3]) - verts = element.nodes.dframe([0, 1]) + simplex_dims = [0, 1, 2, 3] + vert_dims = [0, 1] elif element.nodes.vdims: - simplices = element.dframe([0, 1, 2]) - verts = element.nodes.dframe([0, 1, 3]) + simplex_dims = [0, 1, 2] + vert_dims = [0, 1, 3] + else: + raise ValueError("Cannot shade TriMesh without value dimension.") + datatypes = [element.interface.datatype, element.nodes.interface.datatype] + if set(datatypes) == {'dask'}: + dims, node_dims = element.dimensions(), element.nodes.dimensions() + simplices = element.data[[dims[sd].name for sd in simplex_dims]] + verts = element.nodes.data[[node_dims[vd].name for vd in vert_dims]] + else: + if 'dask' in datatypes: + if datatypes[0] == 'dask': + p, n = 'simplexes', 'vertices' + else: + p, n = 'vertices', 'simplexes' + self.param.warning( + "TriMesh %s were provided as dask DataFrame but %s " + "were not. Datashader will not use dask to parallelize " + "rasterization unless both are provided as dask " + "DataFrames." 
% (p, n)) + simplices = element.dframe(simplex_dims) + verts = element.nodes.dframe(vert_dims) for c, dtype in zip(simplices.columns[:3], simplices.dtypes): if dtype.kind != 'i': simplices[c] = simplices[c].astype('int') - return {'mesh': mesh(verts, simplices), 'simplices': simplices, - 'vertices': verts} + mesh = mesh(verts, simplices) + if hasattr(mesh, 'persist'): + mesh = mesh.persist() + return { + 'mesh': mesh, + 'simplices': simplices, + 'vertices': verts + } def _precompute_wireframe(self, element, agg): if hasattr(element, '_wireframe'): @@ -1031,20 +1060,7 @@ def _process(self, element, key=None): wireframe = True precompute = False # TriMesh itself caches wireframe agg = self._get_aggregator(element) if isinstance(agg, (ds.any, ds.count)) else ds.any() - vdim = 'Count' if isinstance(agg, ds.count) else 'Any' - elif getattr(agg, 'column', None): - if agg.column in element.vdims: - vdim = element.get_dimension(agg.column) - elif isinstance(element, TriMesh) and agg.column in element.nodes.vdims: - vdim = element.nodes.get_dimension(agg.column) - else: - raise ValueError("Aggregation column %s not found on TriMesh element." 
- % agg.column) - else: - if isinstance(element, TriMesh) and element.nodes.vdims: - vdim = element.nodes.vdims[0] - else: - vdim = element.vdims[0] + elif getattr(agg, 'column', None) is None: agg = self._get_aggregator(element) if element._plot_id in self._precomputed: @@ -1053,15 +1069,13 @@ def _process(self, element, key=None): precomputed = self._precompute_wireframe(element, agg) else: precomputed = self._precompute(element, agg) - - params = dict(get_param_values(element), kdims=[x, y], - datatype=['xarray'], vdims=[vdim]) + bounds = (x_range[0], y_range[0], x_range[1], y_range[1]) + params = self._get_agg_params(element, x, y, agg, bounds) if width == 0 or height == 0: if width == 0: params['xdensity'] = 1 if height == 0: params['ydensity'] = 1 - bounds = (x_range[0], y_range[0], x_range[1], y_range[1]) - return Image((xs, ys, np.zeros((height, width))), bounds=bounds, **params) + return Image((xs, ys, np.zeros((height, width))), **params) if wireframe: segments = precomputed['segments'] @@ -1095,11 +1109,11 @@ class quadmesh_rasterize(trimesh_rasterize): def _precompute(self, element, agg): if ds_version <= '0.7.0': - return super(quadmesh_rasterize, self)._precompute(element.trimesh(), agg) + return super()._precompute(element.trimesh(), agg) def _process(self, element, key=None): if ds_version <= '0.7.0': - return super(quadmesh_rasterize, self)._process(element, key) + return super()._process(element, key) if element.interface.datatype != 'xarray': element = element.clone(datatype=['xarray']) @@ -1172,9 +1186,9 @@ class shade(LinkableOperation): Callable type must allow mapping colors for supplied values between 0 and 1.""") - normalization = param.ClassSelector(default='eq_hist', - class_=(basestring, Callable), - doc=""" + cnorm = param.ClassSelector(default='eq_hist', + class_=(str, Callable), + doc=""" The normalization operation applied before colormapping. 
Valid options include 'linear', 'log', 'eq_hist', 'cbrt', and any valid transfer function that accepts data, mask, nbins @@ -1242,8 +1256,14 @@ def to_xarray(cls, element): return element data = tuple(element.dimension_values(kd, expanded=False) for kd in element.kdims) - data += tuple(element.dimension_values(vd, flat=False) - for vd in element.vdims) + vdims = list(element.vdims) + # Override nodata temporarily + element.vdims[:] = [vd.clone(nodata=None) for vd in element.vdims] + try: + data += tuple(element.dimension_values(vd, flat=False) + for vd in element.vdims) + finally: + element.vdims[:] = vdims dtypes = [dt for dt in element.datatype if dt != 'xarray'] return element.clone(data, datatype=['xarray']+dtypes, bounds=element.bounds, @@ -1271,7 +1291,7 @@ def _process(self, element, key=None): # Compute shading options depending on whether # it is a categorical or regular aggregate - shade_opts = dict(how=self.p.normalization, + shade_opts = dict(how=self.p.cnorm, min_alpha=self.p.min_alpha, alpha=self.p.alpha) if element.ndims > 2: @@ -1292,12 +1312,18 @@ def _process(self, element, key=None): elif isinstance(self.p.cmap, Callable): colors = [self.p.cmap(s) for s in np.linspace(0, 1, 256)] shade_opts['cmap'] = map(self.rgb2hex, colors) + elif isinstance(self.p.cmap, str): + if self.p.cmap.startswith('#') or self.p.cmap in color_lookup: + shade_opts['cmap'] = self.p.cmap + else: + from ..plotting.util import process_cmap + shade_opts['cmap'] = process_cmap(self.p.cmap) else: shade_opts['cmap'] = self.p.cmap if self.p.clims: shade_opts['span'] = self.p.clims - elif ds_version > '0.5.0' and self.p.normalization != 'eq_hist': + elif ds_version > '0.5.0' and self.p.cnorm != 'eq_hist': shade_opts['span'] = element.range(vdim) params = dict(get_param_values(element), kdims=kdims, @@ -1325,14 +1351,14 @@ class geometry_rasterize(AggregationOperation): """ aggregator = param.ClassSelector(default=ds.mean(), - class_=(ds.reductions.Reduction, basestring)) + 
class_=(ds.reductions.Reduction, str)) def _get_aggregator(self, element, add_field=True): agg = self.p.aggregator - if (not (element.vdims or isinstance(agg, basestring)) and + if (not (element.vdims or isinstance(agg, str)) and agg.column is None and not isinstance(agg, (rd.count, rd.any))): return ds.count() - return super(geometry_rasterize, self)._get_aggregator(element, add_field) + return super()._get_aggregator(element, add_field) def _process(self, element, key=None): agg_fn = self._get_aggregator(element) @@ -1353,7 +1379,7 @@ def _process(self, element, key=None): if element._plot_id in self._precomputed: data, col = self._precomputed[element._plot_id] else: - if element.interface.datatype != 'spatialpandas': + if 'spatialpandas' not in element.interface.datatype: element = element.clone(datatype=['spatialpandas']) data = element.data col = element.interface.geo_column(data) @@ -1361,7 +1387,7 @@ def _process(self, element, key=None): if self.p.precompute: self._precomputed[element._plot_id] = (data, col) - if isinstance(agg_fn, ds.count_cat): + if isinstance(agg_fn, ds.count_cat) and data[agg_fn.column].dtype.name != 'category': data[agg_fn.column] = data[agg_fn.column].astype('category') if isinstance(element, Polygons): @@ -1406,7 +1432,7 @@ class rasterize(AggregationOperation): dimensions of the linked plot and the ranges of the axes. 
""" - aggregator = param.ClassSelector(class_=(ds.reductions.Reduction, basestring), + aggregator = param.ClassSelector(class_=(ds.reductions.Reduction, str), default='default') interpolation = param.ObjectSelector( @@ -1416,8 +1442,8 @@ class rasterize(AggregationOperation): _transforms = [(Image, regrid), (Polygons, geometry_rasterize), - (lambda x: (isinstance(x, Path) and - x.interface.datatype == 'spatialpandas'), + (lambda x: (isinstance(x, (Path, Points)) and + 'spatialpandas' in x.interface.datatype), geometry_rasterize), (TriMesh, trimesh_rasterize), (QuadMesh, quadmesh_rasterize), @@ -1591,7 +1617,8 @@ def _process(self, element, key=None): elif isinstance(element, Image): data = element.clone(datatype=['xarray']).data[element.vdims[0].name] else: - raise ValueError('spreading can only be applied to Image or RGB Elements.') + raise ValueError('spreading can only be applied to Image or RGB Elements. ' + 'Received object of type %s' % str(type(element))) kwargs = {} array = self._apply_spreading(data) @@ -1711,3 +1738,267 @@ class directly_connect_edges(_connect_edges, connect_edges): def _bundle(self, position_df, edges_df): return connect_edges.__call__(self, position_df, edges_df) + + +def identity(x): return x + + +class inspect_mask(Operation): + """ + Operation used to display the inspection mask, for use with other + inspection operations. Can be used directly but is more commonly + constructed using the mask property of the corresponding inspector + operation. 
+ """ + + pixels = param.Integer(default=3, doc=""" + Size of the mask that should match the pixels parameter used in + the associated inspection operation.""") + + streams = param.ClassSelector(default=[PointerXY], class_=(dict, list)) + x = param.Number(default=0) + y = param.Number(default=0) + + @classmethod + def _distance_args(cls, element, x_range, y_range, pixels): + ycount, xcount = element.interface.shape(element, gridded=True) + x_delta = abs(x_range[1] - x_range[0]) / xcount + y_delta = abs(y_range[1] - y_range[0]) / ycount + return (x_delta*pixels, y_delta*pixels) + + def _process(self, raster, key=None): + if isinstance(raster, RGB): + raster = raster[..., raster.vdims[-1]] + x_range, y_range = raster.range(0), raster.range(1) + xdelta, ydelta = self._distance_args(raster, x_range, y_range, self.p.pixels) + x, y = self.p.x, self.p.y + return self._indicator(raster.kdims, x, y, xdelta, ydelta) + + def _indicator(self, kdims, x, y, xdelta, ydelta): + rect = np.array([(x-xdelta/2,y-ydelta/2), (x+xdelta/2, y-ydelta/2), + (x+xdelta/2, y+ydelta/2), (x-xdelta/2, y+ydelta/2)]) + data = {(str(kdims[0]),str(kdims[1])):rect} + return Polygons(data, kdims=kdims) + + +class inspect(Operation): + """ + Generalized inspect operation that detects the appropriate indicator + type. + """ + + pixels = param.Integer(default=3, doc=""" + Number of pixels in data space around the cursor point to search + for hits in. The hit within this box mask that is closest to the + cursor's position is displayed.""") + + null_value = param.Number(default=0, doc=""" + Value of raster which indicates no hits. For instance zero for + count aggregator (default) and commonly NaN for other (float) + aggregators. For RGBA images, the alpha channel is used which means + zero alpha acts as the null value.""") + + value_bounds = param.NumericTuple(default=None, length=2, allow_None=True, doc=""" + If not None, a numeric bounds for the pixel under the cursor in + order for hits to be computed. 
Useful for count aggregators where + a value of (1,1000) would make sure no more than a thousand + samples will be searched.""") + + hits = param.DataFrame(default=pd.DataFrame(), allow_None=True) + + max_indicators = param.Integer(default=1, doc=""" + Maximum number of indicator elements to display within the mask + of size pixels. Points are prioritized by distance from the + cursor point. This means that the default value of one shows the + single closest sample to the cursor. Note that this limit is not + applies to the hits parameter.""") + + transform = param.Callable(default=identity, doc=""" + Function that transforms the hits dataframe before it is passed to + the Points element. Can be used to customize the value dimensions + e.g. to implement custom hover behavior.""") + + # Stream values and overrides + streams = param.ClassSelector(default=dict(x=PointerXY.param.x, + y=PointerXY.param.y), + class_=(dict, list)) + + x = param.Number(default=0, doc="x-position to inspect.") + + y = param.Number(default=0, doc="y-position to inspect.") + + _dispatch = {} + + @property + def mask(self): + return inspect_mask.instance(pixels=self.p.pixels) + + def _update_hits(self, event): + self.hits = event.obj.hits + + @bothmethod + def instance(self_or_cls, **params): + inst = super(inspect, self_or_cls).instance(**params) + inst._op = None + return inst + + def _process(self, raster, key=None): + input_type = self._get_input_type(raster.pipeline.operations) + inspect_operation = self._dispatch[input_type] + if self._op is None: + self._op = inspect_operation.instance() + self._op.param.watch(self._update_hits, 'hits') + elif not isinstance(self._op, inspect_operation): + raise ValueError("Cannot reuse inspect instance on different " + "datashader input type.") + self._op.p = self.p + return self._op._process(raster) + + def _get_input_type(self, operations): + for op in operations: + output_type = getattr(op, 'output_type', None) + if output_type is not None: + if 
output_type in [el[0] for el in rasterize._transforms]: + # Datashader output types that are also input types e.g for regrid + if issubclass(output_type, (Image, RGB)): + continue + return output_type + raise RuntimeError('Could not establish input element type ' + 'for datashader pipeline in the inspect operation.') + + + +class inspect_base(inspect): + """ + Given datashaded aggregate (Image) output, return a set of + (hoverable) points sampled from those near the cursor. + """ + + def _process(self, raster, key=None): + self._validate(raster) + if isinstance(raster, RGB): + raster = raster[..., raster.vdims[-1]] + x_range, y_range = raster.range(0), raster.range(1) + xdelta, ydelta = self._distance_args(raster, x_range, y_range, self.p.pixels) + x, y = self.p.x, self.p.y + val = raster[x-xdelta:x+xdelta, y-ydelta:y+ydelta].reduce(function=np.nansum) + if np.isnan(val): + val = self.p.null_value + + if ((self.p.value_bounds and + not (self.p.value_bounds[0] < val < self.p.value_bounds[1])) + or val == self.p.null_value): + result = self._empty_df(raster.dataset) + else: + masked = self._mask_dataframe(raster, x, y, xdelta, ydelta) + result = self._sort_by_distance(raster, masked, x, y) + + self.hits = result + df = self.p.transform(result) + return self._element(raster, df.iloc[:self.p.max_indicators]) + + @classmethod + def _distance_args(cls, element, x_range, y_range, pixels): + ycount, xcount = element.interface.shape(element, gridded=True) + x_delta = abs(x_range[1] - x_range[0]) / xcount + y_delta = abs(y_range[1] - y_range[0]) / ycount + return (x_delta*pixels, y_delta*pixels) + + @classmethod + def _empty_df(cls, dataset): + if 'dask' in dataset.interface.datatype: + return dataset.data._meta.iloc[:0] + elif dataset.interface.datatype in ['pandas', 'geopandas', 'spatialpandas']: + return dataset.data.head(0) + return dataset.iloc[:0].dframe() + + @classmethod + def _mask_dataframe(cls, raster, x, y, xdelta, ydelta): + """ + Mask the dataframe around the 
specified x and y position with + the given x and y deltas + """ + ds = raster.dataset + x0, x1, y0, y1 = x-xdelta, x+xdelta, y-ydelta, y+ydelta + if 'spatialpandas' in ds.interface.datatype: + df = ds.data.cx[x0:x1, y0:y1] + return df.compute() if hasattr(df, 'compute') else df + xdim, ydim = raster.kdims + query = {xdim.name: (x0, x1), ydim.name: (y0, y1)} + return ds.select(**query).dframe() + + @classmethod + def _validate(cls, raster): + pass + + @classmethod + def _vdims(cls, raster, df): + ds = raster.dataset + if 'spatialpandas' in ds.interface.datatype: + coords = [ds.interface.geo_column(ds.data)] + else: + coords = [kd.name for kd in raster.kdims] + return [col for col in df.columns if col not in coords] + + + +class inspect_points(inspect_base): + + @classmethod + def _element(cls, raster, df): + return Points(df, kdims=raster.kdims, vdims=cls._vdims(raster, df)) + + @classmethod + def _sort_by_distance(cls, raster, df, x, y): + """ + Returns a dataframe of hits within a given mask around a given + spatial location, sorted by distance from that location. + """ + ds = raster.dataset.clone(df) + xs, ys = (ds.dimension_values(kd) for kd in raster.kdims) + dx, dy = xs - x, ys - y + distances = pd.Series(dx*dx + dy*dy) + return df.iloc[distances.argsort().values] + + + +class inspect_polygons(inspect_base): + + @classmethod + def _validate(cls, raster): + if 'spatialpandas' not in raster.dataset.interface.datatype: + raise ValueError("inspect_polygons only supports spatialpandas datatypes.") + + @classmethod + def _element(cls, raster, df): + polygons = Polygons(df, kdims=raster.kdims, vdims=cls._vdims(raster, df)) + if Store.loaded_backends() != []: + return polygons.opts(color_index=None) + else: + return polygons + + @classmethod + def _sort_by_distance(cls, raster, df, x, y): + """ + Returns a dataframe of hits within a given mask around a given + spatial location, sorted by distance from that location. 
+ """ + xs, ys = [], [] + for geom in df.geometry.array: + gxs, gys = geom.flat_values[::2], geom.flat_values[1::2] + if not len(gxs): + xs.append(np.nan) + ys.append(np.nan) + else: + xs.append((np.min(gxs)+np.max(gxs))/2) + ys.append((np.min(gys)+np.max(gys))/2) + dx, dy = np.array(xs) - x, np.array(ys) - y + distances = pd.Series(dx*dx + dy*dy) + return df.iloc[distances.argsort().values] + + + +inspect._dispatch = { + Points: inspect_points, + Polygons: inspect_polygons +} diff --git a/holoviews/operation/element.py b/holoviews/operation/element.py index 63d0a32ffd..a9c0dac2a0 100644 --- a/holoviews/operation/element.py +++ b/holoviews/operation/element.py @@ -2,8 +2,6 @@ Collection of either extremely generic or simple Operation examples. """ -from __future__ import division - from distutils.version import LooseVersion import numpy as np @@ -14,11 +12,11 @@ from ..core import (Operation, NdOverlay, Overlay, GridMatrix, HoloMap, Dataset, Element, Collator, Dimension) from ..core.data import ArrayInterface, DictInterface, default_datatype -from ..core.data.interface import dask_array_module -from ..core.util import (group_sanitizer, label_sanitizer, pd, - basestring, datetime_types, isfinite, dt_to_int, - isdatetime, is_dask_array, is_cupy_array, - is_ibis_expr) +from ..core.data.util import dask_array_module +from ..core.util import ( + group_sanitizer, label_sanitizer, pd, datetime_types, isfinite, + dt_to_int, isdatetime, is_dask_array, is_cupy_array, is_ibis_expr +) from ..element.chart import Histogram, Scatter from ..element.raster import Image, RGB from ..element.path import Contours, Polygons @@ -55,7 +53,7 @@ class operation(Operation): checking. May be used to declare useful information to other code in - HoloViews e.g required for tab-completion support of operations + HoloViews, e.g. 
required for tab-completion support of operations registered with compositors.""") group = param.String(default='Operation', doc=""" @@ -659,7 +657,7 @@ class histogram(Operation): frequency_label = param.String(default=None, doc=""" Format string defining the label of the frequency dimension of the Histogram.""") - groupby = param.ClassSelector(default=None, class_=(basestring, Dimension), doc=""" + groupby = param.ClassSelector(default=None, class_=(str, Dimension), doc=""" Defines a dimension to group the Histogram returning an NdOverlay of Histograms.""") log = param.Boolean(default=False, doc=""" @@ -668,7 +666,7 @@ class histogram(Operation): mean_weighted = param.Boolean(default=False, doc=""" Whether the weighted frequencies are averaged.""") - normed = param.ObjectSelector(default=True, + normed = param.ObjectSelector(default=False, objects=[True, False, 'integral', 'height'], doc=""" Controls normalization behavior. If `True` or `'integral'`, then @@ -719,7 +717,7 @@ def _process(self, element, key=None): if is_cupy: import cupy full_cupy_support = LooseVersion(cupy.__version__) > '8.0' - if not full_cupy_support and (normed or self.p.weight_dimension): + if not full_cupy_support and (normed or self.p.weight_dimension): data = cupy.asnumpy(data) is_cupy = False else: @@ -857,7 +855,8 @@ class decimate(Operation): random_seed = param.Integer(default=42, doc=""" Seed used to initialize randomization.""") - streams = param.List(default=[RangeXY], doc=""" + streams = param.ClassSelector(default=[RangeXY], class_=(dict, list), + doc=""" List of streams that are applied if dynamic=True, allowing for dynamic interaction with the plot.""") @@ -1027,8 +1026,8 @@ class gridmatrix(param.ParameterizedFunction): or any other function which returns a viewable element.""") overlay_dims = param.List(default=[], doc=""" - If a HoloMap is supplied this will allow overlaying one or - more of it's key dimensions.""") + If a HoloMap is supplied, this will allow overlaying one 
or + more of its key dimensions.""") def __call__(self, data, **params): p = param.ParamOverrides(self, params) @@ -1055,7 +1054,7 @@ def _process(self, p, element, ranges={}): el_data = element.data # Get dimensions to plot against each other - types = (str, basestring, np.str_, np.object_)+datetime_types + types = (str, np.str_, np.object_)+datetime_types dims = [d for d in element.dimensions() if _is_number(element.range(d)[0]) and not issubclass(element.get_dimension_type(d), types)] diff --git a/holoviews/operation/normalization.py b/holoviews/operation/normalization.py index c7321c7f6f..2797ebdc73 100644 --- a/holoviews/operation/normalization.py +++ b/holoviews/operation/normalization.py @@ -81,7 +81,7 @@ class Normalization(Operation): def __call__(self, element, ranges={}, keys=None, **params): params = dict(params,ranges=ranges, keys=keys) - return super(Normalization, self).__call__(element, **params) + return super().__call__(element, **params) def process_element(self, element, key, ranges={}, keys=None, **params): diff --git a/holoviews/operation/stats.py b/holoviews/operation/stats.py index b0c7d63c66..797e95e9c6 100644 --- a/holoviews/operation/stats.py +++ b/holoviews/operation/stats.py @@ -3,7 +3,7 @@ from ..core import Dimension, Dataset, NdOverlay from ..core.operation import Operation -from ..core.util import basestring, cartesian_product, isfinite +from ..core.util import cartesian_product, isfinite from ..element import (Curve, Area, Image, Distribution, Bivariate, Contours, Polygons) @@ -53,7 +53,7 @@ class univariate_kde(Operation): n_samples = param.Integer(default=100, doc=""" Number of samples to compute the KDE over.""") - groupby = param.ClassSelector(default=None, class_=(basestring, Dimension), doc=""" + groupby = param.ClassSelector(default=None, class_=(str, Dimension), doc=""" Defines a dimension to group the Histogram returning an NdOverlay of Histograms.""") _per_element = True diff --git a/holoviews/plotting/__init__.py 
b/holoviews/plotting/__init__.py index bde2521601..1942c59383 100644 --- a/holoviews/plotting/__init__.py +++ b/holoviews/plotting/__init__.py @@ -5,16 +5,29 @@ This file defines the HTML tags used to wrap rendered output for display in the IPython Notebook (optional). """ -from __future__ import absolute_import - from ..core.options import Cycle, Compositor -from ..element import Area, Polygons +from ..element import Area, Image, QuadMesh, Polygons, Raster from ..element.sankey import _layout_sankey, Sankey from .plot import Plot from .renderer import Renderer, HTML_TAGS # noqa (API import) -from .util import list_cmaps # noqa (API import) +from .util import apply_nodata, list_cmaps # noqa (API import) from ..operation.stats import univariate_kde, bivariate_kde +Compositor.register(Compositor("Image", apply_nodata, None, + 'data', transfer_options=True, + transfer_parameters=True, + output_type=Image, + backends=['bokeh', 'matplotlib', 'plotly'])) +Compositor.register(Compositor("Raster", apply_nodata, None, + 'data', transfer_options=True, + transfer_parameters=True, + output_type=Raster, + backends=['bokeh', 'matplotlib', 'plotly'])) +Compositor.register(Compositor("QuadMesh", apply_nodata, None, + 'data', transfer_options=True, + transfer_parameters=True, + output_type=QuadMesh, + backends=['bokeh', 'matplotlib', 'plotly'])) Compositor.register(Compositor("Distribution", univariate_kde, None, 'data', transfer_options=True, transfer_parameters=True, diff --git a/holoviews/plotting/bokeh/__init__.py b/holoviews/plotting/bokeh/__init__.py index 3e6cbe840f..7cb90b0074 100644 --- a/holoviews/plotting/bokeh/__init__.py +++ b/holoviews/plotting/bokeh/__init__.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, unicode_literals +import sys import numpy as np @@ -36,6 +36,7 @@ from .graphs import GraphPlot, NodePlot, TriMeshPlot, ChordPlot from .heatmap import HeatMapPlot, RadialHeatMapPlot from .hex_tiles import HexTilesPlot +from .links import 
LinkCallback # noqa (API import) from .path import PathPlot, PolygonPlot, ContourPlot from .plot import GridPlot, LayoutPlot, AdjointLayoutPlot from .raster import RasterPlot, RGBPlot, HSVPlot, QuadMeshPlot @@ -150,7 +151,9 @@ # Register bokeh.palettes with Palette and Cycle def colormap_generator(palette): - return lambda value: palette[int(value*(len(palette)-1))] + # Epsilon ensures float precision doesn't cause issues (#4911) + epsilon = sys.float_info.epsilon*10 + return lambda value: palette[int(value*(len(palette)-1)+epsilon)] Palette.colormaps.update({name: colormap_generator(p[max(p.keys())]) for name, p in all_palettes.items()}) @@ -158,7 +161,7 @@ def colormap_generator(palette): Cycle.default_cycles.update({name: p[max(p.keys())] for name, p in all_palettes.items() if max(p.keys()) < 256}) -dflt_cmap = 'fire' +dflt_cmap = config.default_cmap all_palettes['fire'] = {len(fire): fire} options = Store.options(backend='bokeh') @@ -175,28 +178,30 @@ def colormap_generator(palette): options.Spread = Options('style', color=Cycle(), alpha=0.6, line_color='black', muted_alpha=0.2) options.Bars = Options('style', color=Cycle(), line_color='black', bar_width=0.8, muted_alpha=0.2) -options.Spikes = Options('style', color='black', cmap='fire', muted_alpha=0.2) +options.Spikes = Options('style', color='black', cmap=dflt_cmap, muted_alpha=0.2) options.Area = Options('style', color=Cycle(), alpha=1, line_color='black', muted_alpha=0.2) options.VectorField = Options('style', color='black', muted_alpha=0.2) # Paths options.Contours = Options('plot', show_legend=True) -options.Contours = Options('style', color=Cycle(), cmap='viridis') -options.Path = Options('style', color=Cycle(), cmap='viridis') +options.Contours = Options('style', color=Cycle(), cmap=dflt_cmap) +options.Path = Options('style', color=Cycle(), cmap=dflt_cmap) options.Box = Options('style', color='black') options.Bounds = Options('style', color='black') options.Ellipse = Options('style', color='black') 
options.Polygons = Options('style', color=Cycle(), line_color='black', - cmap='viridis') + cmap=dflt_cmap) +options.Rectangles = Options('style', cmap=dflt_cmap) +options.Segments = Options('style', cmap=dflt_cmap) # Geometries options.Rectangles = Options('style', line_color='black') # Rasters -options.Image = Options('style', cmap=dflt_cmap) -options.Raster = Options('style', cmap=dflt_cmap) -options.QuadMesh = Options('style', cmap=dflt_cmap, line_alpha=0) -options.HeatMap = Options('style', cmap='RdYlBu_r', annular_line_alpha=0, +options.Image = Options('style', cmap=config.default_gridded_cmap) +options.Raster = Options('style', cmap=config.default_gridded_cmap) +options.QuadMesh = Options('style', cmap=config.default_gridded_cmap, line_alpha=0) +options.HeatMap = Options('style', cmap=config.default_heatmap_cmap, annular_line_alpha=0, xmarks_line_color="#FFFFFF", xmarks_line_width=3, ymarks_line_color="#FFFFFF", ymarks_line_width=3) @@ -223,7 +228,7 @@ def colormap_generator(palette): edge_line_color='black', node_hover_fill_color='limegreen', edge_line_width=1, edge_hover_line_color='limegreen', edge_nonselection_alpha=0.2, edge_nonselection_line_color='black', - node_nonselection_alpha=0.2, + node_nonselection_alpha=0.2, cmap=dflt_cmap ) options.TriMesh = Options('plot', tools=[]) options.Chord = Options('style', node_size=15, node_color=Cycle(), diff --git a/holoviews/plotting/bokeh/annotation.py b/holoviews/plotting/bokeh/annotation.py index 15bed43d09..f6a8f29fb6 100644 --- a/holoviews/plotting/bokeh/annotation.py +++ b/holoviews/plotting/bokeh/annotation.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict import param @@ -20,7 +18,7 @@ '-': None} from bokeh.transform import dodge -from ...core.util import datetime_types, dimension_sanitizer, basestring +from ...core.util import datetime_types, dimension_sanitizer from ...element import HLine, VLine, VSpan from ..plot import 
GenericElementPlot from .element import AnnotationPlot, ElementPlot, CompositeElementPlot, ColorbarPlot @@ -32,11 +30,11 @@ class TextPlot(ElementPlot, AnnotationPlot): - style_opts = text_properties+['color', 'angle', 'visible'] + style_opts = base_properties+text_properties+['color', 'angle'] _plot_methods = dict(single='text', batched='text') selection_display = None - + def get_data(self, element, ranges, style): mapping = dict(x='x', y='y', text='text') if self.static_source: @@ -87,12 +85,6 @@ class LabelsPlot(ColorbarPlot, AnnotationPlot): yoffset = param.Number(default=None, doc=""" Amount of offset to apply to labels along x-axis.""") - # Deprecated options - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `color=dim('color')`""") - selection_display = BokehOverlaySelectionDisplay() style_opts = base_properties + text_properties + ['cmap', 'angle'] @@ -118,19 +110,6 @@ def get_data(self, element, ranges, style): mapping['y'] = dodge(ydim, self.yoffset) data[tdim] = [dims[2].pprint_value(v) for v in element.dimension_values(2)] self._categorize_data(data, (xdim, ydim), element.dimensions()) - - cdim = element.get_dimension(self.color_index) - if cdim is None: - return data, mapping, style - - cdata, cmapping = self._get_color_data(element, ranges, style, name='text_color') - if dims[2] is cdim and cdata: - # If color dim is same as text dim, rename color column - data['text_color'] = cdata[tdim] - mapping['text_color'] = dict(cmapping['text_color'], field='text_color') - else: - data.update(cdata) - mapping.update(cmapping) return data, mapping, style @@ -176,7 +155,7 @@ def get_extents(self, element, ranges=None, range_type='combined'): if self.invert_axes: dim = 'x' if dim == 'y' else 'x' ranges[dim]['soft'] = loc, loc - return super(LineAnnotationPlot, self).get_extents(element, ranges, range_type) + return super().get_extents(element, ranges, 
range_type) class BoxAnnotationPlot(ElementPlot, AnnotationPlot): @@ -363,7 +342,7 @@ def _init_glyph(self, plot, mapping, properties, key): else: properties = {p if p == 'source' else 'text_'+p: v for p, v in properties.items()} - renderer, glyph = super(ArrowPlot, self)._init_glyph( + renderer, glyph = super()._init_glyph( plot, mapping, properties, key) plot.renderers.append(renderer) return renderer, glyph @@ -431,7 +410,7 @@ class DivPlot(BokehPlot, GenericElementPlot, AnnotationPlot): selection_display = None def __init__(self, element, plot=None, **params): - super(DivPlot, self).__init__(element, **params) + super().__init__(element, **params) self.callbacks = [] self.handles = {} if plot is None else self.handles['plot'] self.static = len(self.hmap) == 1 and len(self.keys) == len(self.hmap) diff --git a/holoviews/plotting/bokeh/callbacks.py b/holoviews/plotting/bokeh/callbacks.py index c010252388..2ccceeb3a1 100644 --- a/holoviews/plotting/bokeh/callbacks.py +++ b/holoviews/plotting/bokeh/callbacks.py @@ -3,25 +3,23 @@ import time from collections import defaultdict -from functools import partial import numpy as np import panel as pn -import param from bokeh.models import ( - CustomJS, FactorRange, DatetimeAxis, ToolbarBox, Range1d, - DataRange1d, PolyDrawTool, BoxEditTool, PolyEditTool, - FreehandDrawTool, PointDrawTool + CustomJS, FactorRange, DatetimeAxis, Range1d, DataRange1d, + PolyDrawTool, BoxEditTool, PolyEditTool, FreehandDrawTool, + PointDrawTool ) from panel.io.state import state -from pyviz_comms import JS_CALLBACK +from panel.io.model import hold from tornado import gen from ...core import OrderedDict from ...core.options import CallbackError from ...core.util import ( - datetime_types, dimension_sanitizer, isscalar, dt64_to_dt + datetime_types, dimension_sanitizer, dt64_to_dt ) from ...element import Table from ...streams import ( @@ -31,29 +29,81 @@ BoxEdit, PointDraw, PolyDraw, PolyEdit, CDSStream, FreehandDraw, CurveEdit, SelectionXY, 
Lasso, SelectMode ) -from ..links import Link, RectanglesTableLink, DataLink, RangeToolLink, SelectionLink, VertexTableLink -from ..plot import GenericElementPlot, GenericOverlayPlot -from .util import convert_timestamp +from .util import bokeh_version, convert_timestamp +if bokeh_version >= '2.3.0': + CUSTOM_TOOLTIP = 'description' +else: + CUSTOM_TOOLTIP = 'custom_tooltip' -class MessageCallback(object): + +class Callback(object): """ - A MessageCallback is an abstract baseclass used to supply Streams - with events originating from bokeh plot interactions. The baseclass - defines how messages are handled and the basic specification required - to define a Callback. + Provides a baseclass to define callbacks, which return data from + bokeh model callbacks, events and attribute changes. The callback + then makes this data available to any streams attached to it. + + The definition of a callback consists of a number of components: + + * models : Defines which bokeh models the callback will be + attached on referencing the model by its key in + the plots handles, e.g. this could be the x_range, + y_range, plot, a plotting tool or any other + bokeh mode. + + * attributes : The attributes define which attributes to send + back to Python. They are defined as a dictionary + mapping between the name under which the variable + is made available to Python and the specification + of the attribute. The specification should start + with the variable name that is to be accessed and + the location of the attribute separated by + periods. All models defined by the models and can + be addressed in this way, e.g. to get the start of + the x_range as 'x' you can supply {'x': + 'x_range.attributes.start'}. Additionally certain + handles additionally make the cb_obj variables + available containing additional information about + the event. 
+ + * on_events : If the Callback should listen to bokeh events this + should declare the types of event as a list (optional) + + * on_changes : If the Callback should listen to model attribute + changes on the defined ``models`` (optional) + + If either on_events or on_changes are declared the Callback will + be registered using the on_event or on_change machinery, otherwise + it will be treated as a regular callback on the model. The + callback can also define a _process_msg method, which can modify + the data sent by the callback before it is passed to the streams. + + A callback supports three different throttling modes: + + - adaptive (default): The callback adapts the throttling timeout + depending on the rolling mean of the time taken to process each + message. The rolling window is controlled by the `adaptive_window` + value. + - throttle: Uses the fixed `throttle_timeout` as the minimum amount + of time between events. + - debounce: Processes the message only when no new event has been + received within the `throttle_timeout` duration. 
""" + # Throttling configuration + adaptive_window = 3 + throttle_timeout = 100 + throttling_scheme = 'adaptive' + + # Attributes to sync attributes = {} # The plotting handle(s) to attach the JS callback on models = [] - # Additional models available to the callback - extra_models = [] - # Conditions when callback should be skipped - skip = [] + skip_events = [] + skip_changes = [] # Callback will listen to events of the supplied type on the models on_events = [] @@ -61,10 +111,21 @@ class MessageCallback(object): # List of change events on the models to listen to on_changes = [] + # Internal state _callbacks = {} - _transforms = [] + def __init__(self, plot, streams, source, **params): + self.plot = plot + self.streams = streams + self.source = source + self.handle_ids = defaultdict(dict) + self.reset() + self._active = False + self._prev_msg = None + self._last_event = time.time() + self._history = [] + def _transform(self, msg): for transform in self._transforms: msg = transform(msg, self) @@ -77,33 +138,12 @@ def _process_msg(self, msg): """ return self._transform(msg) - def __init__(self, plot, streams, source, **params): - self.plot = plot - self.streams = streams - if plot.renderer.mode == 'server' or pn.config.comms != 'default': - self.comm = None - else: - if plot.pane: - on_error = partial(plot.pane._on_error, plot.root) - else: - on_error = None - self.comm = plot.renderer.comm_manager.get_client_comm(on_msg=self.on_msg) - self.comm._on_error = on_error - self.source = source - self.handle_ids = defaultdict(dict) - self.reset() - def cleanup(self): self.reset() self.handle_ids = None self.plot = None self.source = None self.streams = [] - if self.comm: - try: - self.comm.close() - except: - pass Callback._callbacks = {k: cb for k, cb in Callback._callbacks.items() if cb is not self} @@ -116,11 +156,9 @@ def reset(self): handle = handles[handle_name] cb_hash = (id(handle), id(type(self))) self._callbacks.pop(cb_hash, None) - self.callbacks = [] 
self.plot_handles = {} self._queue = [] - def _filter_msg(self, msg, ids): """ Filter event values that do not originate from the plotting @@ -136,7 +174,6 @@ def _filter_msg(self, msg, ids): filtered_msg[k] = v return filtered_msg - def on_msg(self, msg): streams = [] for stream in self.streams: @@ -165,7 +202,6 @@ def on_msg(self, msg): for stream in streams: stream._metadata = {} - def _init_plot_handles(self): """ Find all requested plotting handles and cache them along @@ -182,18 +218,12 @@ def _init_plot_handles(self): self.plot_handles = handles requested = {} - for h in self.models+self.extra_models: + for h in self.models: if h in self.plot_handles: requested[h] = handles[h] - elif h in self.extra_models: - print("Warning %s could not find the %s model. " - "The corresponding stream may not work." - % (type(self).__name__, h)) self.handle_ids.update(self._get_stream_handle_ids(requested)) - return requested - def _get_stream_handle_ids(self, handles): """ Gather the ids of the plotting handles attached to this callback @@ -208,140 +238,6 @@ def _get_stream_handle_ids(self, handles): stream_handle_ids[stream][h] = handle_id return stream_handle_ids - - -class CustomJSCallback(MessageCallback): - """ - The CustomJSCallback attaches CustomJS callbacks to a bokeh plot, - which looks up the requested attributes and sends back a message - to Python using a Comms instance. - """ - - js_callback = JS_CALLBACK - - code = "" - - # Timeout if a comm message is swallowed - timeout = 20000 - - # Timeout before the first event is processed - debounce = 20 - - @classmethod - def attributes_js(cls, attributes): - """ - Generates JS code to look up attributes on JS objects from - an attributes specification dictionary. If the specification - references a plotting particular plotting handle it will also - generate JS code to get the ID of the object. 
- - Simple example (when referencing cb_data or cb_obj): - - Input : {'x': 'cb_data.geometry.x'} - - Output : data['x'] = cb_data['geometry']['x'] - - Example referencing plot handle: - - Input : {'x0': 'x_range.attributes.start'} - - Output : if ((x_range !== undefined)) { - data['x0'] = {id: x_range['id'], value: x_range['attributes']['start']} - } - """ - assign_template = '{assign}{{id: {obj_name}["id"], value: {obj_name}{attr_getters}}};\n' - conditional_template = 'if (({obj_name} != undefined)) {{ {assign} }}' - code = '' - for key, attr_path in sorted(attributes.items()): - data_assign = 'data["{key}"] = '.format(key=key) - attrs = attr_path.split('.') - obj_name = attrs[0] - attr_getters = ''.join(['["{attr}"]'.format(attr=attr) - for attr in attrs[1:]]) - if obj_name not in ['cb_obj', 'cb_data']: - assign_str = assign_template.format( - assign=data_assign, obj_name=obj_name, attr_getters=attr_getters - ) - code += conditional_template.format( - obj_name=obj_name, assign=assign_str - ) - else: - assign_str = ''.join([data_assign, obj_name, attr_getters, ';\n']) - code += assign_str - return code - - - def get_customjs(self, references, plot_id=None): - """ - Creates a CustomJS callback that will send the requested - attributes back to python. 
- """ - # Generate callback JS code to get all the requested data - if plot_id is None: - plot_id = self.plot.id or 'PLACEHOLDER_PLOT_ID' - self_callback = self.js_callback.format(comm_id=self.comm.id, - timeout=self.timeout, - debounce=self.debounce, - plot_id=plot_id) - - attributes = self.attributes_js(self.attributes) - conditions = ["%s" % cond for cond in self.skip] - conditional = '' - if conditions: - conditional = 'if (%s) { return };\n' % (' || '.join(conditions)) - data = "var data = {};\n" - code = conditional + data + attributes + self.code + self_callback - return CustomJS(args=references, code=code) - - def set_customjs_callback(self, js_callback, handle): - """ - Generates a CustomJS callback by generating the required JS - code and gathering all plotting handles and installs it on - the requested callback handle. - """ - if self.on_events: - for event in self.on_events: - handle.js_on_event(event, js_callback) - if self.on_changes: - for change in self.on_changes: - handle.js_on_change(change, js_callback) - - -class ServerCallback(MessageCallback): - """ - Implements methods to set up bokeh server callbacks. A ServerCallback - resolves the requested attributes on the Python end and then hands - the msg off to the general on_msg handler, which will update the - Stream(s) attached to the callback. - - The ServerCallback supports three different throttling modes: - - - adaptive (default): The callback adapts the throttling timeout - depending on the rolling mean of the time taken to process each - message. The rolling window is controlled by the `adaptive_window` - value. - - throttle: Uses the fixed `throttle_timeout` as the minimum amount - of time between events. - - debounce: Processes the message only when no new event has been - received within the `throttle_timeout` duration. 
- """ - - adaptive_window = 3 - - throttle_timeout = 100 - - throttling_scheme = 'adaptive' - - skip_events = [] - skip_changes = [] - - def __init__(self, plot, streams, source, **params): - super(ServerCallback, self).__init__(plot, streams, source, **params) - self._active = False - self._prev_msg = None - self._last_event = time.time() - self._history = [] - @classmethod def resolve_attr_spec(cls, spec, cb_obj, model=None): """ @@ -393,8 +289,7 @@ def _schedule_callback(self, cb, timeout=None, offset=True): diff = time.time()-self._last_event timeout = max(timeout-(diff*1000), 50) if not pn.state.curdoc: - from tornado.ioloop import IOLoop - IOLoop.current().call_later(int(timeout)/1000., cb) + cb() else: pn.state.curdoc.add_timeout_callback(cb, int(timeout)) @@ -407,7 +302,11 @@ def on_change(self, attr, old, new): if not self._active and self.plot.document: self._active = True self._set_busy(True) - self._schedule_callback(self.process_on_change, offset=False) + if self.plot.renderer.mode == 'server': + self._schedule_callback(self.process_on_change, offset=False) + else: + with hold(self.plot.document): + self.process_on_change() def on_event(self, event): """ @@ -418,7 +317,11 @@ def on_event(self, event): if not self._active and self.plot.document: self._active = True self._set_busy(True) - self._schedule_callback(self.process_on_event, offset=False) + if self.plot.renderer.mode == 'server': + self._schedule_callback(self.process_on_event, offset=False) + else: + with hold(self.plot.document): + self.process_on_event() def throttled(self): now = time.time() @@ -438,6 +341,9 @@ def throttled(self): return False @gen.coroutine + def process_on_event_coroutine(self): + self.process_on_event() + def process_on_event(self): """ Trigger callback change event and triggering corresponding streams. 
@@ -447,7 +353,7 @@ def process_on_event(self): self._set_busy(False) return throttled = self.throttled() - if throttled: + if throttled and pn.state.curdoc: self._schedule_callback(self.process_on_event, throttled) return # Get unique event types in the queue @@ -467,16 +373,22 @@ def process_on_event(self): w = self.adaptive_window-1 diff = time.time()-self._last_event self._history = self._history[-w:] + [diff] - self._schedule_callback(self.process_on_event) + if self.plot.renderer.mode == 'server': + self._schedule_callback(self.process_on_event) + else: + self._active = False @gen.coroutine + def process_on_change_coroutine(self): + self.process_on_change() + def process_on_change(self): if not self._queue: self._active = False self._set_busy(False) return throttled = self.throttled() - if throttled: + if throttled and pn.state.curdoc: self._schedule_callback(self.process_on_change, throttled) return self._queue = [] @@ -507,9 +419,12 @@ def process_on_change(self): self._history = self._history[-w:] + [diff] self._prev_msg = msg - self._schedule_callback(self.process_on_change) + if self.plot.renderer.mode == 'server': + self._schedule_callback(self.process_on_change) + else: + self._active = False - def set_server_callback(self, handle): + def set_callback(self, handle): """ Set up on_change events for bokeh server interactions. """ @@ -523,65 +438,6 @@ def set_server_callback(self, handle): continue handle.on_change(change, self.on_change) - - -class Callback(CustomJSCallback, ServerCallback): - """ - Provides a baseclass to define callbacks, which return data from - bokeh model callbacks, events and attribute changes. The callback - then makes this data available to any streams attached to it. - - The definition of a callback consists of a number of components: - - * models : Defines which bokeh models the callback will be - attached on referencing the model by its key in - the plots handles, e.g. 
this could be the x_range, - y_range, plot, a plotting tool or any other - bokeh mode. - - * extra_models: Any additional models available in handles which - should be made available in the namespace of the - objects, e.g. to make a tool available to skip - checks. - - * attributes : The attributes define which attributes to send - back to Python. They are defined as a dictionary - mapping between the name under which the variable - is made available to Python and the specification - of the attribute. The specification should start - with the variable name that is to be accessed and - the location of the attribute separated by - periods. All models defined by the models and - extra_models attributes can be addressed in this - way, e.g. to get the start of the x_range as 'x' - you can supply {'x': 'x_range.attributes.start'}. - Additionally certain handles additionally make the - cb_data and cb_obj variables available containing - additional information about the event. - - * skip : Conditions when the Callback should be skipped - specified as a list of valid JS expressions, which - can reference models requested by the callback, - e.g. ['pan.attributes.active'] would skip the - callback if the pan tool is active. - - * code : Defines any additional JS code to be executed, - which can modify the data object that is sent to - the backend. - - * on_events : If the Callback should listen to bokeh events this - should declare the types of event as a list (optional) - - * on_changes : If the Callback should listen to model attribute - changes on the defined ``models`` (optional) - - If either on_events or on_changes are declared the Callback will - be registered using the on_event or on_change machinery, otherwise - it will be treated as a regular callback on the model. The - callback can also define a _process_msg method, which can modify - the data sent by the callback before it is passed to the streams. 
- """ - def initialize(self, plot_id=None): handles = self._init_plot_handles() for handle_name in self.models: @@ -604,12 +460,7 @@ def initialize(self, plot_id=None): cb.handle_ids[k].update(v) continue - if self.comm is None: - self.set_server_callback(handle) - else: - js_callback = self.get_customjs(handles, plot_id=plot_id) - self.set_customjs_callback(js_callback, handle) - self.callbacks.append(js_callback) + self.set_callback(handle) self._callbacks[cb_hash] = self @@ -621,38 +472,8 @@ class PointerXYCallback(Callback): attributes = {'x': 'cb_obj.x', 'y': 'cb_obj.y'} models = ['plot'] - extra_models= ['x_range', 'y_range'] - on_events = ['mousemove'] - # Clip x and y values to available axis range - code = """ - if (x_range.type.endsWith('Range1d')) { - var xstart = x_range.start; - var xend = x_range.end; - if (xstart > xend) { - [xstart, xend] = [xend, xstart] - } - if (cb_obj.x < xstart) { - data['x'] = xstart; - } else if (cb_obj.x > xend) { - data['x'] = xend; - } - } - if (y_range.type.endsWith('Range1d')) { - var ystart = y_range.start; - var yend = y_range.end; - if (ystart > yend) { - [ystart, yend] = [yend, ystart] - } - if (cb_obj.y < ystart) { - data['y'] = ystart; - } else if (cb_obj.y > yend) { - data['y'] = yend; - } - } - """ - def _process_out_of_bounds(self, value, start, end): "Clips out of bounds values" if isinstance(value, np.datetime64): @@ -687,10 +508,9 @@ def _process_msg(self, msg): if 'y' in msg and isinstance(yaxis, DatetimeAxis): msg['y'] = convert_timestamp(msg['y']) - server_mode = self.comm is None if isinstance(x_range, FactorRange) and isinstance(msg.get('x'), (int, float)): msg['x'] = x_range.factors[int(msg['x'])] - elif 'x' in msg and isinstance(x_range, (Range1d, DataRange1d)) and server_mode: + elif 'x' in msg and isinstance(x_range, (Range1d, DataRange1d)): xstart, xend = x_range.start, x_range.end if xstart > xend: xstart, xend = xend, xstart @@ -702,7 +522,7 @@ def _process_msg(self, msg): if isinstance(y_range, 
FactorRange) and isinstance(msg.get('y'), (int, float)): msg['y'] = y_range.factors[int(msg['y'])] - elif 'y' in msg and isinstance(y_range, (Range1d, DataRange1d)) and server_mode: + elif 'y' in msg and isinstance(y_range, (Range1d, DataRange1d)): ystart, yend = y_range.start, y_range.end if ystart > yend: ystart, yend = yend, ystart @@ -721,21 +541,7 @@ class PointerXCallback(PointerXYCallback): """ attributes = {'x': 'cb_obj.x'} - extra_models= ['x_range'] - code = """ - if (x_range.type.endsWith('Range1d')) { - var xstart = x_range.start; - var xend = x_range.end; - if (xstart > xend) { - [xstart, xend] = [xend, xstart] - } - if (cb_obj.x < xstart) { - data['x'] = xstart; - } else if (cb_obj.x > xend) { - data['x'] = xend; - } - } - """ + class PointerYCallback(PointerXYCallback): """ @@ -743,32 +549,16 @@ class PointerYCallback(PointerXYCallback): """ attributes = {'y': 'cb_obj.y'} - extra_models= ['y_range'] - code = """ - if (y_range.type.endsWith('Range1d')) { - var ystart = y_range.start; - var yend = y_range.end; - if (ystart > yend) { - [ystart, yend] = [yend, ystart] - } - if (cb_obj.y < ystart) { - data['y'] = ystart; - } else if (cb_obj.y > yend) { - data['y'] = yend; - } - } - """ + class DrawCallback(PointerXYCallback): on_events = ['pan', 'panstart', 'panend'] models = ['plot'] - extra_models=['pan', 'box_zoom', 'x_range', 'y_range'] - skip = ['pan && pan.attributes.active', 'box_zoom && box_zoom.attributes.active'] attributes = {'x': 'cb_obj.x', 'y': 'cb_obj.y', 'event': 'cb_obj.event_name'} def __init__(self, *args, **kwargs): self.stroke_count = 0 - super(DrawCallback, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _process_msg(self, msg): event = msg.pop('event') @@ -785,30 +575,6 @@ class TapCallback(PointerXYCallback): individual tap events within a doubletap event. 
""" - # Skip if tap is outside axis range - code = """ - if (x_range.type.endsWith('Range1d')) { - var xstart = x_range.start; - var xend = x_range.end; - if (xstart > xend) { - [xstart, xend] = [xend, xstart] - } - if ((cb_obj.x < xstart) || (cb_obj.x > xend)) { - return - } - } - if (y_range.type.endsWith('Range1d')) { - var ystart = y_range.start; - var yend = y_range.end; - if (ystart > yend) { - [ystart, yend] = [yend, ystart] - } - if ((cb_obj.y < ystart) || (cb_obj.y > yend)) { - return - } - } - """ - on_events = ['tap', 'doubletap'] def _process_out_of_bounds(self, value, start, end): @@ -839,6 +605,7 @@ class SingleTapCallback(TapCallback): on_events = ['tap'] + class PressUpCallback(TapCallback): """ Returns the mouse x/y-position of a pressup mouse event. @@ -897,28 +664,27 @@ def _process_msg(self, msg): data = {} if 'x0' in msg and 'x1' in msg: x0, x1 = msg['x0'], msg['x1'] + if x0 > x1: + x0, x1 = x1, x0 if isinstance(self.plot.handles.get('xaxis'), DatetimeAxis): if not isinstance(x0, datetime_types): x0 = convert_timestamp(x0) if not isinstance(x1, datetime_types): x1 = convert_timestamp(x1) - if self.plot.invert_xaxis: - x0, x1 = x1, x0 data['x_range'] = (x0, x1) if 'y0' in msg and 'y1' in msg: y0, y1 = msg['y0'], msg['y1'] + if y0 > y1: + y0, y1 = y1, y0 if isinstance(self.plot.handles.get('yaxis'), DatetimeAxis): if not isinstance(y0, datetime_types): y0 = convert_timestamp(y0) if not isinstance(y1, datetime_types): y1 = convert_timestamp(y1) - if self.plot.invert_yaxis: - y0, y1 = y1, y0 data['y_range'] = (y0, y1) return self._transform(data) - class RangeXCallback(RangeXYCallback): """ Returns the x-axis range of a plot. 
@@ -987,10 +753,8 @@ class BoundsCallback(Callback): 'y0': 'cb_obj.geometry.y0', 'y1': 'cb_obj.geometry.y1'} models = ['plot'] - extra_models = ['box_select'] on_events = ['selectiongeometry'] - skip = ["(cb_obj.geometry.type != 'rect') || (!cb_obj.final)"] skip_events = [lambda event: event.geometry['type'] != 'rect', lambda event: not event.final] @@ -1015,7 +779,7 @@ class SelectionXYCallback(BoundsCallback): """ def _process_msg(self, msg): - msg = super(SelectionXYCallback, self)._process_msg(msg) + msg = super()._process_msg(msg) if 'bounds' not in msg: return msg el = self.plot.current_frame @@ -1060,10 +824,8 @@ class BoundsXCallback(Callback): attributes = {'x0': 'cb_obj.geometry.x0', 'x1': 'cb_obj.geometry.x1'} models = ['plot'] - extra_models = ['xbox_select'] on_events = ['selectiongeometry'] - skip = ["(cb_obj.geometry.type != 'rect') || (!cb_obj.final)"] skip_events = [lambda event: event.geometry['type'] != 'rect', lambda event: not event.final] @@ -1085,10 +847,8 @@ class BoundsYCallback(Callback): attributes = {'y0': 'cb_obj.geometry.y0', 'y1': 'cb_obj.geometry.y1'} models = ['plot'] - extra_models = ['ybox_select'] on_events = ['selectiongeometry'] - skip = ["(cb_obj.geometry.type != 'rect') || (!cb_obj.final)"] skip_events = [lambda event: event.geometry['type'] != 'rect', lambda event: not event.final] @@ -1107,10 +867,7 @@ class LassoCallback(Callback): attributes = {'xs': 'cb_obj.geometry.x', 'ys': 'cb_obj.geometry.y'} models = ['plot'] - extra_models = ['lasso_select'] on_events = ['selectiongeometry'] - skip = ["(cb_obj.geometry.type != 'poly') || (!cb_obj.final)"] - skip_events = [lambda event: event.geometry['type'] != 'poly', lambda event: not event.final] @@ -1177,7 +934,7 @@ class CDSCallback(Callback): on_changes = ['data', 'patching'] def initialize(self, plot_id=None): - super(CDSCallback, self).initialize(plot_id) + super().initialize(plot_id) plot = self.plot data = self._process_msg({'data': plot.handles['source'].data})['data'] 
for stream in self.streams: @@ -1273,22 +1030,24 @@ def initialize(self, plot_id=None): if stream.num_objects: kwargs['num_objects'] = stream.num_objects if stream.tooltip: - kwargs['custom_tooltip'] = stream.tooltip + kwargs[CUSTOM_TOOLTIP] = stream.tooltip if stream.styles: self._create_style_callback(cds, glyph, 'x') + if stream.empty_value is not None: + kwargs['empty_value'] = stream.empty_value point_tool = PointDrawTool( add=all(s.add for s in self.streams), drag=all(s.drag for s in self.streams), - empty_value=stream.empty_value, renderers=renderers, **kwargs) + renderers=renderers, **kwargs) self.plot.state.tools.append(point_tool) self._update_cds_vdims(cds.data) # Add any value dimensions not already in the CDS data # ensuring the element can be reconstituted in entirety - super(PointDrawCallback, self).initialize(plot_id) + super().initialize(plot_id) def _process_msg(self, msg): self._update_cds_vdims(msg['data']) - return super(PointDrawCallback, self)._process_msg(msg) + return super()._process_msg(msg) class CurveEditCallback(GlyphDrawCallback): @@ -1303,7 +1062,7 @@ def initialize(self, plot_id=None): renderers = [renderer] kwargs = {} if stream.tooltip: - kwargs['custom_tooltip'] = stream.tooltip + kwargs[CUSTOM_TOOLTIP] = stream.tooltip point_tool = PointDrawTool( add=False, drag=True, renderers=renderers, **kwargs ) @@ -1314,11 +1073,11 @@ def initialize(self, plot_id=None): self.plot.state.tools.append(point_tool) self._update_cds_vdims(cds.data) - super(CurveEditCallback, self).initialize(plot_id) + super().initialize(plot_id) def _process_msg(self, msg): self._update_cds_vdims(msg['data']) - return super(CurveEditCallback, self)._process_msg(msg) + return super()._process_msg(msg) def _update_cds_vdims(self, data): """ @@ -1350,19 +1109,20 @@ def initialize(self, plot_id=None): if stream.styles: self._create_style_callback(cds, glyph, 'xs') if stream.tooltip: - kwargs['custom_tooltip'] = stream.tooltip + kwargs[CUSTOM_TOOLTIP] = 
stream.tooltip + if stream.empty_value is not None: + kwargs['empty_value'] = stream.empty_value poly_tool = PolyDrawTool( - drag=all(s.drag for s in self.streams), - empty_value=stream.empty_value, renderers=renderers, + drag=all(s.drag for s in self.streams), renderers=renderers, **kwargs ) plot.state.tools.append(poly_tool) self._update_cds_vdims(cds.data) - super(PolyDrawCallback, self).initialize(plot_id) + super().initialize(plot_id) def _process_msg(self, msg): self._update_cds_vdims(msg['data']) - return super(PolyDrawCallback, self)._process_msg(msg) + return super()._process_msg(msg) def _update_cds_vdims(self, data): """ @@ -1397,9 +1157,10 @@ def initialize(self, plot_id=None): self._create_style_callback(cds, glyph, 'xs') kwargs = {} if stream.tooltip: - kwargs['custom_tooltip'] = stream.tooltip + kwargs[CUSTOM_TOOLTIP] = stream.tooltip + if stream.empty_value is not None: + kwargs['empty_value'] = stream.empty_value poly_tool = FreehandDrawTool( - empty_value=stream.empty_value, num_objects=stream.num_objects, renderers=[plot.handles['glyph_renderer']], **kwargs @@ -1451,7 +1212,7 @@ def initialize(self, plot_id=None): if stream.num_objects: kwargs['num_objects'] = stream.num_objects if stream.tooltip: - kwargs['custom_tooltip'] = stream.tooltip + kwargs[CUSTOM_TOOLTIP] = stream.tooltip renderer = self.plot.handles['glyph_renderer'] if isinstance(self.plot, PathPlot): @@ -1464,7 +1225,7 @@ def initialize(self, plot_id=None): super(CDSCallback, self).initialize() def _process_msg(self, msg): - data = super(BoxEditCallback, self)._process_msg(msg) + data = super()._process_msg(msg) if 'data' not in data: return {} data = data['data'] @@ -1497,7 +1258,7 @@ def initialize(self, plot_id=None): stream = self.streams[0] kwargs = {} if stream.tooltip: - kwargs['custom_tooltip'] = stream.tooltip + kwargs[CUSTOM_TOOLTIP] = stream.tooltip if vertex_tool is None: vertex_style = dict({'size': 10}, **stream.vertex_style) r1 = plot.state.scatter([], [], 
**vertex_style) @@ -1508,429 +1269,35 @@ def initialize(self, plot_id=None): CDSCallback.initialize(self, plot_id) - -callbacks = Stream._callbacks['bokeh'] - -callbacks[PointerXY] = PointerXYCallback -callbacks[PointerX] = PointerXCallback -callbacks[PointerY] = PointerYCallback -callbacks[Tap] = TapCallback -callbacks[SingleTap] = SingleTapCallback -callbacks[DoubleTap] = DoubleTapCallback -callbacks[PressUp] = PressUpCallback -callbacks[PanEnd] = PanEndCallback -callbacks[MouseEnter] = MouseEnterCallback -callbacks[MouseLeave] = MouseLeaveCallback -callbacks[RangeXY] = RangeXYCallback -callbacks[RangeX] = RangeXCallback -callbacks[RangeY] = RangeYCallback -callbacks[BoundsXY] = BoundsCallback -callbacks[BoundsX] = BoundsXCallback -callbacks[BoundsY] = BoundsYCallback -callbacks[Lasso] = LassoCallback -callbacks[Selection1D] = Selection1DCallback -callbacks[PlotSize] = PlotSizeCallback -callbacks[SelectMode] = SelectModeCallback -callbacks[SelectionXY] = SelectionXYCallback -callbacks[Draw] = DrawCallback -callbacks[PlotReset] = ResetCallback -callbacks[CDSStream] = CDSCallback -callbacks[BoxEdit] = BoxEditCallback -callbacks[PointDraw] = PointDrawCallback -callbacks[CurveEdit] = CurveEditCallback -callbacks[FreehandDraw]= FreehandDrawCallback -callbacks[PolyDraw] = PolyDrawCallback -callbacks[PolyEdit] = PolyEditCallback - - - -class LinkCallback(param.Parameterized): - - source_model = None - target_model = None - source_handles = [] - target_handles = [] - - on_source_events = [] - on_source_changes = [] - - on_target_events = [] - on_target_changes = [] - - source_code = None - target_code = None - - def __init__(self, root_model, link, source_plot, target_plot=None): - self.root_model = root_model - self.link = link - self.source_plot = source_plot - self.target_plot = target_plot - self.validate() - - references = {k: v for k, v in link.param.get_param_values() - if k not in ('source', 'target', 'name')} - - for sh in 
self.source_handles+[self.source_model]: - key = '_'.join(['source', sh]) - references[key] = source_plot.handles[sh] - - for p, value in link.param.get_param_values(): - if p in ('name', 'source', 'target'): - continue - references[p] = value - - if target_plot is not None: - for sh in self.target_handles+[self.target_model]: - key = '_'.join(['target', sh]) - references[key] = target_plot.handles[sh] - - if self.source_model in source_plot.handles: - src_model = source_plot.handles[self.source_model] - src_cb = CustomJS(args=references, code=self.source_code) - for ch in self.on_source_changes: - src_model.js_on_change(ch, src_cb) - for ev in self.on_source_events: - src_model.js_on_event(ev, src_cb) - self.src_cb = src_cb - else: - self.src_cb = None - - if target_plot is not None and self.target_model in target_plot.handles and self.target_code: - tgt_model = target_plot.handles[self.target_model] - tgt_cb = CustomJS(args=references, code=self.target_code) - for ch in self.on_target_changes: - tgt_model.js_on_change(ch, tgt_cb) - for ev in self.on_target_events: - tgt_model.js_on_event(ev, tgt_cb) - self.tgt_cb = tgt_cb - else: - self.tgt_cb = None - - @classmethod - def find_links(cls, root_plot): - """ - Traverses the supplied plot and searches for any Links on - the plotted objects. 
- """ - plot_fn = lambda x: isinstance(x, GenericElementPlot) and not isinstance(x, GenericOverlayPlot) - plots = root_plot.traverse(lambda x: x, [plot_fn]) - potentials = [cls.find_link(plot) for plot in plots] - source_links = [p for p in potentials if p is not None] - found = [] - for plot, links in source_links: - for link in links: - if not link._requires_target: - # If link has no target don't look further - found.append((link, plot, None)) - continue - potentials = [cls.find_link(p, link) for p in plots] - tgt_links = [p for p in potentials if p is not None] - if tgt_links: - found.append((link, plot, tgt_links[0][0])) - return found - - @classmethod - def find_link(cls, plot, link=None): - """ - Searches a GenericElementPlot for a Link. - """ - registry = Link.registry.items() - for source in plot.link_sources: - if link is None: - links = [ - l for src, links in registry for l in links - if src is source or (src._plot_id is not None and - src._plot_id == source._plot_id)] - if links: - return (plot, links) - else: - if ((link.target is source) or - (link.target is not None and - link.target._plot_id is not None and - link.target._plot_id == source._plot_id)): - return (plot, [link]) - - def validate(self): - """ - Should be subclassed to check if the source and target plots - are compatible to perform the linking. 
- """ - - -class RangeToolLinkCallback(LinkCallback): - """ - Attaches a RangeTool to the source plot and links it to the - specified axes on the target plot - """ - - def __init__(self, root_model, link, source_plot, target_plot): - try: - from bokeh.models.tools import RangeTool - except: - raise Exception('RangeToolLink requires bokeh >= 0.13') - toolbars = list(root_model.select({'type': ToolbarBox})) - axes = {} - if 'x' in link.axes: - axes['x_range'] = target_plot.handles['x_range'] - if 'y' in link.axes: - axes['y_range'] = target_plot.handles['y_range'] - tool = RangeTool(**axes) - source_plot.state.add_tools(tool) - if toolbars: - toolbar = toolbars[0].toolbar - toolbar.tools.append(tool) - - -class DataLinkCallback(LinkCallback): - """ - Merges the source and target ColumnDataSource - """ - - def __init__(self, root_model, link, source_plot, target_plot): - src_cds = source_plot.handles['source'] - tgt_cds = target_plot.handles['source'] - if src_cds is tgt_cds: - return - - src_len = [len(v) for v in src_cds.data.values()] - tgt_len = [len(v) for v in tgt_cds.data.values()] - if src_len and tgt_len and (src_len[0] != tgt_len[0]): - raise Exception('DataLink source data length must match target ' - 'data length, found source length of %d and ' - 'target length of %d.' % (src_len[0], tgt_len[0])) - - # Ensure the data sources are compatible (i.e. 
overlapping columns are equal) - for k, v in tgt_cds.data.items(): - if k not in src_cds.data: - continue - v = np.asarray(v) - col = np.asarray(src_cds.data[k]) - if len(v) and isinstance(v[0], np.ndarray): - continue # Skip ragged arrays - if not ((isscalar(v) and v == col) or - (v.dtype.kind not in 'iufc' and (v==col).all()) or - np.allclose(v, np.asarray(src_cds.data[k]), equal_nan=True)): - raise ValueError('DataLink can only be applied if overlapping ' - 'dimension values are equal, %s column on source ' - 'does not match target' % k) - - src_cds.data.update(tgt_cds.data) - renderer = target_plot.handles.get('glyph_renderer') - if renderer is None: - pass - elif 'data_source' in renderer.properties(): - renderer.update(data_source=src_cds) - else: - renderer.update(source=src_cds) - if hasattr(renderer, 'view'): - renderer.view.update(source=src_cds) - target_plot.handles['source'] = src_cds - target_plot.handles['cds'] = src_cds - for callback in target_plot.callbacks: - callback.initialize(plot_id=root_model.ref['id']) - - -class SelectionLinkCallback(LinkCallback): - - source_model = 'selected' - target_model = 'selected' - - on_source_changes = ['indices'] - on_target_changes = ['indices'] - - source_handles = ['cds'] - target_handles = ['cds'] - - source_code = """ - target_selected.indices = source_selected.indices - target_cds.properties.selected.change.emit() - """ - - target_code = """ - source_selected.indices = target_selected.indices - source_cds.properties.selected.change.emit() - """ - -class RectanglesTableLinkCallback(DataLinkCallback): - - source_model = 'cds' - target_model = 'cds' - - source_handles = ['glyph'] - - on_source_changes = ['selected', 'data'] - on_target_changes = ['patching'] - - source_code = """ - var xs = source_cds.data[source_glyph.x.field] - var ys = source_cds.data[source_glyph.y.field] - var ws = source_cds.data[source_glyph.width.field] - var hs = source_cds.data[source_glyph.height.field] - - var x0 = [] - var x1 = 
[] - var y0 = [] - var y1 = [] - for (var i = 0; i < xs.length; i++) { - var hw = ws[i]/2. - var hh = hs[i]/2. - x0.push(xs[i]-hw) - x1.push(xs[i]+hw) - y0.push(ys[i]-hh) - y1.push(ys[i]+hh) - } - target_cds.data[columns[0]] = x0 - target_cds.data[columns[1]] = y0 - target_cds.data[columns[2]] = x1 - target_cds.data[columns[3]] = y1 - """ - - target_code = """ - var x0s = target_cds.data[columns[0]] - var y0s = target_cds.data[columns[1]] - var x1s = target_cds.data[columns[2]] - var y1s = target_cds.data[columns[3]] - - var xs = [] - var ys = [] - var ws = [] - var hs = [] - for (var i = 0; i < x0s.length; i++) { - var x0 = Math.min(x0s[i], x1s[i]) - var y0 = Math.min(y0s[i], y1s[i]) - var x1 = Math.max(x0s[i], x1s[i]) - var y1 = Math.max(y0s[i], y1s[i]) - xs.push((x0+x1)/2.) - ys.push((y0+y1)/2.) - ws.push(x1-x0) - hs.push(y1-y0) - } - source_cds.data['x'] = xs - source_cds.data['y'] = ys - source_cds.data['width'] = ws - source_cds.data['height'] = hs - """ - - def __init__(self, root_model, link, source_plot, target_plot=None): - DataLinkCallback.__init__(self, root_model, link, source_plot, target_plot) - LinkCallback.__init__(self, root_model, link, source_plot, target_plot) - columns = [kd.name for kd in source_plot.current_frame.kdims] - self.src_cb.args['columns'] = columns - self.tgt_cb.args['columns'] = columns - - -class VertexTableLinkCallback(LinkCallback): - - source_model = 'cds' - target_model = 'cds' - - on_source_changes = ['selected', 'data', 'patching'] - on_target_changes = ['data', 'patching'] - - source_code = """ - var index = source_cds.selected.indices[0]; - if (index == undefined) { - var xs_column = []; - var ys_column = []; - } else { - var xs_column = source_cds.data['xs'][index]; - var ys_column = source_cds.data['ys'][index]; - } - if (xs_column == undefined) { - var xs_column = []; - var ys_column = []; - } - var xs = [] - var ys = [] - var empty = [] - for (var i = 0; i < xs_column.length; i++) { - xs.push(xs_column[i]) - 
ys.push(ys_column[i]) - empty.push(null) - } - var [x, y] = vertex_columns - target_cds.data[x] = xs - target_cds.data[y] = ys - var length = xs.length - for (var col in target_cds.data) { - if (vertex_columns.indexOf(col) != -1) { continue; } - else if (col in source_cds.data) { - var path = source_cds.data[col][index]; - if ((path == undefined)) { - var data = empty; - } else if (path.length == length) { - var data = source_cds.data[col][index]; - } else { - var data = empty; - } - } else { - var data = empty; - } - target_cds.data[col] = data; - } - target_cds.change.emit() - target_cds.data = target_cds.data - """ - - target_code = """ - if (!source_cds.selected.indices.length) { return } - var [x, y] = vertex_columns - var xs_column = target_cds.data[x] - var ys_column = target_cds.data[y] - var xs = [] - var ys = [] - var points = [] - for (var i = 0; i < xs_column.length; i++) { - xs.push(xs_column[i]) - ys.push(ys_column[i]) - points.push(i) - } - var index = source_cds.selected.indices[0] - var xpaths = source_cds.data['xs'] - var ypaths = source_cds.data['ys'] - var length = source_cds.data['xs'].length - for (var col in target_cds.data) { - if ((col == x) || (col == y)) { continue; } - if (!(col in source_cds.data)) { - var empty = [] - for (var i = 0; i < length; i++) - empty.push([]) - source_cds.data[col] = empty - } - source_cds.data[col][index] = target_cds.data[col] - for (var p of points) { - for (var pindex = 0; pindex < xpaths.length; pindex++) { - if (pindex != index) { continue } - var xs = xpaths[pindex] - var ys = ypaths[pindex] - var column = source_cds.data[col][pindex] - if (column.length != xs.length) { - for (var ind = 0; ind < xs.length; ind++) { - column.push(null) - } - } - for (var ind = 0; ind < xs.length; ind++) { - if ((xs[ind] == xpaths[index][p]) && (ys[ind] == ypaths[index][p])) { - column[ind] = target_cds.data[col][p] - xs[ind] = xs[p]; - ys[ind] = ys[p]; - } - } - } - } - } - xpaths[index] = xs; - ypaths[index] = ys; - 
source_cds.change.emit() - source_cds.properties.data.change.emit(); - source_cds.data = source_cds.data - """ - - -callbacks = Link._callbacks['bokeh'] - -callbacks[RangeToolLink] = RangeToolLinkCallback -callbacks[DataLink] = DataLinkCallback -callbacks[SelectionLink] = SelectionLinkCallback -callbacks[VertexTableLink] = VertexTableLinkCallback -callbacks[RectanglesTableLink] = RectanglesTableLinkCallback +Stream._callbacks['bokeh'].update({ + PointerXY : PointerXYCallback, + PointerX : PointerXCallback, + PointerY : PointerYCallback, + Tap : TapCallback, + SingleTap : SingleTapCallback, + DoubleTap : DoubleTapCallback, + PressUp : PressUpCallback, + PanEnd : PanEndCallback, + MouseEnter : MouseEnterCallback, + MouseLeave : MouseLeaveCallback, + RangeXY : RangeXYCallback, + RangeX : RangeXCallback, + RangeY : RangeYCallback, + BoundsXY : BoundsCallback, + BoundsX : BoundsXCallback, + BoundsY : BoundsYCallback, + Lasso : LassoCallback, + Selection1D : Selection1DCallback, + PlotSize : PlotSizeCallback, + SelectionXY : SelectionXYCallback, + Draw : DrawCallback, + PlotReset : ResetCallback, + CDSStream : CDSCallback, + BoxEdit : BoxEditCallback, + PointDraw : PointDrawCallback, + CurveEdit : CurveEditCallback, + FreehandDraw: FreehandDrawCallback, + PolyDraw : PolyDrawCallback, + PolyEdit : PolyEditCallback, + SelectMode : SelectModeCallback +}) diff --git a/holoviews/plotting/bokeh/chart.py b/holoviews/plotting/bokeh/chart.py index 358b3ffe2c..1d51225623 100644 --- a/holoviews/plotting/bokeh/chart.py +++ b/holoviews/plotting/bokeh/chart.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict import numpy as np @@ -14,12 +12,12 @@ from ...core.data import Dataset from ...core.dimension import dimension_name from ...core.util import ( - OrderedDict, basestring, dimension_sanitizer, isfinite + OrderedDict, dimension_sanitizer, isfinite ) from ...operation import interpolate_curve from 
...util.transform import dim from ..mixins import AreaMixin, BarsMixin, SpikesMixin -from ..util import compute_sizes, get_min_distance +from ..util import get_min_distance from .element import ElementPlot, ColorbarPlot, LegendPlot, OverlayPlot from .selection import BokehOverlaySelectionDisplay from .styles import ( @@ -38,67 +36,14 @@ class PointPlot(LegendPlot, ColorbarPlot): The current selection as a list of integers corresponding to the selected items.""") - # Deprecated parameters - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `color=dim('color')`""") - - size_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of size style mapping, e.g. `size=dim('size')`""") - - scaling_method = param.ObjectSelector(default="area", - objects=["width", "area"], - doc=""" - Deprecated in favor of size style mapping, e.g. - size=dim('size')**2.""") - - scaling_factor = param.Number(default=1, bounds=(0, None), doc=""" - Scaling factor which is applied to either the width or area - of each point, depending on the value of `scaling_method`.""") - - size_fn = param.Callable(default=np.abs, doc=""" - Function applied to size values before applying scaling, - to remove values lower than zero.""") - selection_display = BokehOverlaySelectionDisplay() - style_opts = (['cmap', 'palette', 'marker', 'size', 'angle', 'visible'] + - line_properties + fill_properties) + style_opts = (['cmap', 'palette', 'marker', 'size', 'angle'] + + base_properties + line_properties + fill_properties) _plot_methods = dict(single='scatter', batched='scatter') _batched_style_opts = line_properties + fill_properties + ['size', 'marker', 'angle'] - def _get_size_data(self, element, ranges, style): - data, mapping = {}, {} - sdim = element.get_dimension(self.size_index) - ms = style.get('size', np.sqrt(6)) - if sdim and ((isinstance(ms, 
basestring) and ms in element) or isinstance(ms, dim)): - self.param.warning( - "Cannot declare style mapping for 'size' option and " - "declare a size_index; ignoring the size_index.") - sdim = None - if not sdim or self.static_source: - return data, mapping - - map_key = 'size_' + sdim.name - ms = ms**2 - sizes = element.dimension_values(self.size_index) - sizes = compute_sizes(sizes, self.size_fn, - self.scaling_factor, - self.scaling_method, ms) - if sizes is None: - eltype = type(element).__name__ - self.param.warning( - '%s dimension is not numeric, cannot use to scale %s size.' - % (sdim.pprint_label, eltype)) - else: - data[map_key] = np.sqrt(sizes) - mapping['size'] = map_key - return data, mapping - - def get_data(self, element, ranges, style): dims = element.dimensions(label=True) @@ -112,14 +57,6 @@ def get_data(self, element, ranges, style): data[ydim] = element.dimension_values(ydim) self._categorize_data(data, dims[:2], element.dimensions()) - cdata, cmapping = self._get_color_data(element, ranges, style) - data.update(cdata) - mapping.update(cmapping) - - sdata, smapping = self._get_size_data(element, ranges, style) - data.update(sdata) - mapping.update(smapping) - if 'angle' in style and isinstance(style['angle'], (int, float)): style['angle'] = np.deg2rad(style['angle']) @@ -190,7 +127,7 @@ class VectorFieldPlot(ColorbarPlot): arrow_heads = param.Boolean(default=True, doc=""" Whether or not to draw arrow heads.""") - magnitude = param.ClassSelector(class_=(basestring, dim), doc=""" + magnitude = param.ClassSelector(class_=(str, dim), doc=""" Dimension or dimension value transform that declares the magnitude of each vector. 
Magnitude is expected to be scaled between 0-1, by default the magnitudes are rescaled relative to the minimum @@ -208,25 +145,6 @@ class VectorFieldPlot(ColorbarPlot): Whether the lengths will be rescaled to take into account the smallest non-zero distance between two vectors.""") - # Deprecated parameters - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of dimension value transform on color option, - e.g. `color=dim('Magnitude')`. - """) - - size_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of the magnitude option, e.g. - `magnitude=dim('Magnitude')`. - """) - - normalize_lengths = param.Boolean(default=True, doc=""" - Deprecated in favor of rescaling length using dimension value - transforms using the magnitude option, e.g. - `dim('Magnitude').norm()`.""") - selection_display = BokehOverlaySelectionDisplay() style_opts = base_properties + line_properties + ['scale', 'cmap'] @@ -236,15 +154,8 @@ class VectorFieldPlot(ColorbarPlot): _plot_methods = dict(single='segment') def _get_lengths(self, element, ranges): - size_dim = element.get_dimension(self.size_index) mag_dim = self.magnitude - if size_dim and mag_dim: - self.param.warning( - "Cannot declare style mapping for 'magnitude' option " - "and declare a size_index; ignoring the size_index.") - elif size_dim: - mag_dim = size_dim - elif isinstance(mag_dim, basestring): + if isinstance(mag_dim, str): mag_dim = element.get_dimension(mag_dim) (x0, x1), (y0, y1) = (element.range(i) for i in range(2)) @@ -268,11 +179,10 @@ def _get_lengths(self, element, ranges): return magnitudes def _glyph_properties(self, *args): - properties = super(VectorFieldPlot, self)._glyph_properties(*args) + properties = super()._glyph_properties(*args) properties.pop('scale', None) return properties - def get_data(self, element, ranges, style): input_scale = style.pop('scale', 1.0) 
@@ -284,9 +194,6 @@ def get_data(self, element, ranges, style): else: xidx, yidx = (0, 1) lens = self._get_lengths(element, ranges)/input_scale - cdim = element.get_dimension(self.color_index) - cdata, cmapping = self._get_color_data(element, ranges, style, - name='line_color') # Compute segments and arrowheads xs = element.dimension_values(xidx) @@ -307,7 +214,6 @@ def get_data(self, element, ranges, style): x0s, x1s = (xs + nxoff, xs - pxoff) y0s, y1s = (ys + nyoff, ys - pyoff) - color = None if self.arrow_heads: arrow_len = (lens/4.) xa1s = x0s - np.cos(rads+np.pi/4)*arrow_len @@ -318,17 +224,9 @@ def get_data(self, element, ranges, style): x1s = np.concatenate([x1s, xa1s, xa2s]) y0s = np.tile(y0s, 3) y1s = np.concatenate([y1s, ya1s, ya2s]) - if cdim and cdim.name in cdata: - color = np.tile(cdata[cdim.name], 3) - elif cdim: - color = cdata.get(cdim.name) data = {'x0': x0s, 'x1': x1s, 'y0': y0s, 'y1': y1s} mapping = dict(x0='x0', x1='x1', y0='y0', y1='y1') - if cdim and color is not None: - data[cdim.name] = color - mapping.update(cmapping) - return (data, mapping, style) @@ -412,7 +310,6 @@ def get_batched_data(self, overlay, ranges): return data, mapping, style - class HistogramPlot(ColorbarPlot): selection_display = BokehOverlaySelectionDisplay(color_prop=['color', 'fill_color']) @@ -433,6 +330,8 @@ def get_data(self, element, ranges, style): x = element.kdims[0] values = element.dimension_values(1) edges = element.interface.coords(element, x, edges=True) + if hasattr(edges, 'compute'): + edges = edges.compute() data = dict(top=values, left=edges[:-1], right=edges[1:]) self._get_hover_data(data, element) return (data, mapping, style) @@ -443,8 +342,7 @@ def get_extents(self, element, ranges, range_type='combined'): s0 = min(s0, 0) if isfinite(s0) else 0 s1 = max(s1, 0) if isfinite(s1) else 0 ranges[ydim.name]['soft'] = (s0, s1) - return super(HistogramPlot, self).get_extents(element, ranges, range_type) - + return super().get_extents(element, ranges, 
range_type) class SideHistogramPlot(HistogramPlot): @@ -470,7 +368,7 @@ class SideHistogramPlot(HistogramPlot): """ def __init__(self, *args, **kwargs): - super(SideHistogramPlot, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if self.invert_axes: self.default_tools.append('ybox_select') else: @@ -483,15 +381,20 @@ def get_data(self, element, ranges, style): if d is not None] dimension = color_dims[0] if color_dims else None cmapper = self._get_colormapper(dimension, element, {}, {}) - if cmapper and dimension in element.dimensions(): + if cmapper: + cvals = None if isinstance(dimension, dim): - dim_name = dimension.dimension.name - data[dim_name] = [] if self.static_source else dimension.apply(element) + if dimension.applies(element): + dim_name = dimension.dimension.name + cvals = [] if self.static_source else dimension.apply(element) else: - dim_name = dimension.name - data[dim_name] = [] if self.static_source else element.dimension_values(dimension) - mapping['fill_color'] = {'field': dim_name, - 'transform': cmapper} + if dimension in element.dimensions(): + dim_name = dimension.name + cvals = [] if self.static_source else element.dimension_values(dimension) + if cvals is not None: + data[dim_name] = cvals + mapping['fill_color'] = {'field': dim_name, + 'transform': cmapper} return (data, mapping, style) @@ -499,7 +402,7 @@ def _init_glyph(self, plot, mapping, properties): """ Returns a Bokeh glyph object. 
""" - ret = super(SideHistogramPlot, self)._init_glyph(plot, mapping, properties) + ret = super()._init_glyph(plot, mapping, properties) if not 'field' in mapping.get('fill_color', {}): return ret dim = mapping['fill_color']['field'] @@ -680,15 +583,9 @@ class SpikesPlot(SpikesMixin, ColorbarPlot): show_legend = param.Boolean(default=True, doc=""" Whether to show legend for the plot.""") - # Deprecated parameters - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `color=dim('color')`""") - selection_display = BokehOverlaySelectionDisplay() - style_opts = base_properties + line_properties + ['cmap', 'palette'] + style_opts = base_properties + line_properties + ['cmap', 'palette'] _nonvectorized_styles = base_properties + ['cmap'] _plot_methods = dict(single='segment') @@ -696,7 +593,7 @@ class SpikesPlot(SpikesMixin, ColorbarPlot): def _get_axis_dims(self, element): if 'spike_length' in self.lookup_options(element, 'plot').options: return [element.dimensions()[0], None, None] - return super(SpikesPlot, self)._get_axis_dims(element) + return super()._get_axis_dims(element) def get_data(self, element, ranges, style): dims = element.dimensions() @@ -720,9 +617,6 @@ def get_data(self, element, ranges, style): else: mapping = {'x0': 'x', 'x1': 'x', 'y0': 'y0', 'y1': 'y1'} - cdata, cmapping = self._get_color_data(element, ranges, dict(style)) - data.update(cdata) - mapping.update(cmapping) self._get_hover_data(data, element) return data, mapping, style @@ -772,12 +666,6 @@ class BarPlot(BarsMixin, ColorbarPlot, LegendPlot): stacked = param.Boolean(default=False, doc=""" Whether the bars should be stacked or grouped.""") - # Deprecated parameters - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`color=dim('color')`""") - selection_display = BokehOverlaySelectionDisplay() style_opts = (base_properties + fill_properties + line_properties + @@ -792,7 +680,7 @@ class BarPlot(BarsMixin, ColorbarPlot, LegendPlot): def _axis_properties(self, axis, key, plot, dimension=None, ax_mapping={'x': 0, 'y': 1}): - props = super(BarPlot, self)._axis_properties(axis, key, plot, dimension, ax_mapping) + props = super()._axis_properties(axis, key, plot, dimension, ax_mapping) if (not self.multi_level and not self.stacked and self.current_frame.ndims > 1 and ((not self.invert_axes and axis == 'x') or (self.invert_axes and axis =='y'))): props['separator_line_width'] = 0 @@ -817,6 +705,33 @@ def _get_axis_dims(self, element): xdims = element.kdims[0] return (xdims, element.vdims[0]) + def _add_color_data(self, ds, ranges, style, cdim, data, mapping, factors, colors): + cdata, cmapping = self._get_color_data( + ds, ranges, dict(style), cdim, factors=factors, colors=colors + ) + if 'color' not in cmapping: + return + + # Enable legend if colormapper is categorical + cmapper = cmapping['color']['transform'] + legend_prop = 'legend_field' if bokeh_version >= '1.3.5' else 'legend' + if ('color' in cmapping and self.show_legend and + isinstance(cmapper, CategoricalColorMapper)): + mapping[legend_prop] = cdim.name + + if not self.stacked and ds.ndims > 1 and self.multi_level: + cmapping.pop(legend_prop, None) + mapping.pop(legend_prop, None) + + # Merge data and mappings + mapping.update(cmapping) + for k, cd in cdata.items(): + if isinstance(cmapper, CategoricalColorMapper) and cd.dtype.kind in 'uif': + cd = categorize_array(cd, cdim) + if k not in data or len(data[k]) != [len(data[key]) for key in data if key != k][0]: + data[k].append(cd) + else: + data[k][-1] = cd def _get_factors(self, element, ranges): xvals, gvals = self._get_coords(element, ranges) @@ -824,7 +739,6 @@ def _get_factors(self, element, ranges): xvals = [(x, g) for x in xvals for g in gvals] return ([], xvals) if 
self.invert_axes else (xvals, []) - def get_stack(self, xvals, yvals, baselines, sign='positive'): """ Iterates over a x- and y-values in a stack layer @@ -847,39 +761,9 @@ def get_stack(self, xvals, yvals, baselines, sign='positive'): tops.append(top) return bottoms, tops - def _glyph_properties(self, *args, **kwargs): props = super(BarPlot, self)._glyph_properties(*args, **kwargs) - return {k: v for k, v in props.items() if k not in ['width', 'bar_width']} - - - def _add_color_data(self, ds, ranges, style, cdim, data, mapping, factors, colors): - cdata, cmapping = self._get_color_data(ds, ranges, dict(style), - factors=factors, colors=colors) - if 'color' not in cmapping: - return - - # Enable legend if colormapper is categorical - cmapper = cmapping['color']['transform'] - legend_prop = 'legend_field' if bokeh_version >= '1.3.5' else 'legend' - if ('color' in cmapping and self.show_legend and - isinstance(cmapper, CategoricalColorMapper)): - mapping[legend_prop] = cdim.name - - if not self.stacked and ds.ndims > 1 and self.multi_level: - cmapping.pop(legend_prop, None) - mapping.pop(legend_prop, None) - - # Merge data and mappings - mapping.update(cmapping) - for k, cd in cdata.items(): - if isinstance(cmapper, CategoricalColorMapper) and cd.dtype.kind in 'uif': - cd = categorize_array(cd, cdim) - if k not in data or len(data[k]) != [len(data[key]) for key in data if key != k][0]: - data[k].append(cd) - else: - data[k][-1] = cd - + return {k: v for k, v in props.items() if k not in ['bar_width']} def get_data(self, element, ranges, style): # Get x, y, group, stack and color dimensions @@ -902,17 +786,12 @@ def get_data(self, element, ranges, style): xdim = element.get_dimension(0) ydim = element.vdims[0] - no_cidx = self.color_index is None - color_index = (group_dim or stack_dim) if no_cidx else self.color_index - color_dim = element.get_dimension(color_index) - if color_dim: - self.color_index = color_dim.name + cdim = element.get_dimension(group_dim or 
stack_dim) + style_mapping = [v for k, v in style.items() if 'color' in k and + (isinstance(v, dim) or v in element)] # Define style information - width = style.get('bar_width', style.get('width', 1)) - if 'width' in style: - self.param.warning("BarPlot width option is deprecated " - "use 'bar_width' instead.") + width = style.get('bar_width', 1) cmap = style.get('cmap') hover = 'hover' in self.handles @@ -940,19 +819,10 @@ def get_data(self, element, ranges, style): mapping = {'x': xdim.name, 'top': ydim.name, 'bottom': bottom, 'width': width} # Get colors - cdim = color_dim or group_dim - style_mapping = [v for k, v in style.items() if 'color' in k and - (isinstance(v, dim) or v in element)] - if style_mapping and not no_cidx and self.color_index is not None: - self.param.warning("Cannot declare style mapping for '%s' option " - "and declare a color_index; ignoring the color_index." - % style_mapping[0]) - cdim = None - cvals = element.dimension_values(cdim, expanded=False) if cdim else None if cvals is not None: - if cvals.dtype.kind in 'uif' and no_cidx: - cvals = categorize_array(cvals, color_dim) + if cvals.dtype.kind in 'uif': + cvals = categorize_array(cvals, cdim) factors = None if cvals.dtype.kind in 'uif' else list(cvals) if cdim is xdim and factors: @@ -972,7 +842,7 @@ def get_data(self, element, ranges, style): for i, (k, ds) in enumerate(grouped.items()): k = k[0] if isinstance(k, tuple) else k if group_dim: - gval = k if isinstance(k, basestring) else group_dim.pprint_value(k) + gval = k if isinstance(k, str) else group_dim.pprint_value(k) # Apply stacking or grouping if grouping == 'stacked': for sign, slc in [('negative', (None, 0)), ('positive', (0, None))]: @@ -987,7 +857,10 @@ def get_data(self, element, ranges, style): data['top'].append(ts) data[xdim.name].append(xs) data[stack_dim.name].append(slc_ds.dimension_values(stack_dim)) - if hover: data[ydim.name].append(ys) + if hover: + data[ydim.name].append(ys) + for vd in slc_ds.vdims[1:]: + 
data[vd.name].append(slc_ds.dimension_values(vd)) if not style_mapping: self._add_color_data(slc_ds, ranges, style, cdim, data, mapping, factors, colors) @@ -1000,13 +873,13 @@ def get_data(self, element, ranges, style): data[ydim.name].append(ys) if hover: data[xdim.name].append(xs) if group_dim not in ds.dimensions(): - ds = ds.add_dimension(group_dim.name, ds.ndims, gval) + ds = ds.add_dimension(group_dim, ds.ndims, gval) data[group_dim.name].append(ds.dimension_values(group_dim)) else: data[xdim.name].append(ds.dimension_values(xdim)) data[ydim.name].append(ds.dimension_values(ydim)) - if hover: + if hover and grouping != 'stacked': for vd in ds.vdims[1:]: data[vd.name].append(ds.dimension_values(vd)) @@ -1024,7 +897,7 @@ def get_data(self, element, ranges, style): for name, val in mapping.items(): sanitized = None - if isinstance(val, basestring): + if isinstance(val, str): sanitized = dimension_sanitizer(mapping[name]) mapping[name] = sanitized elif isinstance(val, dict) and 'field' in val: diff --git a/holoviews/plotting/bokeh/element.py b/holoviews/plotting/bokeh/element.py index 61d02c598d..41475fe6cb 100644 --- a/holoviews/plotting/bokeh/element.py +++ b/holoviews/plotting/bokeh/element.py @@ -1,8 +1,6 @@ -from __future__ import absolute_import, division, unicode_literals - -import sys import warnings -from types import FunctionType + +from types import FunctionType import param import numpy as np @@ -31,12 +29,13 @@ from ...core import DynamicMap, CompositeOverlay, Element, Dimension, Dataset from ...core.options import abbreviated_exception, SkipRendering from ...core import util -from ...element import Annotation, Graph, VectorField, Path, Contours, Tiles -from ...streams import Stream, Buffer, RangeXY, PlotSize +from ...element import ( + Annotation, Contours, Graph, Path, Tiles, VectorField +) +from ...streams import Buffer, RangeXY, PlotSize from ...util.transform import dim from ..plot import GenericElementPlot, GenericOverlayPlot from ..util 
import process_cmap, color_intervals, dim_range_key -from .callbacks import PlotSizeCallback from .plot import BokehPlot from .styles import ( base_properties, legend_dimensions, line_properties, mpl_to_bokeh, @@ -47,9 +46,20 @@ TOOL_TYPES, bokeh_version, date_to_integer, decode_bytes, get_tab_title, glyph_order, py2js_tickformatter, recursive_model_update, theme_attr_json, cds_column_replace, hold_policy, match_dim_specs, - compute_layout_properties, wrap_formatter, match_ax_type, remove_legend + compute_layout_properties, wrap_formatter, match_ax_type, + prop_is_none, remove_legend ) +try: + from bokeh.models import EqHistColorMapper +except ImportError: + EqHistColorMapper = None + +try: + from bokeh.models import BinnedTicker +except ImportError: + BinnedTicker = None + if bokeh_version >= '2.0.1': try: TOOLS_MAP = Tool._known_aliases @@ -178,11 +188,11 @@ class ElementPlot(BokehPlot, GenericElementPlot): 'left', 'right', None.""") xformatter = param.ClassSelector( - default=None, class_=(util.basestring, TickFormatter, FunctionType), doc=""" + default=None, class_=(str, TickFormatter, FunctionType), doc=""" Formatter for ticks along the x-axis.""") yformatter = param.ClassSelector( - default=None, class_=(util.basestring, TickFormatter, FunctionType), doc=""" + default=None, class_=(str, TickFormatter, FunctionType), doc=""" Formatter for ticks along the x-axis.""") _categorical = False @@ -200,10 +210,10 @@ class ElementPlot(BokehPlot, GenericElementPlot): def __init__(self, element, plot=None, **params): self.current_ranges = None - super(ElementPlot, self).__init__(element, **params) + super().__init__(element, **params) self.handles = {} if plot is None else self.handles['plot'] self.static = len(self.hmap) == 1 and len(self.keys) == len(self.hmap) - self.callbacks = self._construct_callbacks() + self.callbacks, self.source_streams = self._construct_callbacks() self.static_source = False self.streaming = [s for s in self.streams if isinstance(s, Buffer)] 
self.geographic = bool(self.hmap.last.traverse(lambda x: x, Tiles)) @@ -216,7 +226,6 @@ def __init__(self, element, plot=None, **params): # Flag to check whether plot has been updated self._updated = False - def _hover_opts(self, element): if self.batched: dims = list(self.hmap.last.kdims) @@ -225,7 +234,6 @@ def _hover_opts(self, element): dims += element.dimensions() return list(util.unique_iterator(dims)), {} - def _init_tools(self, element, callbacks=[]): """ Processes the list of tools to be supplied to the plot. @@ -239,7 +247,7 @@ def _init_tools(self, element, callbacks=[]): cb_tools, tool_names = [], [] hover = False for cb in callbacks: - for handle in cb.models+cb.extra_models: + for handle in cb.models: if handle and handle in TOOLS_MAP: tool_names.append(handle) if handle == 'hover': @@ -323,7 +331,6 @@ def _get_hover_data(self, data, element, dimensions=None): if dim not in data: data[dim] = [v for _ in range(len(list(data.values())[0]))] - def _merge_ranges(self, plots, xspecs, yspecs, xtype, ytype): """ Given a list of other plots return axes that are shared @@ -346,7 +353,6 @@ def _merge_ranges(self, plots, xspecs, yspecs, xtype, ytype): plot_ranges['x_range'] = plot.y_range return plot_ranges - def _get_axis_dims(self, element): """Returns the dimensions corresponding to each axis. 
@@ -360,11 +366,12 @@ def _get_axis_dims(self, element): else: return dims + [None] - def _axes_props(self, plots, subplots, element, ranges): # Get the bottom layer and range element el = element.traverse(lambda x: x, [lambda el: isinstance(el, Element) and not isinstance(el, (Annotation, Tiles))]) el = el[0] if el else element + if isinstance(el, Graph): + el = el.nodes dims = self._get_axis_dims(el) xlabel, ylabel, zlabel = self._get_axis_labels(dims) @@ -395,12 +402,12 @@ def _axes_props(self, plots, subplots, element, ranges): if xdims is not None and any(xdim.name in ranges and 'factors' in ranges[xdim.name] for xdim in xdims): categorical_x = True else: - categorical_x = any(isinstance(x, (util.basestring, bytes)) for x in (l, r)) + categorical_x = any(isinstance(x, (str, bytes)) for x in (l, r)) if ydims is not None and any(ydim.name in ranges and 'factors' in ranges[ydim.name] for ydim in ydims): categorical_y = True else: - categorical_y = any(isinstance(y, (util.basestring, bytes)) for y in (b, t)) + categorical_y = any(isinstance(y, (str, bytes)) for y in (b, t)) range_types = (self._x_range_type, self._y_range_type) if self.invert_axes: range_types = range_types[::-1] @@ -411,10 +418,7 @@ def _axes_props(self, plots, subplots, element, ranges): x_axis_type = 'auto' categorical_x = True else: - if isinstance(el, Graph): - xtype = el.nodes.get_dimension_type(xdims[0]) - else: - xtype = el.get_dimension_type(xdims[0]) + xtype = el.get_dimension_type(xdims[0]) if ((xtype is np.object_ and issubclass(type(l), util.datetime_types)) or xtype in util.datetime_types): x_axis_type = 'datetime' @@ -534,20 +538,6 @@ def _plot_properties(self, key, element): if aspect_props['aspect_ratio'] is None: aspect_props['aspect_ratio'] = self.state.aspect_ratio - if self.dynamic and aspect_props['match_aspect']: - # Sync the plot size on dynamic plots to support accurate - # scaling of dimension ranges - plot_size = [s for s in self.streams if isinstance(s, PlotSize)] - 
callbacks = [c for c in self.callbacks if isinstance(c, PlotSizeCallback)] - if plot_size: - stream = plot_size[0] - elif callbacks: - stream = callbacks[0].streams[0] - else: - stream = PlotSize() - self.callbacks.append(PlotSizeCallback(self, [stream], None)) - stream.add_subscriber(self._update_size) - plot_props = { 'align': self.align, 'margin': self.margin, @@ -570,14 +560,11 @@ def _plot_properties(self, key, element): plot_props['lod_'+lod_prop] = v return plot_props - def _update_size(self, width, height, scale): - self.state.frame_width = width - self.state.frame_height = height def _set_active_tools(self, plot): "Activates the list of active tools" for tool in self.active_tools: - if isinstance(tool, util.basestring): + if isinstance(tool, str): tool_type = TOOL_TYPES[tool] matching = [t for t in plot.toolbar.tools if isinstance(t, tool_type)] @@ -607,14 +594,15 @@ def _title_properties(self, key, plot, element): # this will override theme if not set to the default 12pt title_font = self._fontsize('title').get('fontsize') if title_font != '12pt': - opts['text_font_size'] = value(title_font) + title_font = title_font if bokeh_version > '2.2.3' else value(title_font) + opts['text_font_size'] = title_font return opts def _init_axes(self, plot): if self.xaxis is None: plot.xaxis.visible = False - elif isinstance(self.xaxis, util.basestring) and 'top' in self.xaxis: + elif isinstance(self.xaxis, str) and 'top' in self.xaxis: plot.above = plot.below plot.below = [] plot.xaxis[:] = plot.above @@ -623,7 +611,7 @@ def _init_axes(self, plot): if self.yaxis is None: plot.yaxis.visible = False - elif isinstance(self.yaxis, util.basestring) and'right' in self.yaxis: + elif isinstance(self.yaxis, str) and'right' in self.yaxis: plot.right = plot.left plot.left = [] plot.yaxis[:] = plot.right @@ -642,8 +630,9 @@ def _axis_properties(self, axis, key, plot, dimension=None, if ((axis == 'x' and self.xaxis in ['bottom-bare', 'top-bare', 'bare']) or (axis == 'y' and 
self.yaxis in ['left-bare', 'right-bare', 'bare'])): - axis_props['axis_label_text_font_size'] = value('0pt') - axis_props['major_label_text_font_size'] = value('0pt') + zero_pt = '0pt' if bokeh_version > '2.2.3' else value('0pt') + axis_props['axis_label_text_font_size'] = zero_pt + axis_props['major_label_text_font_size'] = zero_pt axis_props['major_tick_line_color'] = None axis_props['minor_tick_line_color'] = None else: @@ -652,11 +641,14 @@ def _axis_properties(self, axis, key, plot, dimension=None, axis_props['axis_label_text_font_size'] = labelsize ticksize = self._fontsize('%sticks' % axis, common=False).get('fontsize') if ticksize: - axis_props['major_label_text_font_size'] = value(ticksize) + ticksize = ticksize if bokeh_version > '2.2.3' else value(ticksize) + axis_props['major_label_text_font_size'] = ticksize rotation = self.xrotation if axis == 'x' else self.yrotation if rotation: axis_props['major_label_orientation'] = np.radians(rotation) ticker = self.xticks if axis == 'x' else self.yticks + if isinstance(ticker, np.ndarray): + ticker = list(ticker) if isinstance(ticker, Ticker): axis_props['ticker'] = ticker elif isinstance(ticker, int): @@ -668,13 +660,15 @@ def _axis_properties(self, axis, key, plot, dimension=None, # because in JS the lookup fails otherwise ticks = [int(t) if isinstance(t, float) and t.is_integer() else t for t in ticks] - labels = [l if isinstance(l, util.basestring) else str(l) + labels = [l if isinstance(l, str) else str(l) for l in labels] - axis_props['ticker'] = FixedTicker(ticks=ticks) - axis_props['major_label_overrides'] = dict(zip(ticks, labels)) else: - axis_props['ticker'] = FixedTicker(ticks=ticker) - + ticks, labels = ticker, None + if ticks and util.isdatetime(ticks[0]): + ticks = [util.dt_to_int(tick, 'ms') for tick in ticks] + axis_props['ticker'] = FixedTicker(ticks=ticks) + if labels is not None: + axis_props['major_label_overrides'] = dict(zip(ticks, labels)) formatter = self.xformatter if axis == 'x' else 
self.yformatter if formatter: formatter = wrap_formatter(formatter, axis) @@ -706,6 +700,9 @@ def _axis_properties(self, axis, key, plot, dimension=None, box_zoom = self.state.select(type=tools.BoxZoomTool) if box_zoom: box_zoom[0].match_aspect = True + wheel_zoom = self.state.select(type=tools.WheelZoomTool) + if wheel_zoom: + wheel_zoom[0].zoom_on_axis = False elif isinstance(axis_obj, CategoricalAxis): for key in list(axis_props): if key.startswith('major_label'): @@ -812,6 +809,7 @@ def _update_ranges(self, element, ranges): xaxis, yaxis = self.handles['xaxis'], self.handles['yaxis'] categorical = isinstance(xaxis, CategoricalAxis) or isinstance(yaxis, CategoricalAxis) datetime = isinstance(xaxis, DatetimeAxis) or isinstance(yaxis, CategoricalAxis) + range_streams = [s for s in self.streams if isinstance(s, RangeXY)] if data_aspect and (categorical or datetime): ax_type = 'categorical' if categorical else 'datetime axes' @@ -834,7 +832,6 @@ def _update_ranges(self, element, ranges): else: frame_aspect = plot.frame_height/plot.frame_width - range_streams = [s for s in self.streams if isinstance(s, RangeXY)] if self.drawn: current_l, current_r = plot.x_range.start, plot.x_range.end current_b, current_t = plot.y_range.start, plot.y_range.end @@ -903,6 +900,8 @@ def _update_ranges(self, element, ranges): box_zoom.match_aspect = True if scroll_zoom: scroll_zoom.zoom_on_axis = False + elif any(rs._triggering for rs in range_streams): + xupdate, yupdate = False, False if not self.drawn or xupdate: self._update_range(x_range, l, r, xfactors, self.invert_xaxis, @@ -1040,15 +1039,21 @@ def _init_glyph(self, plot, mapping, properties): if isinstance(plot_method, tuple): # Handle alternative plot method for flipped axes plot_method = plot_method[int(self.invert_axes)] + if 'legend_field' in properties and 'legend_label' in properties: + del properties['legend_label'] renderer = getattr(plot, plot_method)(**dict(properties, **mapping)) return renderer, renderer.glyph + def 
_element_transform(self, transform, element, ranges): + return transform.apply(element, ranges=ranges, flat=True) + + def _apply_transforms(self, element, data, ranges, style, group=None): new_style = dict(style) prefix = group+'_' if group else '' for k, v in dict(style).items(): - if isinstance(v, util.basestring): + if isinstance(v, str): if validate(k, v) == True: continue elif v in element or (isinstance(element, Graph) and v in element.nodes): @@ -1069,11 +1074,8 @@ def _apply_transforms(self, element, data, ranges, style, group=None): ds = Dataset({d.name: v for d, v in self.overlay_dims.items()}, list(self.overlay_dims)) val = v.apply(ds, ranges=ranges, flat=True)[0] - elif isinstance(element, Path) and not isinstance(element, Contours): - val = np.concatenate([v.apply(el, ranges=ranges, flat=True) - for el in element.split()]) else: - val = v.apply(element, ranges=ranges, flat=True) + val = self._element_transform(v, element, ranges) if (not util.isscalar(val) and len(util.unique_array(val)) == 1 and ((not 'color' in k or validate('color', val)) or k in self._nonvectorized_styles)): @@ -1200,12 +1202,13 @@ def _glyph_properties(self, plot, element, source, ranges, style, group=None): def _filter_properties(self, properties, glyph_type, allowed): glyph_props = dict(properties) + for gtype in ((glyph_type, '') if glyph_type else ('',)): for prop in ('color', 'alpha'): glyph_prop = properties.get(gtype+prop) - if glyph_prop and ('line_'+prop not in glyph_props or gtype): + if glyph_prop is not None and ('line_'+prop not in glyph_props or gtype): glyph_props['line_'+prop] = glyph_prop - if glyph_prop and ('fill_'+prop not in glyph_props or gtype): + if glyph_prop is not None and ('fill_'+prop not in glyph_props or gtype): glyph_props['fill_'+prop] = glyph_prop props = {k[len(gtype):]: v for k, v in glyph_props.items() @@ -1231,8 +1234,15 @@ def _update_glyph(self, renderer, properties, mapping, glyph, source, data): for glyph_type in ('', 'selection_', 
'nonselection_', 'hover_', 'muted_'): if renderer: glyph = getattr(renderer, glyph_type+'glyph', None) + if glyph == 'auto': + base_glyph = renderer.glyph + props = base_glyph.properties_with_values() + glyph = type(base_glyph)(**{k: v for k, v in props.items() + if not prop_is_none(v)}) + setattr(renderer, glyph_type+'glyph', glyph) if not glyph or (not renderer and glyph_type): continue + filtered = self._filter_properties(merged, glyph_type, allowed_properties) # Ensure that data is populated before updating glyph @@ -1295,7 +1305,7 @@ def _postprocess_hover(self, renderer, source): hover = self.handles.get('hover') if hover is None: return - if not isinstance(hover.tooltips, util.basestring) and 'hv_created' in hover.tags: + if not isinstance(hover.tooltips, str) and 'hv_created' in hover.tags: for k, values in source.data.items(): key = '@{%s}' % k if key in hover.formatters: @@ -1391,10 +1401,6 @@ def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): self.drawn = True - trigger = self._trigger - self._trigger = [] - Stream.trigger(trigger) - return plot @@ -1441,7 +1447,10 @@ def _reset_ranges(self): """ Resets RangeXY streams if norm option is set to framewise """ - if self.overlaid: + # Skipping conditional to temporarily revert fix (see https://github.com/holoviz/holoviews/issues/4396) + # This fix caused PlotSize change events to rerender + # rasterized/datashaded with the full extents which was wrong + if self.overlaid or True: return for el, callbacks in self.traverse(lambda x: (x.current_frame, x.callbacks)): if el is None: @@ -1460,6 +1469,7 @@ def update_frame(self, key, ranges=None, plot=None, element=None): """ self._reset_ranges() reused = isinstance(self.hmap, DynamicMap) and (self.overlaid or self.batched) + self.prev_frame = self.current_frame if not reused and element is None: element = self._get_frame(key) elif element is not None: @@ -1687,23 +1697,34 @@ class ColorbarPlot(ElementPlot): 'opts': {'location': 
'bottom_right', 'orientation': 'horizontal'}}} - color_levels = param.ClassSelector(default=None, class_=( - (int, list) + ((range,) if sys.version_info.major > 2 else ())), doc=""" + color_levels = param.ClassSelector(default=None, class_=(int, list, range), doc=""" Number of discrete colors to use when colormapping or a set of color intervals defining the range of values to map each color to.""") + cformatter = param.ClassSelector( + default=None, class_=(str, TickFormatter, FunctionType), doc=""" + Formatter for ticks along the colorbar axis.""") + clabel = param.String(default=None, doc=""" - An explicit override of the color bar label, if set takes precedence + An explicit override of the color bar label. If set, takes precedence over the title key in colorbar_opts.""") clim = param.Tuple(default=(np.nan, np.nan), length=2, doc=""" - User-specified colorbar axis range limits for the plot, as a tuple (low,high). - If specified, takes precedence over data and dimension ranges.""") + User-specified colorbar axis range limits for the plot, as a tuple (low,high). + If specified, takes precedence over data and dimension ranges.""") + + clim_percentile = param.ClassSelector(default=False, class_=(int, float, bool), doc=""" + Percentile value to compute colorscale robust to outliers. 
If + True, uses 2nd and 98th percentile; otherwise uses the specified + numerical percentile value.""") cformatter = param.ClassSelector( - default=None, class_=(util.basestring, TickFormatter, FunctionType), doc=""" + default=None, class_=(str, TickFormatter, FunctionType), doc=""" Formatter for ticks along the colorbar axis.""") + cnorm = param.ObjectSelector(default='linear', objects=['linear', 'log', 'eq_hist'], doc=""" + Color normalization to be applied during colormapping.""") + colorbar = param.Boolean(default=False, doc=""" Whether to display a colorbar.""") @@ -1727,7 +1748,7 @@ class ColorbarPlot(ElementPlot): #FFFFFFFF or a length 3 or length 4 tuple specifying values in the range 0-1 or a named HTML color.""") - logz = param.Boolean(default=False, doc=""" + logz = param.Boolean(default=False, doc=""" Whether to apply log scaling to the z-axis.""") symmetric = param.Boolean(default=False, doc=""" @@ -1743,7 +1764,9 @@ class ColorbarPlot(ElementPlot): def _draw_colorbar(self, plot, color_mapper, prefix=''): if CategoricalColorMapper and isinstance(color_mapper, CategoricalColorMapper): return - if LogColorMapper and isinstance(color_mapper, LogColorMapper) and color_mapper.low > 0: + if EqHistColorMapper and isinstance(color_mapper, EqHistColorMapper) and BinnedTicker: + ticker = BinnedTicker(mapper=color_mapper) + elif isinstance(color_mapper, LogColorMapper) and color_mapper.low > 0: ticker = LogTicker() else: ticker = BasicTicker() @@ -1800,7 +1823,11 @@ def _get_colormapper(self, eldim, element, ranges, style, factors=None, colors=N if cdim == eldim] if cmappers: cmapper, mappers = cmappers[0] - cmapper = cmapper if cmapper else mappers[0] + if not cmapper: + if mappers and mappers[0]: + cmapper = mappers[0] + else: + return None self.handles['color_mapper'] = cmapper return cmapper else: @@ -1812,7 +1839,10 @@ def _get_colormapper(self, eldim, element, ranges, style, factors=None, colors=N if all(util.isfinite(cl) for cl in self.clim): low, high = 
self.clim elif dim_name in ranges: - low, high = ranges[dim_name]['combined'] + if self.clim_percentile and 'robust' in ranges[dim_name]: + low, high = ranges[dim_name]['robust'] + else: + low, high = ranges[dim_name]['combined'] dlow, dhigh = ranges[dim_name]['data'] if (util.is_int(low, int_like=True) and util.is_int(high, int_like=True) and @@ -1876,12 +1906,12 @@ def _get_colormapper(self, eldim, element, ranges, style, factors=None, colors=N return cmapper - def _get_color_data(self, element, ranges, style, name='color', factors=None, colors=None, + def _get_color_data(self, element, ranges, style, cdim, name='color', factors=None, colors=None, int_categories=False): data, mapping = {}, {} - cdim = element.get_dimension(self.color_index) + cdim = element.get_dimension(cdim) color = style.get(name, None) - if cdim and ((isinstance(color, util.basestring) and color in element) or isinstance(color, dim)): + if cdim and ((isinstance(color, str) and color in element) or isinstance(color, dim)): self.param.warning( "Cannot declare style mapping for '%s' option and " "declare a color_index; ignoring the color_index." @@ -1923,8 +1953,9 @@ def _get_color_data(self, element, ranges, style, name='color', factors=None, co def _get_cmapper_opts(self, low, high, factors, colors): if factors is None: - colormapper = LinearColorMapper - if self.logz: + if self.cnorm == 'linear': + colormapper = LinearColorMapper + if self.cnorm == 'log' or self.logz: colormapper = LogColorMapper if util.is_int(low) and util.is_int(high) and low == 0: low = 1 @@ -1934,10 +1965,16 @@ def _get_cmapper_opts(self, low, high, factors, colors): elif util.is_number(low) and low <= 0: self.param.warning( "Log color mapper lower bound <= 0 and will not " - "render corrrectly. Ensure you set a positive " + "render correctly. Ensure you set a positive " "lower bound on the color dimension or using " "the `clim` option." 
) + elif self.cnorm == 'eq_hist': + if EqHistColorMapper is None: + raise ImportError("Could not import bokeh.models.EqHistColorMapper. " + "Note that the option cnorm='eq_hist' requires " + "bokeh 2.2.3 or higher.") + colormapper = EqHistColorMapper if isinstance(low, (bool, np.bool_)): low = int(low) if isinstance(high, (bool, np.bool_)): high = int(high) # Pad zero-range to avoid breaking colorbar (as of bokeh 1.0.4) @@ -1965,7 +2002,7 @@ def _init_glyph(self, plot, mapping, properties): """ Returns a Bokeh glyph object and optionally creates a colorbar. """ - ret = super(ColorbarPlot, self)._init_glyph(plot, mapping, properties) + ret = super()._init_glyph(plot, mapping, properties) if self.colorbar: for k, v in list(self.handles.items()): if not k.endswith('color_mapper'): @@ -2090,7 +2127,7 @@ def _process_legend(self, overlay): (isinstance(p, OverlayPlot) or isinstance(p, AnnotationPlot))] if (not self.show_legend or len(plot.legend) == 0 or (len(non_annotation) <= 1 and not (self.dynamic or legend_plots))): - return super(OverlayPlot, self)._process_legend() + return super()._process_legend() elif not plot.legend: return @@ -2182,7 +2219,7 @@ def _process_legend(self, overlay): leg.update(**self.legend_opts) for item in leg.items: for r in item.renderers: - r.muted = self.legend_muted + r.muted = self.legend_muted or r.muted def _init_tools(self, element, callbacks=[]): @@ -2196,7 +2233,7 @@ def _init_tools(self, element, callbacks=[]): if el is not None: el_tools = subplot._init_tools(el, self.callbacks) for tool in el_tools: - if isinstance(tool, util.basestring): + if isinstance(tool, str): tool_type = TOOL_TYPES.get(tool) else: tool_type = type(tool) @@ -2223,7 +2260,7 @@ def _merge_tools(self, subplot): self.handles['hover'] = subplot.handles['hover'] elif 'hover' in subplot.handles and 'hover_tools' in self.handles: hover = subplot.handles['hover'] - if hover.tooltips and not isinstance(hover.tooltips, util.basestring): + if hover.tooltips and not 
isinstance(hover.tooltips, str): tooltips = tuple((name, spec.replace('{%F %T}', '')) for name, spec in hover.tooltips) else: tooltips = () @@ -2231,12 +2268,11 @@ def _merge_tools(self, subplot): if tool: tool_renderers = [] if tool.renderers == 'auto' else tool.renderers hover_renderers = [] if hover.renderers == 'auto' else hover.renderers - renderers = tool_renderers + hover_renderers + renderers = [r for r in tool_renderers + hover_renderers if r is not None] tool.renderers = list(util.unique_iterator(renderers)) if 'hover' not in self.handles: self.handles['hover'] = tool - def _get_factors(self, overlay, ranges): xfactors, yfactors = [], [] for k, sp in self.subplots.items(): @@ -2252,13 +2288,11 @@ def _get_factors(self, overlay, ranges): yfactors = np.concatenate(yfactors) return util.unique_array(xfactors), util.unique_array(yfactors) - def _get_axis_dims(self, element): subplots = list(self.subplots.values()) if subplots: return subplots[0]._get_axis_dims(element) - return super(OverlayPlot, self)._get_axis_dims(element) - + return super()._get_axis_dims(element) def initialize_plot(self, ranges=None, plot=None, plots=None): key = util.wrap_tuple(self.hmap.last_key) @@ -2311,13 +2345,13 @@ def initialize_plot(self, ranges=None, plot=None, plots=None): self.drawn = True self.handles['plots'] = plots - self._update_callbacks(self.handles['plot']) if 'plot' in self.handles and not self.tabs: plot = self.handles['plot'] self.handles['xaxis'] = plot.xaxis[0] self.handles['yaxis'] = plot.yaxis[0] self.handles['x_range'] = plot.x_range self.handles['y_range'] = plot.y_range + for cb in self.callbacks: cb.initialize() @@ -2337,6 +2371,7 @@ def update_frame(self, key, ranges=None, element=None): """ self._reset_ranges() reused = isinstance(self.hmap, DynamicMap) and self.overlaid + self.prev_frame = self.current_frame if not reused and element is None: element = self._get_frame(key) elif element is not None: diff --git a/holoviews/plotting/bokeh/geometry.py 
b/holoviews/plotting/bokeh/geometry.py index 690ca027fd..c5fa923010 100644 --- a/holoviews/plotting/bokeh/geometry.py +++ b/holoviews/plotting/bokeh/geometry.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np import param diff --git a/holoviews/plotting/bokeh/graphs.py b/holoviews/plotting/bokeh/graphs.py index e893e6abbd..d30127ea69 100644 --- a/holoviews/plotting/bokeh/graphs.py +++ b/holoviews/plotting/bokeh/graphs.py @@ -1,23 +1,22 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict import param import numpy as np -from bokeh.models import (StaticLayoutProvider, NodesAndLinkedEdges, - EdgesAndLinkedNodes, Patches, Bezier, ColumnDataSource) +from bokeh.models import ( + StaticLayoutProvider, NodesAndLinkedEdges, EdgesAndLinkedNodes, + Patches, Bezier, ColumnDataSource, NodesOnly +) from ...core.data import Dataset -from ...core.options import Cycle, abbreviated_exception -from ...core.util import basestring, dimension_sanitizer, unique_array +from ...core.options import abbreviated_exception +from ...core.util import dimension_sanitizer from ...util.transform import dim from ..mixins import ChordMixin -from ..util import process_cmap, get_directed_graph_paths +from ..util import get_directed_graph_paths from .chart import ColorbarPlot, PointPlot from .element import CompositeElementPlot, LegendPlot from .styles import ( - base_properties, line_properties, fill_properties, text_properties, - rgba_tuple + base_properties, line_properties, fill_properties, text_properties ) @@ -42,16 +41,6 @@ class GraphPlot(CompositeElementPlot, ColorbarPlot, LegendPlot): tools = param.List(default=['hover', 'tap'], doc=""" A list of plugin tools to use on the plot.""") - # Deprecated options - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`node_color=dim('color')`""") - - edge_color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `edge_color=dim('color')`""") - # Map each glyph to a style group _style_groups = {'scatter': 'node', 'multi_line': 'edge', 'patches': 'edge', 'bezier': 'edge'} @@ -93,63 +82,13 @@ def _hover_opts(self, element): dims = [] return dims, {} - def get_extents(self, element, ranges, range_type='combined'): - return super(GraphPlot, self).get_extents(element.nodes, ranges, range_type) - + return super().get_extents(element.nodes, ranges, range_type) def _get_axis_dims(self, element): - return element.nodes.dimensions()[:2] - - - def _get_edge_colors(self, element, ranges, edge_data, edge_mapping, style): - cdim = element.get_dimension(self.edge_color_index) - if not cdim: - return - elstyle = self.lookup_options(element, 'style') - cycle = elstyle.kwargs.get('edge_color') - if not isinstance(cycle, Cycle): - cycle = None - - idx = element.get_dimension_index(cdim) - field = dimension_sanitizer(cdim.name) - cvals = element.dimension_values(cdim) - if idx in self._node_columns: - factors = element.nodes.dimension_values(2, expanded=False) - elif idx == 2 and cvals.dtype.kind in 'uif': - factors = None - else: - factors = unique_array(cvals) - - default_cmap = 'viridis' if factors is None else 'tab20' - cmap = style.get('edge_cmap', style.get('cmap', default_cmap)) - nan_colors = {k: rgba_tuple(v) for k, v in self.clipping_colors.items()} - if factors is None or (factors.dtype.kind in 'uif' and idx not in self._node_columns): - colors, factors = None, None - else: - if factors.dtype.kind == 'f': - cvals = cvals.astype(np.int32) - factors = factors.astype(np.int32) - if factors.dtype.kind not in 'SU': - field += '_str__' - cvals = [str(f) for f in cvals] - factors = (str(f) for f in factors) - factors = list(factors) - if isinstance(cmap, dict): - colors = [cmap.get(f, 
nan_colors.get('NaN', self._default_nan)) for f in factors] - else: - colors = process_cmap(cycle or cmap, len(factors)) - if field not in edge_data: - edge_data[field] = cvals - edge_style = dict(style, cmap=cmap) - mapper = self._get_colormapper(cdim, element, ranges, edge_style, - factors, colors, 'edge', 'edge_colormapper') - transform = {'field': field, 'transform': mapper} - color_type = 'fill_color' if self.filled else 'line_color' - edge_mapping['edge_'+color_type] = transform - edge_mapping['edge_nonselection_'+color_type] = transform - edge_mapping['edge_selection_'+color_type] = transform - + if isinstance(element, Graph): + element = element.nodes + return element.dimensions()[:2] def _get_edge_paths(self, element, ranges): path_data, mapping = {}, {} @@ -197,23 +136,8 @@ def get_data(self, element, ranges, style): # Handle node colors fixed_color = style.pop('node_color', None) - cycle = self.lookup_options(element, 'style').kwargs.get('node_color') - if isinstance(cycle, Cycle) and 'cmap' not in style: - colors = cycle - else: - colors = None - cdata, cmapping = self._get_color_data( - element.nodes, ranges, style, name='node_fill_color', - colors=colors, int_categories=True - ) - if fixed_color is not None and not cdata: + if fixed_color is not None: style['node_color'] = fixed_color - point_data.update(cdata) - point_mapping = cmapping - if 'node_fill_color' in point_mapping: - style = {k: v for k, v in style.items() if k not in - ['node_fill_color', 'node_nonselection_fill_color']} - point_mapping['node_nonselection_fill_color'] = point_mapping['node_fill_color'] # Handle edge colors edge_mapping = {} @@ -225,7 +149,6 @@ def get_data(self, element, ranges, style): start = np.array([node_indices.get(x, nan_node) for x in start], dtype=np.int32) end = np.array([node_indices.get(y, nan_node) for y in end], dtype=np.int32) path_data = dict(start=start, end=end) - self._get_edge_colors(element, ranges, path_data, edge_mapping, style) if not static: 
pdata, pmapping = self._get_edge_paths(element, ranges) path_data.update(pdata) @@ -245,7 +168,7 @@ def get_data(self, element, ranges, style): dim_name += '_values' path_data[dim_name] = element.dimension_values(d) data = {'scatter_1': point_data, self.edge_glyph: path_data, 'layout': layout} - mapping = {'scatter_1': point_mapping, self.edge_glyph: edge_mapping} + mapping = {'scatter_1': {}, self.edge_glyph: edge_mapping} return data, mapping, style @@ -328,14 +251,14 @@ def _set_interaction_policies(self, renderer): elif self.selection_policy == 'edges': renderer.selection_policy = EdgesAndLinkedNodes() else: - renderer.selection_policy = None + renderer.selection_policy = NodesOnly() if self.inspection_policy == 'nodes': renderer.inspection_policy = NodesAndLinkedEdges() elif self.inspection_policy == 'edges': renderer.inspection_policy = EdgesAndLinkedNodes() else: - renderer.inspection_policy = None + renderer.inspection_policy = NodesOnly() def _init_glyphs(self, plot, element, ranges, source): # Get data and initialize data source @@ -376,7 +299,7 @@ def _init_glyphs(self, plot, element, ranges, source): class ChordPlot(ChordMixin, GraphPlot): - labels = param.ClassSelector(class_=(basestring, dim), doc=""" + labels = param.ClassSelector(class_=(str, dim), doc=""" The dimension or dimension value transform used to draw labels from.""") show_frame = param.Boolean(default=False, doc=""" @@ -384,7 +307,7 @@ class ChordPlot(ChordMixin, GraphPlot): # Deprecated options - label_index = param.ClassSelector(default=None, class_=(basestring, int), + label_index = param.ClassSelector(default=None, class_=(str, int), allow_None=True, doc=""" Index of the dimension from which the node labels will be drawn""") @@ -409,7 +332,7 @@ def _sync_arcs(self): arc_glyph.update(**styles) def _init_glyphs(self, plot, element, ranges, source): - super(ChordPlot, self)._init_glyphs(plot, element, ranges, source) + super()._init_glyphs(plot, element, ranges, source) # Ensure that arc 
glyph matches node style if 'multi_line_2_glyph' in self.handles: arc_renderer = self.handles['multi_line_2_glyph_renderer'] @@ -422,11 +345,11 @@ def _init_glyphs(self, plot, element, ranges, source): def _update_glyphs(self, element, ranges, style): if 'multi_line_2_glyph' in self.handles: self._sync_arcs() - super(ChordPlot, self)._update_glyphs(element, ranges, style) + super()._update_glyphs(element, ranges, style) def get_data(self, element, ranges, style): offset = style.pop('label_offset', 1.05) - data, mapping, style = super(ChordPlot, self).get_data(element, ranges, style) + data, mapping, style = super().get_data(element, ranges, style) angles = element._angles arcs = defaultdict(list) for i in range(len(element.nodes)): @@ -447,7 +370,7 @@ def get_data(self, element, ranges, style): "and declare a label_index; ignoring the label_index.") elif label_dim: labels = label_dim - elif isinstance(labels, basestring): + elif isinstance(labels, str): labels = element.nodes.get_dimension(labels) if labels is None: @@ -498,8 +421,6 @@ class TriMeshPlot(GraphPlot): def _process_vertices(self, element): style = self.style[self.cyclic_index] edge_color = style.get('edge_color') - if edge_color not in element.nodes: - edge_color = self.edge_color_index simplex_dim = element.get_dimension(edge_color) vertex_dim = element.nodes.get_dimension(edge_color) if vertex_dim and not simplex_dim: @@ -512,8 +433,8 @@ def _process_vertices(self, element): def _init_glyphs(self, plot, element, ranges, source): element = self._process_vertices(element) - super(TriMeshPlot, self)._init_glyphs(plot, element, ranges, source) + super()._init_glyphs(plot, element, ranges, source) def _update_glyphs(self, element, ranges, style): element = self._process_vertices(element) - super(TriMeshPlot, self)._update_glyphs(element, ranges, style) + super()._update_glyphs(element, ranges, style) diff --git a/holoviews/plotting/bokeh/heatmap.py b/holoviews/plotting/bokeh/heatmap.py index 
51780abec2..8e9d8e6f64 100644 --- a/holoviews/plotting/bokeh/heatmap.py +++ b/holoviews/plotting/bokeh/heatmap.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np @@ -68,7 +66,10 @@ def is_radial(cls, heatmap): and not (opts.get('radial') == False)) or opts.get('radial', False)) def _get_factors(self, element, ranges): - return super(HeatMapPlot, self)._get_factors(element.gridded, ranges) + return super()._get_factors(element.gridded, ranges) + + def _element_transform(self, transform, element, ranges): + return transform.apply(element.gridded, ranges=ranges, flat=False).T.flatten() def get_data(self, element, ranges, style): x, y, z = [dimension_sanitizer(d) for d in element.dimensions(label=True)[:3]] @@ -154,13 +155,13 @@ def _draw_markers(self, plot, element, marks, axis='x'): 'HeatMap quads for distinguishable set a line_width') def _init_glyphs(self, plot, element, ranges, source): - super(HeatMapPlot, self)._init_glyphs(plot, element, ranges, source) + super()._init_glyphs(plot, element, ranges, source) self._draw_markers(plot, element, self.xmarks, axis='x') self._draw_markers(plot, element, self.ymarks, axis='y') def _update_glyphs(self, element, ranges, style): - super(HeatMapPlot, self)._update_glyphs(element, ranges, style) + super()._update_glyphs(element, ranges, style) plot = self.handles['plot'] self._draw_markers(plot, element, self.xmarks, axis='x') self._draw_markers(plot, element, self.ymarks, axis='y') @@ -242,11 +243,10 @@ class RadialHeatMapPlot(CompositeElementPlot, ColorbarPlot): ['ticks_' + p for p in text_properties] + ['cmap']) def __init__(self, *args, **kwargs): - super(RadialHeatMapPlot, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.xaxis = None self.yaxis = None - def _get_bins(self, kind, order, reverse=False): """ Map elements from given `order` array to bins of start and end values @@ -269,7 +269,6 @@ def _get_bins(self, kind, order, 
reverse=False): return dict(zip(order, bins)) - @staticmethod def _get_bounds(mapper, values): """ @@ -279,15 +278,13 @@ def _get_bounds(mapper, values): array = np.array([mapper.get(x) for x in values]) return array[:, 0], array[:, 1] - def _postprocess_hover(self, renderer, source): """ Limit hover tool to annular wedges only. """ if isinstance(renderer.glyph, AnnularWedge): - super(RadialHeatMapPlot, self)._postprocess_hover(renderer, source) - + super()._postprocess_hover(renderer, source) def get_extents(self, view, ranges, range_type='combined'): """Supply custom, static extents because radial heatmaps always have @@ -299,11 +296,9 @@ def get_extents(self, view, ranges, range_type='combined'): upper = 2 * self.max_radius + self.radius_outer return (lower, lower, upper, upper) - def _get_axis_dims(self, element): return (None, None) - def _axis_properties(self, *args, **kwargs): """Overwrite default axis properties handling due to clashing categorical input and numerical output axes. @@ -315,7 +310,6 @@ def _axis_properties(self, *args, **kwargs): return {} - def get_default_mapping(self, z, cmapper): """Create dictionary containing default ColumnDataSource glyph to data mappings. @@ -347,25 +341,21 @@ def get_default_mapping(self, z, cmapper): 'multi_line_1': map_xmarks, 'arc_1': map_ymarks} - def _pprint(self, element, dim_label, vals): - """Helper function to convert values to corresponding dimension type. - """ - + Helper function to convert values to corresponding dimension type. + """ if vals.dtype.kind not in 'SU': dim = element.gridded.get_dimension(dim_label) return [dim.pprint_value(v) for v in vals] return vals - def _compute_tick_mapping(self, kind, order, bins): - """Helper function to compute tick mappings based on `ticks` and + """ + Helper function to compute tick mappings based on `ticks` and default orders and bins. 
- """ - if kind == "angle": ticks = self.xticks reverse = True @@ -386,12 +376,10 @@ def _compute_tick_mapping(self, kind, order, bins): return {x: bins[x] for x in text_nth} - def _get_seg_labels_data(self, order_seg, bins_seg): - """Generate ColumnDataSource dictionary for segment labels. - """ - + Generate ColumnDataSource dictionary for segment labels. + """ if self.xticks is None: return dict(x=[], y=[], text=[], angle=[]) @@ -412,10 +400,9 @@ def _get_seg_labels_data(self, order_seg, bins_seg): angle=1.5 * np.pi + radiant) def _get_ann_labels_data(self, order_ann, bins_ann): - """Generate ColumnDataSource dictionary for annular labels. - """ - + Generate ColumnDataSource dictionary for annular labels. + """ if self.yticks is None: return dict(x=[], y=[], text=[], angle=[]) @@ -433,13 +420,11 @@ def _get_ann_labels_data(self, order_ann, bins_ann): text=labels, angle=[0]*len(labels)) - @staticmethod def _get_markers(marks, order, bins): - """Helper function to get marker positions depending on mark type. - """ - + Helper function to get marker positions depending on mark type. + """ if callable(marks): markers = [x for x in order if marks(x)] elif isinstance(marks, list): @@ -452,12 +437,10 @@ def _get_markers(marks, order, bins): return np.array([bins[x][1] for x in markers]) - def _get_xmarks_data(self, order_seg, bins_seg): - """Generate ColumnDataSource dictionary for segment separation lines. - """ - + Generate ColumnDataSource dictionary for segment separation lines. + """ if not self.xmarks: return dict(xs=[], ys=[]) @@ -477,12 +460,10 @@ def _get_xmarks_data(self, order_seg, bins_seg): return dict(xs=list(xs), ys=list(ys)) - def _get_ymarks_data(self, order_ann, bins_ann): - """Generate ColumnDataSource dictionary for segment separation lines. - """ - + Generate ColumnDataSource dictionary for segment separation lines. 
+ """ if not self.ymarks: return dict(radius=[]) @@ -490,9 +471,7 @@ def _get_ymarks_data(self, order_ann, bins_ann): return dict(radius=radius) - def get_data(self, element, ranges, style): - # dimension labels dim_labels = element.dimensions(label=True)[:3] x, y, z = [dimension_sanitizer(d) for d in dim_labels] @@ -566,7 +545,7 @@ def get_data(self, element, ranges, style): return data, mapping, style def _init_glyph(self, plot, mapping, properties, key): - ret = super(RadialHeatMapPlot, self)._init_glyph(plot, mapping, properties, key) + ret = super()._init_glyph(plot, mapping, properties, key) if self.colorbar and 'color_mapper' in self.handles: self._draw_colorbar(plot, self.handles['color_mapper']) return ret diff --git a/holoviews/plotting/bokeh/hex_tiles.py b/holoviews/plotting/bokeh/hex_tiles.py index 5cd7e93391..9a3365ea89 100644 --- a/holoviews/plotting/bokeh/hex_tiles.py +++ b/holoviews/plotting/bokeh/hex_tiles.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals import types import param @@ -11,9 +10,8 @@ from ...core import Dimension, Operation from ...core.options import Compositor -from ...core.util import basestring, isfinite, max_range +from ...core.util import isfinite, max_range from ...element import HexTiles -from ...util.transform import dim as dim_transform from .element import ColorbarPlot from .selection import BokehOverlaySelectionDisplay from .styles import base_properties, line_properties, fill_properties @@ -124,28 +122,6 @@ class HexTilesPlot(ColorbarPlot): doc=""" The orientation of hexagon bins. By default the pointy side is on top.""") - # Deprecated options - - color_index = param.ClassSelector(default=2, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`color=dim('color')`""") - - max_scale = param.Number(default=0.9, bounds=(0, None), doc=""" - When size_index is enabled this defines the maximum size of each - bin relative to uniform tile size, i.e. for a value of 1, the - largest bin will match the size of bins when scaling is disabled. - Setting value larger than 1 will result in overlapping bins.""") - - min_scale = param.Number(default=0, bounds=(0, None), doc=""" - When size_index is enabled this defines the minimum size of each - bin relative to uniform tile size, i.e. for a value of 1, the - smallest bin will match the size of bins when scaling is disabled. - Setting value larger than 1 will result in overlapping bins.""") - - size_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the sizes will the drawn.""") - selection_display = BokehOverlaySelectionDisplay() style_opts = base_properties + line_properties + fill_properties + ['cmap', 'scale'] @@ -165,7 +141,7 @@ def get_extents(self, element, ranges, range_type='combined'): if yd and ydim.name in ranges: ranges[ydim.name]['hard'] = yd.range ranges[ydim.name]['hard'] = max_range([yd.soft_range, ranges[ydim.name]['soft']]) - return super(HexTilesPlot, self).get_extents(element, ranges, range_type) + return super().get_extents(element, ranges, range_type) def _hover_opts(self, element): if self.aggregator is np.size: @@ -193,7 +169,7 @@ def get_data(self, element, ranges, style): scale = ysize/xsize data = {'q': q, 'r': r} - cdata, cmapping = self._get_color_data(element, ranges, style) + cdata, cmapping = self._get_color_data(element, ranges, style, element.vdims[0]) data.update(cdata) mapping.update(cmapping) if self.min_count is not None and self.min_count <= 0: @@ -205,26 +181,5 @@ def get_data(self, element, ranges, style): style['orientation'] = self.orientation+'top' style['size'] = size style['aspect_scale'] = scale - scale_dim = element.get_dimension(self.size_index) 
- scale = style.get('scale') - if (scale_dim and ((isinstance(scale, basestring) and scale in element) or - isinstance(scale, dim_transform))): - self.param.warning("Cannot declare style mapping for 'scale' option " - "and declare a size_index; ignoring the size_index.") - scale_dim = None - if scale_dim is not None: - sizes = element.dimension_values(scale_dim) - if self.aggregator is np.size: - ptp = sizes.max() - baseline = 0 - else: - ptp = sizes.ptp() - baseline = sizes.min() - if self.min_scale > self.max_scale: - raise ValueError('min_scale parameter must be smaller ' - 'than max_scale parameter.') - scale = self.max_scale - self.min_scale - mapping['scale'] = 'scale' - data['scale'] = (((sizes - baseline) / ptp) * scale) + self.min_scale return data, mapping, style diff --git a/holoviews/plotting/bokeh/links.py b/holoviews/plotting/bokeh/links.py new file mode 100644 index 0000000000..64392e13a2 --- /dev/null +++ b/holoviews/plotting/bokeh/links.py @@ -0,0 +1,402 @@ +import numpy as np + +from bokeh.models import CustomJS, ToolbarBox + +from ...core.util import isscalar +from ..links import ( + Link, RectanglesTableLink, DataLink, RangeToolLink, + SelectionLink, VertexTableLink +) +from ..plot import GenericElementPlot, GenericOverlayPlot + + +class LinkCallback(object): + + source_model = None + target_model = None + source_handles = [] + target_handles = [] + + on_source_events = [] + on_source_changes = [] + + on_target_events = [] + on_target_changes = [] + + source_code = None + target_code = None + + def __init__(self, root_model, link, source_plot, target_plot=None): + self.root_model = root_model + self.link = link + self.source_plot = source_plot + self.target_plot = target_plot + self.validate() + + references = {k: v for k, v in link.param.get_param_values() + if k not in ('source', 'target', 'name')} + + for sh in self.source_handles+[self.source_model]: + key = '_'.join(['source', sh]) + references[key] = source_plot.handles[sh] + + for p, 
value in link.param.get_param_values(): + if p in ('name', 'source', 'target'): + continue + references[p] = value + + if target_plot is not None: + for sh in self.target_handles+[self.target_model]: + key = '_'.join(['target', sh]) + references[key] = target_plot.handles[sh] + + if self.source_model in source_plot.handles: + src_model = source_plot.handles[self.source_model] + src_cb = CustomJS(args=references, code=self.source_code) + for ch in self.on_source_changes: + src_model.js_on_change(ch, src_cb) + for ev in self.on_source_events: + src_model.js_on_event(ev, src_cb) + self.src_cb = src_cb + else: + self.src_cb = None + + if target_plot is not None and self.target_model in target_plot.handles and self.target_code: + tgt_model = target_plot.handles[self.target_model] + tgt_cb = CustomJS(args=references, code=self.target_code) + for ch in self.on_target_changes: + tgt_model.js_on_change(ch, tgt_cb) + for ev in self.on_target_events: + tgt_model.js_on_event(ev, tgt_cb) + self.tgt_cb = tgt_cb + else: + self.tgt_cb = None + + @classmethod + def find_links(cls, root_plot): + """ + Traverses the supplied plot and searches for any Links on + the plotted objects. + """ + plot_fn = lambda x: isinstance(x, GenericElementPlot) and not isinstance(x, GenericOverlayPlot) + plots = root_plot.traverse(lambda x: x, [plot_fn]) + potentials = [cls.find_link(plot) for plot in plots] + source_links = [p for p in potentials if p is not None] + found = [] + for plot, links in source_links: + for link in links: + if not link._requires_target: + # If link has no target don't look further + found.append((link, plot, None)) + continue + potentials = [cls.find_link(p, link) for p in plots] + tgt_links = [p for p in potentials if p is not None] + if tgt_links: + found.append((link, plot, tgt_links[0][0])) + return found + + @classmethod + def find_link(cls, plot, link=None): + """ + Searches a GenericElementPlot for a Link. 
+ """ + registry = Link.registry.items() + for source in plot.link_sources: + if link is None: + links = [ + l for src, links in registry for l in links + if src is source or (src._plot_id is not None and + src._plot_id == source._plot_id)] + if links: + return (plot, links) + else: + if ((link.target is source) or + (link.target is not None and + link.target._plot_id is not None and + link.target._plot_id == source._plot_id)): + return (plot, [link]) + + def validate(self): + """ + Should be subclassed to check if the source and target plots + are compatible to perform the linking. + """ + + +class RangeToolLinkCallback(LinkCallback): + """ + Attaches a RangeTool to the source plot and links it to the + specified axes on the target plot + """ + + def __init__(self, root_model, link, source_plot, target_plot): + try: + from bokeh.models.tools import RangeTool + except: + raise Exception('RangeToolLink requires bokeh >= 0.13') + toolbars = list(root_model.select({'type': ToolbarBox})) + axes = {} + if 'x' in link.axes: + axes['x_range'] = target_plot.handles['x_range'] + if 'y' in link.axes: + axes['y_range'] = target_plot.handles['y_range'] + tool = RangeTool(**axes) + source_plot.state.add_tools(tool) + if toolbars: + toolbar = toolbars[0].toolbar + toolbar.tools.append(tool) + + +class DataLinkCallback(LinkCallback): + """ + Merges the source and target ColumnDataSource + """ + + def __init__(self, root_model, link, source_plot, target_plot): + src_cds = source_plot.handles['source'] + tgt_cds = target_plot.handles['source'] + if src_cds is tgt_cds: + return + + src_len = [len(v) for v in src_cds.data.values()] + tgt_len = [len(v) for v in tgt_cds.data.values()] + if src_len and tgt_len and (src_len[0] != tgt_len[0]): + raise Exception('DataLink source data length must match target ' + 'data length, found source length of %d and ' + 'target length of %d.' % (src_len[0], tgt_len[0])) + + # Ensure the data sources are compatible (i.e. 
overlapping columns are equal) + for k, v in tgt_cds.data.items(): + if k not in src_cds.data: + continue + v = np.asarray(v) + col = np.asarray(src_cds.data[k]) + if len(v) and isinstance(v[0], np.ndarray): + continue # Skip ragged arrays + if not ((isscalar(v) and v == col) or + (v.dtype.kind not in 'iufc' and (v==col).all()) or + np.allclose(v, np.asarray(src_cds.data[k]), equal_nan=True)): + raise ValueError('DataLink can only be applied if overlapping ' + 'dimension values are equal, %s column on source ' + 'does not match target' % k) + + src_cds.data.update(tgt_cds.data) + renderer = target_plot.handles.get('glyph_renderer') + if renderer is None: + pass + elif 'data_source' in renderer.properties(): + renderer.update(data_source=src_cds) + else: + renderer.update(source=src_cds) + if hasattr(renderer, 'view'): + renderer.view.update(source=src_cds) + target_plot.handles['source'] = src_cds + target_plot.handles['cds'] = src_cds + for callback in target_plot.callbacks: + callback.initialize(plot_id=root_model.ref['id']) + + +class SelectionLinkCallback(LinkCallback): + + source_model = 'selected' + target_model = 'selected' + + on_source_changes = ['indices'] + on_target_changes = ['indices'] + + source_handles = ['cds'] + target_handles = ['cds'] + + source_code = """ + target_selected.indices = source_selected.indices + target_cds.properties.selected.change.emit() + """ + + target_code = """ + source_selected.indices = target_selected.indices + source_cds.properties.selected.change.emit() + """ + +class RectanglesTableLinkCallback(DataLinkCallback): + + source_model = 'cds' + target_model = 'cds' + + source_handles = ['glyph'] + + on_source_changes = ['selected', 'data'] + on_target_changes = ['patching'] + + source_code = """ + var xs = source_cds.data[source_glyph.x.field] + var ys = source_cds.data[source_glyph.y.field] + var ws = source_cds.data[source_glyph.width.field] + var hs = source_cds.data[source_glyph.height.field] + + var x0 = [] + var x1 = 
[] + var y0 = [] + var y1 = [] + for (var i = 0; i < xs.length; i++) { + var hw = ws[i]/2. + var hh = hs[i]/2. + x0.push(xs[i]-hw) + x1.push(xs[i]+hw) + y0.push(ys[i]-hh) + y1.push(ys[i]+hh) + } + target_cds.data[columns[0]] = x0 + target_cds.data[columns[1]] = y0 + target_cds.data[columns[2]] = x1 + target_cds.data[columns[3]] = y1 + """ + + target_code = """ + var x0s = target_cds.data[columns[0]] + var y0s = target_cds.data[columns[1]] + var x1s = target_cds.data[columns[2]] + var y1s = target_cds.data[columns[3]] + + var xs = [] + var ys = [] + var ws = [] + var hs = [] + for (var i = 0; i < x0s.length; i++) { + var x0 = Math.min(x0s[i], x1s[i]) + var y0 = Math.min(y0s[i], y1s[i]) + var x1 = Math.max(x0s[i], x1s[i]) + var y1 = Math.max(y0s[i], y1s[i]) + xs.push((x0+x1)/2.) + ys.push((y0+y1)/2.) + ws.push(x1-x0) + hs.push(y1-y0) + } + source_cds.data['x'] = xs + source_cds.data['y'] = ys + source_cds.data['width'] = ws + source_cds.data['height'] = hs + """ + + def __init__(self, root_model, link, source_plot, target_plot=None): + DataLinkCallback.__init__(self, root_model, link, source_plot, target_plot) + LinkCallback.__init__(self, root_model, link, source_plot, target_plot) + columns = [kd.name for kd in source_plot.current_frame.kdims] + self.src_cb.args['columns'] = columns + self.tgt_cb.args['columns'] = columns + + +class VertexTableLinkCallback(LinkCallback): + + source_model = 'cds' + target_model = 'cds' + + on_source_changes = ['selected', 'data', 'patching'] + on_target_changes = ['data', 'patching'] + + source_code = """ + var index = source_cds.selected.indices[0]; + if (index == undefined) { + var xs_column = []; + var ys_column = []; + } else { + var xs_column = source_cds.data['xs'][index]; + var ys_column = source_cds.data['ys'][index]; + } + if (xs_column == undefined) { + var xs_column = []; + var ys_column = []; + } + var xs = [] + var ys = [] + var empty = [] + for (var i = 0; i < xs_column.length; i++) { + xs.push(xs_column[i]) + 
ys.push(ys_column[i]) + empty.push(null) + } + var [x, y] = vertex_columns + target_cds.data[x] = xs + target_cds.data[y] = ys + var length = xs.length + for (var col in target_cds.data) { + if (vertex_columns.indexOf(col) != -1) { continue; } + else if (col in source_cds.data) { + var path = source_cds.data[col][index]; + if ((path == undefined)) { + var data = empty; + } else if (path.length == length) { + var data = source_cds.data[col][index]; + } else { + var data = empty; + } + } else { + var data = empty; + } + target_cds.data[col] = data; + } + target_cds.change.emit() + target_cds.data = target_cds.data + """ + + target_code = """ + if (!source_cds.selected.indices.length) { return } + var [x, y] = vertex_columns + var xs_column = target_cds.data[x] + var ys_column = target_cds.data[y] + var xs = [] + var ys = [] + var points = [] + for (var i = 0; i < xs_column.length; i++) { + xs.push(xs_column[i]) + ys.push(ys_column[i]) + points.push(i) + } + var index = source_cds.selected.indices[0] + var xpaths = source_cds.data['xs'] + var ypaths = source_cds.data['ys'] + var length = source_cds.data['xs'].length + for (var col in target_cds.data) { + if ((col == x) || (col == y)) { continue; } + if (!(col in source_cds.data)) { + var empty = [] + for (var i = 0; i < length; i++) + empty.push([]) + source_cds.data[col] = empty + } + source_cds.data[col][index] = target_cds.data[col] + for (var p of points) { + for (var pindex = 0; pindex < xpaths.length; pindex++) { + if (pindex != index) { continue } + var xs = xpaths[pindex] + var ys = ypaths[pindex] + var column = source_cds.data[col][pindex] + if (column.length != xs.length) { + for (var ind = 0; ind < xs.length; ind++) { + column.push(null) + } + } + for (var ind = 0; ind < xs.length; ind++) { + if ((xs[ind] == xpaths[index][p]) && (ys[ind] == ypaths[index][p])) { + column[ind] = target_cds.data[col][p] + xs[ind] = xs[p]; + ys[ind] = ys[p]; + } + } + } + } + } + xpaths[index] = xs; + ypaths[index] = ys; + 
source_cds.change.emit() + source_cds.properties.data.change.emit(); + source_cds.data = source_cds.data + """ + + +callbacks = Link._callbacks['bokeh'] + +callbacks[RangeToolLink] = RangeToolLinkCallback +callbacks[DataLink] = DataLinkCallback +callbacks[SelectionLink] = SelectionLinkCallback +callbacks[VertexTableLink] = VertexTableLinkCallback +callbacks[RectanglesTableLink] = RectanglesTableLinkCallback diff --git a/holoviews/plotting/bokeh/path.py b/holoviews/plotting/bokeh/path.py index 9a3dcf636b..8c67341345 100644 --- a/holoviews/plotting/bokeh/path.py +++ b/holoviews/plotting/bokeh/path.py @@ -1,12 +1,10 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict import param import numpy as np from ...core import util -from ...element import Polygons +from ...element import Contours, Polygons from ...util.transform import dim from .callbacks import PolyDrawCallback, PolyEditCallback from .element import ColorbarPlot, LegendPlot, OverlayPlot @@ -15,7 +13,8 @@ expand_batched_style, base_properties, line_properties, fill_properties, mpl_to_bokeh, validate ) -from .util import bokeh_version, multi_polygons_data +from .util import multi_polygons_data + class PathPlot(LegendPlot, ColorbarPlot): @@ -27,12 +26,6 @@ class PathPlot(LegendPlot, ColorbarPlot): show_legend = param.Boolean(default=False, doc=""" Whether to show legend for the plot.""") - # Deprecated options - - color_index = param.ClassSelector(default=None, class_=(util.basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`color=dim('color')`""") - style_opts = base_properties + line_properties + ['cmap'] _plot_methods = dict(single='multi_line', batched='multi_line') @@ -40,14 +33,17 @@ class PathPlot(LegendPlot, ColorbarPlot): _nonvectorized_styles = base_properties + ['cmap'] _batched_style_opts = line_properties + def _element_transform(self, transform, element, ranges): + if isinstance(element, Contours): + return super()._element_transform(transform, element, ranges) + return np.concatenate([transform.apply(el, ranges=ranges, flat=True) + for el in element.split()]) + def _hover_opts(self, element): - cdim = element.get_dimension(self.color_index) if self.batched: dims = list(self.hmap.last.kdims)+self.hmap.last.last.vdims else: dims = list(self.overlay_dims.keys())+self.hmap.last.vdims - if cdim not in dims and cdim is not None: - dims.append(cdim) return dims, {} @@ -67,15 +63,13 @@ def _get_hover_data(self, data, element): def get_data(self, element, ranges, style): color = style.get('color', None) cdim = None - if isinstance(color, util.basestring) and not validate('color', color): + if isinstance(color, str) and not validate('color', color) == False: cdim = element.get_dimension(color) - elif self.color_index is not None: - cdim = element.get_dimension(self.color_index) scalar = element.interface.isunique(element, cdim, per_geom=True) if cdim else False style_mapping = { (s, v) for s, v in style.items() if (s not in self._nonvectorized_styles) and - ((isinstance(v, util.basestring) and v in element) or isinstance(v, dim)) and + ((isinstance(v, str) and v in element) or isinstance(v, dim)) and not (not isinstance(v, dim) and v == color and s == 'color')} mapping = dict(self._mapping) @@ -94,18 +88,9 @@ def get_data(self, element, ranges, style): vals = defaultdict(list) if hover: vals.update({util.dimension_sanitizer(vd.name): [] for vd in element.vdims}) - if cdim and self.color_index is not None: - dim_name = util.dimension_sanitizer(cdim.name) - cmapper = 
self._get_colormapper(cdim, element, ranges, style) - mapping['line_color'] = {'field': dim_name, 'transform': cmapper} - vals[dim_name] = [] xpaths, ypaths = [], [] for path in element.split(): - if cdim and self.color_index is not None: - scalar = path.interface.isunique(path, cdim, per_geom=True) - cvals = path.dimension_values(cdim, not scalar) - vals[dim_name].append(cvals[:-1]) cols = path.columns(path.kdims) xs, ys = (cols[kd.name] for kd in element.kdims) alen = len(xs) @@ -167,17 +152,11 @@ class ContourPlot(PathPlot): show_legend = param.Boolean(default=False, doc=""" Whether to show legend for the plot.""") - # Deprecated options - - color_index = param.ClassSelector(default=0, class_=(util.basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `color=dim('color')`""") - _color_style = 'line_color' _nonvectorized_styles = base_properties + ['cmap'] def __init__(self, *args, **params): - super(ContourPlot, self).__init__(*args, **params) + super().__init__(*args, **params) self._has_holes = None def _hover_opts(self, element): @@ -197,13 +176,11 @@ def _get_hover_data(self, data, element): interface = element.interface scalar_kwargs = {'per_geom': True} if interface.multi else {} - npath = len([vs for vs in data.values()][0]) + for d in element.vdims: dim = util.dimension_sanitizer(d.name) if dim not in data: - if element.level is not None: - data[dim] = np.full(npath, element.level) - elif interface.isunique(element, d, **scalar_kwargs): + if interface.isunique(element, d, **scalar_kwargs): data[dim] = element.dimension_values(d, expanded=False) else: data[dim] = element.split(datatype='array', dimensions=[d]) @@ -213,6 +190,19 @@ def _get_hover_data(self, data, element): if dim not in data: data[dim] = [v for _ in range(len(list(data.values())[0]))] + def _apply_transforms(self, element, data, ranges, style, group=None): + transformed = super(ContourPlot, self)._apply_transforms( + element, data, ranges, style, 
group + ) + if not element.vdims or any(isinstance(t, dict) and 'transform' in t + for t in transformed.values()): + return transformed + default_transform = {self._color_style: dim(element.vdims[0])} + transformed.update(super(ContourPlot, self)._apply_transforms( + element, data, ranges, default_transform, group + )) + return transformed + def get_data(self, element, ranges, style): if self._has_holes is None: draw_callbacks = any(isinstance(cb, (PolyDrawCallback, PolyEditCallback)) @@ -239,40 +229,6 @@ def get_data(self, element, ranges, style): data = dict(xs=xs, ys=ys) mapping = dict(self._mapping) self._get_hover_data(data, element) - - color, fill_color = style.get('color'), style.get('fill_color') - if (((isinstance(color, dim) and color.applies(element)) or color in element) or - (isinstance(fill_color, dim) and fill_color.applies(element)) or fill_color in element): - cdim = None - elif None not in [element.level, self.color_index] and element.vdims: - cdim = element.vdims[0] - else: - cidx = self.color_index+2 if isinstance(self.color_index, int) else self.color_index - cdim = element.get_dimension(cidx) - - if cdim is None: - return data, mapping, style - - ncontours = len(xs) - dim_name = util.dimension_sanitizer(cdim.name) - if element.level is not None: - values = np.full(ncontours, float(element.level)) - else: - values = element.dimension_values(cdim, expanded=False) - data[dim_name] = values - - factors = None - if cdim.name in ranges and 'factors' in ranges[cdim.name]: - factors = ranges[cdim.name]['factors'] - elif values.dtype.kind in 'SUO' and len(values): - if isinstance(values[0], np.ndarray): - values = np.concatenate(values) - factors = util.unique_array(values) - cmapper = self._get_colormapper(cdim, element, ranges, style, factors) - mapping[self._color_style] = {'field': dim_name, 'transform': cmapper} - if self.show_legend: - legend_prop = 'legend_field' if bokeh_version >= '1.3.5' else 'legend' - mapping[legend_prop] = dim_name return 
data, mapping, style def _init_glyph(self, plot, mapping, properties): diff --git a/holoviews/plotting/bokeh/plot.py b/holoviews/plotting/bokeh/plot.py index a7e4280bab..c28d63e9e5 100644 --- a/holoviews/plotting/bokeh/plot.py +++ b/holoviews/plotting/bokeh/plot.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from itertools import groupby from collections import defaultdict @@ -19,7 +17,7 @@ ) from ...core.options import SkipRendering from ...core.util import ( - basestring, cftime_to_timestamp, cftime_types, get_method_owner, + cftime_to_timestamp, cftime_types, get_method_owner, is_param_method, unique_iterator, wrap_tuple, wrap_tuple_streams, _STANDARD_CALENDARS ) @@ -30,11 +28,10 @@ CallbackPlot ) from ..util import attach_streams, displayable, collate -from .callbacks import LinkCallback +from .links import LinkCallback from .util import ( - TOOL_TYPES, filter_toolboxes, make_axis, update_shared_sources, - empty_plot, decode_bytes, theme_attr_json, cds_column_replace, - get_default + filter_toolboxes, make_axis, update_shared_sources, empty_plot, + decode_bytes, theme_attr_json, cds_column_replace, get_default ) @@ -168,25 +165,6 @@ def _update_datasource(self, source, data): self._update_selected(source) - def _update_callbacks(self, plot): - """ - Iterates over all subplots and updates existing CustomJS - callbacks with models that were replaced when compositing - subplots into a CompositePlot and sets the plot id to match - the root level bokeh model. 
- """ - subplots = self.traverse(lambda x: x, [GenericElementPlot]) - merged_tools = {t: list(plot.select({'type': TOOL_TYPES[t]})) - for t in self._merged_tools} - for subplot in subplots: - for cb in subplot.callbacks: - for c in cb.callbacks: - for tool, objs in merged_tools.items(): - if tool in c.args and objs: - c.args[tool] = objs[0] - if self.top_level: - c.code = c.code.replace('PLACEHOLDER_PLOT_ID', self.id) - @property def state(self): """ @@ -258,8 +236,8 @@ def _fontsize(self, key, label='fontsize', common=True): Converts integer fontsizes to a string specifying fontsize in pt. """ - size = super(BokehPlot, self)._fontsize(key, label, common) - return {k: v if isinstance(v, basestring) else '%spt' % v + size = super()._fontsize(key, label, common) + return {k: v if isinstance(v, str) else '%spt' % v for k, v in size.items()} def _get_title_div(self, key, default_fontsize='15pt', width=450): @@ -492,7 +470,7 @@ class GridPlot(CompositePlot, GenericCompositePlot): def __init__(self, layout, ranges=None, layout_num=1, keys=None, **params): if not isinstance(layout, GridSpace): raise Exception("GridPlot only accepts GridSpace.") - super(GridPlot, self).__init__(layout=layout, layout_num=layout_num, + super().__init__(layout=layout, layout_num=layout_num, ranges=ranges, keys=keys, **params) self.cols, self.rows = layout.shape self.subplots, self.layout = self._create_subplots(layout, ranges) @@ -615,7 +593,6 @@ def initialize_plot(self, ranges=None, plots=[]): self.handles['plot'] = plot self.handles['plots'] = plots - self._update_callbacks(plot) if self.shared_datasource: self.sync_sources() @@ -704,7 +681,7 @@ class LayoutPlot(CompositePlot, GenericLayoutPlot): Whether to display overlaid plots in separate panes""") def __init__(self, layout, keys=None, **params): - super(LayoutPlot, self).__init__(layout, keys=keys, **params) + super().__init__(layout, keys=keys, **params) self.layout, self.subplots, self.paths = self._init_layout(layout) if 
self.top_level: self.traverse(lambda x: attach_streams(self, x.hmap, 2), @@ -988,7 +965,6 @@ def initialize_plot(self, plots=None, ranges=None): self.handles['plot'] = layout_plot self.handles['plots'] = plots - self._update_callbacks(layout_plot) if self.shared_datasource: self.sync_sources() @@ -1029,8 +1005,7 @@ def __init__(self, layout, layout_type, subplots, **params): self.view_positions = self.layout_dict[self.layout_type]['positions'] # The supplied (axes, view) objects as indexed by position - super(AdjointLayoutPlot, self).__init__(subplots=subplots, **params) - + super().__init__(subplots=subplots, **params) def initialize_plot(self, ranges=None, plots=[]): """ @@ -1055,7 +1030,6 @@ def initialize_plot(self, ranges=None, plots=[]): if not adjoined_plots: adjoined_plots = [None] return adjoined_plots - def update_frame(self, key, ranges=None): plot = None for pos in ['main', 'right', 'top']: diff --git a/holoviews/plotting/bokeh/raster.py b/holoviews/plotting/bokeh/raster.py index 9eb25fe830..1605257502 100644 --- a/holoviews/plotting/bokeh/raster.py +++ b/holoviews/plotting/bokeh/raster.py @@ -17,6 +17,11 @@ class RasterPlot(ColorbarPlot): clipping_colors = param.Dict(default={'NaN': 'transparent'}) + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. 
+ If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + padding = param.ClassSelector(default=0, class_=(int, float, tuple)) show_legend = param.Boolean(default=False, doc=""" @@ -41,7 +46,7 @@ def _hover_opts(self, element): return tooltips, {} def _postprocess_hover(self, renderer, source): - super(RasterPlot, self)._postprocess_hover(renderer, source) + super()._postprocess_hover(renderer, source) hover = self.handles.get('hover') if not (hover and isinstance(hover.tooltips, list)): return @@ -71,7 +76,7 @@ def _postprocess_hover(self, renderer, source): hover.formatters = formatters def __init__(self, *args, **kwargs): - super(RasterPlot, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if self.hmap.type == Raster: self.invert_yaxis = not self.invert_yaxis @@ -175,7 +180,7 @@ def get_data(self, element, ranges, style): if self.invert_axes: img = img.T l, b, r, t = b, l, t, r - + dh, dw = t-b, r-l if self.invert_xaxis: l, r = r, l @@ -191,18 +196,21 @@ def get_data(self, element, ranges, style): return (data, mapping, style) - class HSVPlot(RGBPlot): def get_data(self, element, ranges, style): - return super(HSVPlot, self).get_data(element.rgb, ranges, style) - + return super().get_data(element.rgb, ranges, style) class QuadMeshPlot(ColorbarPlot): clipping_colors = param.Dict(default={'NaN': 'transparent'}) + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. 
+ If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + padding = param.ClassSelector(default=0, class_=(int, float, tuple)) show_legend = param.Boolean(default=False, doc=""" @@ -238,6 +246,7 @@ def get_data(self, element, ranges, style): x, y = dimension_sanitizer(x.name), dimension_sanitizer(y.name) zdata = element.dimension_values(z, flat=False) + if irregular: dims = element.kdims if self.invert_axes: dims = dims[::-1] diff --git a/holoviews/plotting/bokeh/renderer.py b/holoviews/plotting/bokeh/renderer.py index e1490b3328..7e87311720 100644 --- a/holoviews/plotting/bokeh/renderer.py +++ b/holoviews/plotting/bokeh/renderer.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import base64 import logging from io import BytesIO @@ -62,7 +60,6 @@ def _save_prefix(self_or_cls, ext): "Hook to prefix content for instance JS when saving HTML" return - @bothmethod def get_plot(self_or_cls, obj, doc=None, renderer=None, **kwargs): """ @@ -70,13 +67,13 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, **kwargs): Allows supplying a document attach the plot to, useful when combining the bokeh model with another plot. 
""" - plot = super(BokehRenderer, self_or_cls).get_plot(obj, doc, renderer, **kwargs) + plot = super().get_plot(obj, doc, renderer, **kwargs) if plot.document is None: plot.document = Document() if self_or_cls.notebook_context else curdoc() - plot.document.theme = self_or_cls.theme + if self_or_cls.theme: + plot.document.theme = self_or_cls.theme return plot - def _figure_data(self, plot, fmt, doc=None, as_script=False, **kwargs): """ Given a plot instance, an output format and an optional bokeh @@ -146,7 +143,6 @@ def _figure_data(self, plot, fmt, doc=None, as_script=False, **kwargs): else: return data - @classmethod def plot_options(cls, obj, percent_size): """ @@ -175,7 +171,6 @@ def plot_options(cls, obj, percent_size): options['height'] = int(height) return dict(options) - @bothmethod def get_size(self_or_cls, plot): """ @@ -192,7 +187,6 @@ def get_size(self_or_cls, plot): 'and bokeh plot objects.') return compute_plot_size(plot) - @classmethod def load_nb(cls, inline=True): cls._loaded = True diff --git a/holoviews/plotting/bokeh/sankey.py b/holoviews/plotting/bokeh/sankey.py index dba658df51..196d0c9b91 100644 --- a/holoviews/plotting/bokeh/sankey.py +++ b/holoviews/plotting/bokeh/sankey.py @@ -1,12 +1,10 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np from bokeh.models import Patches from ...core.data import Dataset -from ...core.util import basestring, max_range, dimension_sanitizer +from ...core.util import max_range, dimension_sanitizer from ...util.transform import dim from .graphs import GraphPlot @@ -14,7 +12,7 @@ class SankeyPlot(GraphPlot): - labels = param.ClassSelector(class_=(basestring, dim), doc=""" + labels = param.ClassSelector(class_=(str, dim), doc=""" The dimension or dimension value transform used to draw labels from.""") label_position = param.ObjectSelector(default='right', @@ -48,27 +46,16 @@ class SankeyPlot(GraphPlot): The height of the component (in pixels). 
This can be either fixed or preferred height, depending on height sizing policy.""") - # Deprecated options - - color_index = param.ClassSelector(default=2, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the node labels will be drawn""") - - label_index = param.ClassSelector(default=2, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the node labels will be drawn""") - - _style_groups = dict(GraphPlot._style_groups, quad='node', text='label') - - _draw_order = ['graph', 'quad_1', 'text_1', 'text_2'] - + filled = True + style_opts = GraphPlot.style_opts + ['edge_fill_alpha', 'nodes_line_color', 'label_text_font_size'] + _style_groups = dict(GraphPlot._style_groups, quad='node', text='label') - filled = True + _draw_order = ['graph', 'quad_1', 'text_1', 'text_2'] def _init_glyphs(self, plot, element, ranges, source): - super(SankeyPlot, self)._init_glyphs(plot, element, ranges, source) + super()._init_glyphs(plot, element, ranges, source) renderer = plot.renderers.pop(plot.renderers.index(self.handles['glyph_renderer'])) plot.renderers = [renderer] + plot.renderers arc_renderer = self.handles['quad_1_glyph_renderer'] @@ -79,7 +66,7 @@ def _init_glyphs(self, plot, element, ranges, source): self._sync_nodes() def get_data(self, element, ranges, style): - data, mapping, style = super(SankeyPlot, self).get_data(element, ranges, style) + data, mapping, style = super().get_data(element, ranges, style) self._compute_quads(element, data, mapping) style['nodes_line_color'] = 'black' self._compute_labels(element, data, mapping) @@ -90,11 +77,11 @@ def _init_glyph(self, plot, mapping, properties, key): if key == 'quad_1': properties.pop('size', None) mapping.pop('size', None) - return super(SankeyPlot, self)._init_glyph(plot, mapping, properties, key) + return super()._init_glyph(plot, mapping, properties, key) def _update_glyphs(self, element, ranges, style): self._sync_nodes() - 
super(SankeyPlot, self)._update_glyphs(element, ranges, style) + super()._update_glyphs(element, ranges, style) def _sync_nodes(self): arc_renderer = self.handles['quad_1_glyph_renderer'] @@ -136,21 +123,13 @@ def _compute_labels(self, element, data, mapping): else: nodes = element - label_dim = nodes.get_dimension(self.label_index) labels = self.labels - if label_dim and labels: - if self.label_index not in [2, None]: - self.param.warning( - "Cannot declare style mapping for 'labels' option " - "and declare a label_index; ignoring the label_index.") - elif label_dim: - labels = label_dim - if isinstance(labels, basestring): + if isinstance(labels, str): labels = element.nodes.get_dimension(labels) if labels is None: text = [] - if isinstance(labels, dim): + elif isinstance(labels, dim): text = labels.apply(element, flat=True) else: text = element.nodes.dimension_values(labels) @@ -219,9 +198,9 @@ def _patch_hover(self, element, data): """ Replace edge start and end hover data with label_index data. 
""" - if not (self.inspection_policy == 'edges' and 'hover' in self.handles): + if not (self.inspection_policy == 'edges' and 'hover' in self.handles) or self.labels is None: return - lidx = element.nodes.get_dimension(self.label_index) + lidx = element.nodes.get_dimension(self.labels) src, tgt = [dimension_sanitizer(kd.name) for kd in element.kdims[:2]] if src == 'start': src += '_values' if tgt == 'end': tgt += '_values' @@ -236,7 +215,7 @@ def get_extents(self, element, ranges, range_type='combined'): if range_type == 'extents': return element.nodes.extents xdim, ydim = element.nodes.kdims[:2] - xpad = .05 if self.label_index is None else 0.25 + xpad = .05 if self.labels is None else 0.25 x0, x1 = ranges[xdim.name][range_type] y0, y1 = ranges[ydim.name][range_type] xdiff = (x1-x0) @@ -264,4 +243,4 @@ def _postprocess_hover(self, renderer, source): else: if isinstance(renderer.glyph, Patches): return - super(SankeyPlot, self)._postprocess_hover(renderer, source) + super()._postprocess_hover(renderer, source) diff --git a/holoviews/plotting/bokeh/selection.py b/holoviews/plotting/bokeh/selection.py index 5a577a1f4a..5c2b6bd1bc 100644 --- a/holoviews/plotting/bokeh/selection.py +++ b/holoviews/plotting/bokeh/selection.py @@ -14,7 +14,7 @@ def _build_selection(self, el, exprs, **kwargs): opts['selected'] = list(np.where(mask)[0]) return el.opts(clone=True, backend='bokeh', **opts) - def build_selection(self, selection_streams, hvobj, operations, region_stream=None): + def build_selection(self, selection_streams, hvobj, operations, region_stream=None, cache={}): sel_streams = [selection_streams.exprs_stream] hvobj = hvobj.apply(self._build_selection, streams=sel_streams, per_element=True) for op in operations: @@ -52,7 +52,9 @@ def _build_element_layer(self, element, layer_color, layer_alpha, **opts): merged_opts[opt] = opts[opt] filtered = {k: v for k, v in merged_opts.items() if k in allowed} - return element.opts(backend='bokeh', clone=True, tools=['box_select'], 
+ plot_opts = Store.lookup_options('bokeh', element, 'plot').kwargs + tools = plot_opts.get('tools', []) + ['box_select'] + return element.opts(backend='bokeh', clone=True, tools=tools, **filtered) def _style_region_element(self, region_element, unselected_color): diff --git a/holoviews/plotting/bokeh/stats.py b/holoviews/plotting/bokeh/stats.py index bafa04f53a..52089cf192 100644 --- a/holoviews/plotting/bokeh/stats.py +++ b/holoviews/plotting/bokeh/stats.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict from functools import partial @@ -10,11 +8,11 @@ from .selection import BokehOverlaySelectionDisplay from ...core import NdOverlay -from ...core.dimension import Dimension, Dimensioned +from ...core.dimension import Dimensioned from ...core.ndmapping import sorted_context from ...core.util import ( - basestring, dimension_sanitizer, wrap_tuple, unique_iterator, - isfinite, is_dask_array, is_cupy_array + dimension_sanitizer, wrap_tuple, unique_iterator, isfinite, + is_dask_array, is_cupy_array ) from ...operation.stats import univariate_kde from ...util.transform import dim @@ -70,12 +68,6 @@ class BoxWhiskerPlot(CompositeElementPlot, ColorbarPlot, LegendPlot): show_legend = param.Boolean(default=False, doc=""" Whether to show legend for the plot.""") - # Deprecated options - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`box_color=dim('color')`""") - # X-axis is categorical _x_range_type = FactorRange @@ -94,7 +86,7 @@ class BoxWhiskerPlot(CompositeElementPlot, ColorbarPlot, LegendPlot): selection_display = BokehOverlaySelectionDisplay(color_prop='box_color') def get_extents(self, element, ranges, range_type='combined'): - return super(BoxWhiskerPlot, self).get_extents( + return super().get_extents( element, ranges, range_type, 'categorical', element.vdims[0] ) @@ -117,7 +109,7 @@ def _apply_transforms(self, element, data, ranges, style, group=None): element = agg else: element = element.clone([(agg,)]) - return super(BoxWhiskerPlot, self)._apply_transforms(element, data, ranges, style, group) + return super()._apply_transforms(element, data, ranges, style, group) def _get_factors(self, element, ranges): """ @@ -138,7 +130,7 @@ def _get_factors(self, element, ranges): def _postprocess_hover(self, renderer, source): if not isinstance(renderer.glyph, (Circle, VBar, HBar)): return - super(BoxWhiskerPlot, self)._postprocess_hover(renderer, source) + super()._postprocess_hover(renderer, source) def _box_stats(self, vals): is_finite = isfinite @@ -156,7 +148,7 @@ def _box_stats(self, vals): vals = vals[is_finite(vals)] - if len(vals): + if is_dask or len(vals): q1, q2, q3 = (percentile(vals, q=q) for q in range(25, 100, 25)) iqr = q3 - q1 upper = max(vals[vals <= q3 + 1.5*iqr].max(), q3) @@ -204,13 +196,6 @@ def get_data(self, element, ranges, style): out_map = {'x': 'index', 'y': vdim} vbar2_map = dict(vbar_map) - # Get color values - if self.color_index is not None: - cdim = element.get_dimension(self.color_index) - cidx = element.get_dimension_index(self.color_index) - else: - cdim, cidx = None, None - factors = [] vdim = element.vdims[0].name for key, g in groups.items(): @@ -224,10 +209,7 @@ def get_data(self, element, ranges, style): hover = 'hover' in self.handles # Add color factor - if cidx is not None and cidx=element.ndims: - cdim = Dimension('index') - else: - 
r1_data[dimension_sanitizer(cdim.name)] = factors - r2_data[dimension_sanitizer(cdim.name)] = factors - factors = list(unique_iterator(factors)) - if self.show_legend: legend_prop = 'legend_field' if bokeh_version >= '1.3.5' else 'legend' - vbar_map[legend_prop] = cdim.name + vbar_map[legend_prop] = 'index' return data, mapping, style @@ -345,12 +320,6 @@ class ViolinPlot(BoxWhiskerPlot): violin_width = param.Number(default=0.8, doc=""" Relative width of the violin""") - # Deprecated options - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `violin_color=dim('color')`""") - # Map each glyph to a style group _style_groups = {'patches': 'violin', 'multi_line': 'outline', 'segment': 'stats', 'vbar': 'box', 'scatter': 'median', @@ -371,7 +340,7 @@ class ViolinPlot(BoxWhiskerPlot): selection_display = BokehOverlaySelectionDisplay(color_prop='violin_fill_color') def _get_axis_dims(self, element): - split_dim = dim(self.split) if isinstance(self.split, basestring) else self.split + split_dim = dim(self.split) if isinstance(self.split, str) else self.split kdims = [kd for kd in element.kdims if not split_dim or kd != split_dim.dimension] return kdims, element.vdims[0] @@ -379,7 +348,7 @@ def _get_factors(self, element, ranges): """ Get factors for categorical axes. 
""" - split_dim = dim(self.split) if isinstance(self.split, basestring) else self.split + split_dim = dim(self.split) if isinstance(self.split, str) else self.split kdims = [kd for kd in element.kdims if not split_dim or kd != split_dim.dimension] if not kdims: xfactors, yfactors = [element.label], [] @@ -503,7 +472,7 @@ def _kde_data(self, element, el, key, split_dim, split_cats, **kwargs): def get_data(self, element, ranges, style): - split_dim = dim(self.split) if isinstance(self.split, basestring) else self.split + split_dim = dim(self.split) if isinstance(self.split, str) else self.split kdims = [kd for kd in element.kdims if not split_dim or split_dim.dimension != kd] if kdims: diff --git a/holoviews/plotting/bokeh/styles.py b/holoviews/plotting/bokeh/styles.py index 07009f1bb8..50f14f860f 100644 --- a/holoviews/plotting/bokeh/styles.py +++ b/holoviews/plotting/bokeh/styles.py @@ -15,7 +15,7 @@ cm, colors = None, None from ...core.options import abbreviated_exception -from ...core.util import basestring, arraylike_types +from ...core.util import arraylike_types from ...util.transform import dim from ..util import COLOR_ALIASES, RGB_HEX_REGEX, rgb2hex @@ -100,7 +100,7 @@ def mpl_to_bokeh(properties): 'angle' : angle.is_valid, 'alpha' : alpha.is_valid, 'color' : lambda x: ( - color.is_valid(x) or (isinstance(x, basestring) and RGB_HEX_REGEX.match(x)) + color.is_valid(x) or (isinstance(x, str) and RGB_HEX_REGEX.match(x)) ), 'font_size' : font_size.is_valid, 'line_dash' : dash_pattern.is_valid, @@ -122,7 +122,7 @@ def validate(style, value, scalar=False): --------- style: str The style to validate (e.g. 
'color', 'size' or 'marker') - value: + value: The style value to validate scalar: bool diff --git a/holoviews/plotting/bokeh/tabular.py b/holoviews/plotting/bokeh/tabular.py index daa98c11d6..266301dbb6 100644 --- a/holoviews/plotting/bokeh/tabular.py +++ b/holoviews/plotting/bokeh/tabular.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param from bokeh.models import Column @@ -44,11 +42,11 @@ class TablePlot(BokehPlot, GenericElementPlot): _stream_data = True def __init__(self, element, plot=None, **params): - super(TablePlot, self).__init__(element, **params) + super().__init__(element, **params) self.handles = {} if plot is None else self.handles['plot'] element_ids = self.hmap.traverse(lambda x: id(x), [Dataset, ItemTable]) self.static = len(set(element_ids)) == 1 and len(self.keys) == len(self.hmap) - self.callbacks = self._construct_callbacks() + self.callbacks, self.source_streams = self._construct_callbacks() self.streaming = [s for s in self.streams if isinstance(s, Buffer)] self.static_source = False @@ -56,7 +54,6 @@ def get_data(self, element, ranges, style): return ({dimension_sanitizer(d.name): element.dimension_values(d) for d in element.dimensions()}, {}, style) - def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): """ Initializes a new plot object with the last available frame. 
diff --git a/holoviews/plotting/bokeh/tiles.py b/holoviews/plotting/bokeh/tiles.py index f4b16df2e3..0727306828 100644 --- a/holoviews/plotting/bokeh/tiles.py +++ b/holoviews/plotting/bokeh/tiles.py @@ -1,10 +1,7 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np -from bokeh.models import WMTSTileSource, BBoxTileSource, QUADKEYTileSource, SaveTool +from bokeh.models import WMTSTileSource, BBoxTileSource, QUADKEYTileSource -from ...core import util from ...core.options import SkipRendering from ...element.tiles import _ATTRIBUTIONS from .element import ElementPlot @@ -13,11 +10,10 @@ class TilePlot(ElementPlot): style_opts = ['alpha', 'render_parents', 'level', 'smoothing', 'min_zoom', 'max_zoom'] - selection_display = None def get_extents(self, element, ranges, range_type='combined'): - extents = super(TilePlot, self).get_extents(element, ranges, range_type) + extents = super().get_extents(element, ranges, range_type) if (not self.overlaid and all(e is None or not np.isfinite(e) for e in extents) and range_type in ('combined', 'data')): x0, x1 = (-20037508.342789244, 20037508.342789244) @@ -27,10 +23,12 @@ def get_extents(self, element, ranges, range_type='combined'): return extents def get_data(self, element, ranges, style): - if not isinstance(element.data, util.basestring): + if not isinstance(element.data, str): SkipRendering("WMTS element data must be a URL string, " "bokeh cannot render %r" % element.data) - if '{Q}' in element.data: + if element.data is None: + raise ValueError("Tile source URL may not be None with the bokeh backend") + elif '{Q}' in element.data: tile_source = QUADKEYTileSource elif all(kw in element.data for kw in ('{XMIN}', '{XMAX}', '{YMIN}', '{YMAX}')): tile_source = BBoxTileSource @@ -64,7 +62,4 @@ def _init_glyph(self, plot, mapping, properties): level = properties.pop('level', 'glyph') renderer = plot.add_tile(tile_source, level=level) renderer.alpha = properties.get('alpha', 1) - - # Remove 
save tool - plot.tools = [t for t in plot.tools if not isinstance(t, SaveTool)] return renderer, tile_source diff --git a/holoviews/plotting/bokeh/util.py b/holoviews/plotting/bokeh/util.py index 10caa77a71..0c6980450d 100644 --- a/holoviews/plotting/bokeh/util.py +++ b/holoviews/plotting/bokeh/util.py @@ -1,14 +1,12 @@ -from __future__ import absolute_import, division, unicode_literals - -import re -import time -import sys import calendar import datetime as dt -from types import FunctionType +import inspect +import re +import time from collections import defaultdict from contextlib import contextmanager +from types import FunctionType import param import bokeh @@ -42,8 +40,8 @@ from ...core.ndmapping import NdMapping from ...core.overlay import Overlay from ...core.util import ( - LooseVersion, _getargspec, basestring, callable_name, cftime_types, - cftime_to_timestamp, pd, unique_array, isnumeric, arraylike_types + LooseVersion, arraylike_types, callable_name, cftime_types, + cftime_to_timestamp, isnumeric, pd, unique_array ) from ...core.spaces import get_nested_dmaps, DynamicMap from ..util import dim_axis_label @@ -99,13 +97,21 @@ def convert_timestamp(timestamp): return np.datetime64(datetime.replace(tzinfo=None)) +def prop_is_none(value): + """ + Checks if property value is None. 
+ """ + return (value is None or + (isinstance(value, dict) and 'value' in value + and value['value'] is None)) + + def decode_bytes(array): """ Decodes an array, list or tuple of bytestrings to avoid python 3 bokeh serialization errors """ - if (sys.version_info.major == 2 or not len(array) or - (isinstance(array, arraylike_types) and array.dtype.kind != 'O')): + if (not len(array) or (isinstance(array, arraylike_types) and array.dtype.kind != 'O')): return array decoded = [v.decode('utf-8') if isinstance(v, bytes) else v for v in array] if isinstance(array, np.ndarray): @@ -394,7 +400,7 @@ def font_size_to_pixels(size): """ Convert a fontsize to a pixel value """ - if size is None or not isinstance(size, basestring): + if size is None or not isinstance(size, str): return conversions = {'em': 16, 'pt': 16/12.} val = re.findall('\d+', size) @@ -581,7 +587,7 @@ def py2js_tickformatter(formatter, msg=''): param.main.param.warning(msg+error) return - args = _getargspec(formatter).args + args = inspect.getfullargspec(formatter).args arg_define = 'var %s = tick;' % args[0] if args else '' return_js = 'return formatter();\n' jsfunc = '\n'.join([arg_define, jscode, return_js]) @@ -636,7 +642,7 @@ def filter_batched_data(data, mapping): if 'transform' in v: continue v = v['field'] - elif not isinstance(v, basestring): + elif not isinstance(v, str): continue values = data[v] try: diff --git a/holoviews/plotting/links.py b/holoviews/plotting/links.py index 7ba99be2f1..5174260ea6 100644 --- a/holoviews/plotting/links.py +++ b/holoviews/plotting/links.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import weakref from collections import defaultdict @@ -45,7 +43,7 @@ def __init__(self, source, target=None, **params): # Source is stored as a weakref to allow it to be garbage collected self._source = None if source is None else weakref.ref(source) self._target = None if target is None else weakref.ref(target) - super(Link, self).__init__(**params) + 
super().__init__(**params) self.link() @classmethod @@ -138,7 +136,7 @@ def __init__(self, source, target, **params): if 'vertex_columns' not in params: dimensions = [dimension_sanitizer(d.name) for d in target.dimensions()[:2]] params['vertex_columns'] = dimensions - super(VertexTableLink, self).__init__(source, target, **params) + super().__init__(source, target, **params) class RectanglesTableLink(Link): diff --git a/holoviews/plotting/mixins.py b/holoviews/plotting/mixins.py index 31842b4dfd..5486163ef4 100644 --- a/holoviews/plotting/mixins.py +++ b/holoviews/plotting/mixins.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np from ..core import util, Dataset, Dimension @@ -35,7 +33,7 @@ def get_extents(self, element, ranges, range_type='combined'): for kd in [kdim0, kdim1]]) ranges[kdim0] = new_range ranges[kdim1] = new_range - return super(GeomMixin, self).get_extents(element, ranges, range_type) + return super().get_extents(element, ranges, range_type) class ChordMixin(object): @@ -47,8 +45,7 @@ def get_extents(self, element, ranges, range_type='combined'): xdim, ydim = element.nodes.kdims[:2] if range_type not in ('combined', 'data', 'extents'): return xdim.range[0], ydim.range[0], xdim.range[1], ydim.range[1] - no_labels = (element.nodes.get_dimension(self.label_index) is None and - self.labels is None) + no_labels = self.labels is None rng = 1.1 if no_labels else 1.4 x0, x1 = util.max_range([xdim.range, (-rng, rng)]) y0, y1 = util.max_range([ydim.range, (-rng, rng)]) @@ -73,11 +70,11 @@ def get_extents(self, element, ranges, range_type='combined'): y0, y1 = element.range(1) return (x0, y0, x1, y1) else: - return super(HeatMapMixin, self).get_extents(element, ranges, range_type) + return super().get_extents(element, ranges, range_type) class SpikesMixin(object): - + def get_extents(self, element, ranges, range_type='combined'): opts = self.lookup_options(element, 'plot').options if 
len(element.dimensions()) > 1 and 'spike_length' not in opts: @@ -107,27 +104,27 @@ def get_extents(self, element, ranges, range_type='combined'): 'hard': (np.nan, np.nan), 'soft': proxy_range, 'combined': proxy_range} - return super(SpikesMixin, self).get_extents(element, ranges, range_type, - ydim=proxy_dim) - + return super().get_extents(element, ranges, range_type, + ydim=proxy_dim) class AreaMixin(object): - + def get_extents(self, element, ranges, range_type='combined'): vdims = element.vdims[:2] vdim = vdims[0].name if len(vdims) > 1: new_range = {} for r in ranges[vdim]: - new_range[r] = util.max_range([ranges[vd.name][r] for vd in vdims]) + if r != 'values': + new_range[r] = util.max_range([ranges[vd.name][r] for vd in vdims]) ranges[vdim] = new_range else: s0, s1 = ranges[vdim]['soft'] s0 = min(s0, 0) if util.isfinite(s0) else 0 s1 = max(s1, 0) if util.isfinite(s1) else 0 ranges[vdim]['soft'] = (s0, s1) - return super(AreaMixin, self).get_extents(element, ranges, range_type) + return super().get_extents(element, ranges, range_type) class BarsMixin(object): @@ -151,7 +148,7 @@ def get_extents(self, element, ranges, range_type='combined'): s1 = max(s1, 0) if util.isfinite(s1) else 0 ranges[vdim]['soft'] = (s0, s1) if range_type not in ('combined', 'data'): - return super(BarsMixin, self).get_extents(element, ranges, range_type) + return super().get_extents(element, ranges, range_type) # Compute stack heights xdim = element.kdims[0] diff --git a/holoviews/plotting/mpl/__init__.py b/holoviews/plotting/mpl/__init__.py index 6a49ea4e1c..693925693b 100644 --- a/holoviews/plotting/mpl/__init__.py +++ b/holoviews/plotting/mpl/__init__.py @@ -1,11 +1,10 @@ -from __future__ import absolute_import, division, unicode_literals - import os from matplotlib import rc_params_from_file from matplotlib.colors import ListedColormap, LinearSegmentedColormap from matplotlib.cm import register_cmap from param import concrete_descendents +from colorcet import kbc from ...core 
import Layout, Collator, GridMatrix, config from ...core.options import Cycle, Palette, Options @@ -139,7 +138,6 @@ def grid_selector(grid): # Chart 3D Surface: SurfacePlot, TriSurface: TriSurfacePlot, - Trisurface: TriSurfacePlot, # Alias, remove in 2.0 Scatter3D: Scatter3DPlot, Path3D: Path3DPlot, @@ -219,10 +217,15 @@ def grid_selector(grid): register_cmap("fire", cmap=fire_cmap) register_cmap("fire_r", cmap=fire_r_cmap) +register_cmap('kbc_r', + cmap=LinearSegmentedColormap.from_list('kbc_r', + list(reversed(kbc)), N=len(kbc))) + options = Store.options(backend='matplotlib') -dflt_cmap = 'fire' +dflt_cmap = config.default_cmap + # Default option definitions -# Note: *No*short aliases here! e.g use 'facecolor' instead of 'fc' +# Note: *No*short aliases here! e.g. use 'facecolor' instead of 'fc' # Charts options.Curve = Options('style', color=Cycle(), linewidth=2) @@ -238,7 +241,7 @@ def grid_selector(grid): options.Path3D = Options('plot', fig_size=150) options.Surface = Options('plot', fig_size=150) options.Surface = Options('style', cmap='fire') -options.Spikes = Options('style', color='black', cmap='fire') +options.Spikes = Options('style', color='black', cmap=dflt_cmap) options.Area = Options('style', facecolor=Cycle(), edgecolor='black') options.BoxWhisker = Options('style', boxprops=dict(color='k', linewidth=1.5), whiskerprops=dict(color='k', linewidth=1.5)) @@ -247,10 +250,10 @@ def grid_selector(grid): options.Rectangles = Options('style', edgecolor='black') # Rasters -options.Image = Options('style', cmap=dflt_cmap, interpolation='nearest') -options.Raster = Options('style', cmap=dflt_cmap, interpolation='nearest') -options.QuadMesh = Options('style', cmap=dflt_cmap) -options.HeatMap = Options('style', cmap='RdYlBu_r', edgecolors='white', +options.Image = Options('style', cmap=config.default_gridded_cmap, interpolation='nearest') +options.Raster = Options('style', cmap=config.default_gridded_cmap, interpolation='nearest') +options.QuadMesh = 
Options('style', cmap=config.default_gridded_cmap) +options.HeatMap = Options('style', cmap=config.default_heatmap_cmap, edgecolors='white', annular_edgecolors='white', annular_linewidth=0.5, xmarks_edgecolor='white', xmarks_linewidth=3, ymarks_edgecolor='white', ymarks_linewidth=3, @@ -270,13 +273,15 @@ def grid_selector(grid): options.HSpan = Options('style', alpha=0.5, facecolor=Cycle()) options.Spline = Options('style', edgecolor=Cycle()) -options.Arrow = Options('style', color='k', linewidth=2, fontsize=13) +options.Arrow = Options('style', color='k', linewidth=2, textsize=13) # Paths -options.Contours = Options('style', color=Cycle(), cmap='viridis') +options.Contours = Options('style', color=Cycle(), cmap=dflt_cmap) options.Contours = Options('plot', show_legend=True) -options.Path = Options('style', color=Cycle(), cmap='viridis') +options.Path = Options('style', color=Cycle(), cmap=dflt_cmap) options.Polygons = Options('style', facecolor=Cycle(), edgecolor='black', - cmap='viridis') + cmap=dflt_cmap) +options.Rectangles = Options('style', cmap=dflt_cmap) +options.Segments = Options('style', cmap=dflt_cmap) options.Box = Options('style', color='black') options.Bounds = Options('style', color='black') options.Ellipse = Options('style', color='black') @@ -288,7 +293,7 @@ def grid_selector(grid): options.Graph = Options('style', node_edgecolors='black', node_facecolors=Cycle(), edge_color='black', node_size=15) options.TriMesh = Options('style', node_edgecolors='black', node_facecolors='white', - edge_color='black', node_size=5, edge_linewidth=1) + edge_color='black', node_size=5, edge_linewidth=1, cmap=dflt_cmap) options.Chord = Options('style', node_edgecolors='black', node_facecolors=Cycle(), edge_color='black', node_size=10, edge_linewidth=0.5) options.Chord = Options('plot', xaxis=None, yaxis=None) diff --git a/holoviews/plotting/mpl/annotation.py b/holoviews/plotting/mpl/annotation.py index 3dc76d497b..7e6abc663c 100644 --- 
a/holoviews/plotting/mpl/annotation.py +++ b/holoviews/plotting/mpl/annotation.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np import matplotlib @@ -7,7 +5,7 @@ from matplotlib import patches as patches from matplotlib.lines import Line2D -from ...core.util import match_spec, basestring +from ...core.util import match_spec from ...core.options import abbreviated_exception from .element import ElementPlot, ColorbarPlot from .plot import mpl_rc_context @@ -24,7 +22,7 @@ def __init__(self, slope, intercept, *args, **kwargs): ax = kwargs['axes'] # init the line, add it to the axes - super(ABLine2D, self).__init__([], [], *args, **kwargs) + super().__init__([], [], *args, **kwargs) self._slope = slope self._intercept = intercept ax.add_line(self) @@ -55,7 +53,7 @@ class AnnotationPlot(ElementPlot): def __init__(self, annotation, **params): self._annotation = annotation - super(AnnotationPlot, self).__init__(annotation, **params) + super().__init__(annotation, **params) self.handles['annotations'] = [] @mpl_rc_context @@ -167,10 +165,6 @@ def draw_annotation(self, axis, data, opts): class LabelsPlot(ColorbarPlot): - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the color will the drawn""") - xoffset = param.Number(default=None, doc=""" Amount of offset to apply to labels along x-axis.""") @@ -197,14 +191,7 @@ def get_data(self, element, ranges, style): if self.yoffset is not None: ys += self.yoffset - cs = None - cdim = element.get_dimension(self.color_index) - if cdim: - self._norm_kwargs(element, ranges, style, cdim) - cs = element.dimension_values(cdim) - if 'c' in style: - cs = style.pop('c') - + cs = style.pop('c', None) if 'size' in style: style['fontsize'] = style.pop('size') if 'horizontalalignment' not in style: style['horizontalalignment'] = 'center' if 'verticalalignment' not in style: 
style['verticalalignment'] = 'center' @@ -262,7 +249,7 @@ def draw_annotation(self, axis, data, opts): elif direction in ['>', '<']: xytext = (points if direction=='<' else -points, 0) if 'fontsize' in textopts: - self.param.warning('Arrow textsize style option is deprecated, ' + self.param.warning('Arrow fontsize style option is deprecated, ' 'use textsize option instead.') if 'textsize' in textopts: textopts['fontsize'] = textopts.pop('textsize') diff --git a/holoviews/plotting/mpl/chart.py b/holoviews/plotting/mpl/chart.py index 813da303a1..813c978456 100644 --- a/holoviews/plotting/mpl/chart.py +++ b/holoviews/plotting/mpl/chart.py @@ -1,25 +1,21 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np -import matplotlib as mpl from matplotlib import cm from matplotlib.collections import LineCollection from matplotlib.dates import DateFormatter, date2num -from ...core.dimension import Dimension, dimension_name +from ...core.dimension import Dimension from ...core.options import Store, abbreviated_exception from ...core.util import ( - match_spec, basestring, isfinite, dt_to_int, dt64_to_dt, search_indices, - unique_array, isscalar, isdatetime + match_spec, isfinite, dt_to_int, dt64_to_dt, isscalar, isdatetime ) from ...element import Raster, HeatMap from ...operation import interpolate_curve from ...util.transform import dim from ..plot import PlotSelector from ..mixins import AreaMixin, BarsMixin, SpikesMixin -from ..util import compute_sizes, get_sideplot_ranges, get_min_distance +from ..util import get_sideplot_ranges, get_min_distance from .element import ElementPlot, ColorbarPlot, LegendPlot from .path import PathPlot from .plot import AdjoinedPlot, mpl_rc_context @@ -133,15 +129,17 @@ def init_artists(self, ax, plot_data, plot_kwargs): _, (bottoms, tops), verts = handles return {'bottoms': bottoms, 'tops': tops, 'verts': verts[0], 'artist': verts[0]} - def get_data(self, element, ranges, style): with 
abbreviated_exception(): style = self._apply_transforms(element, ranges, style) - color = style.get('color') + color = style.pop('color', None) if isinstance(color, np.ndarray): style['ecolor'] = color if 'edgecolor' in style: style['ecolor'] = style.pop('edgecolor') + if 'linewidth' in style: + # Raise ValueError if a numpy array, so needs to be a list. + style["elinewidth"] = np.asarray(style.pop('linewidth')).tolist() c = style.get('c') if isinstance(c, np.ndarray): with abbreviated_exception(): @@ -193,8 +191,8 @@ def update_handles(self, key, axis, element, ranges, style): tops.set_ydata(tys) if 'ecolor' in style: verts.set_edgecolors(style['ecolor']) - if 'linewidth' in style: - verts.set_linewidths(style['linewidth']) + if 'elinewidth' in style: + verts.set_linewidths(style['elinewidth']) return axis_kwargs @@ -254,7 +252,6 @@ class SideAreaPlot(AdjoinedPlot, AreaPlot): 'right', 'bare' 'left-bare' and 'right-bare'.""") - class SpreadPlot(AreaPlot): """ SpreadPlot plots the Spread Element type. 
@@ -266,7 +263,7 @@ class SpreadPlot(AreaPlot): Whether to show legend for the plot.""") def __init__(self, element, **params): - super(SpreadPlot, self).__init__(element, **params) + super().__init__(element, **params) def get_data(self, element, ranges, style): with abbreviated_exception(): @@ -282,7 +279,6 @@ def get_extents(self, element, ranges, range_type='combined'): return ChartPlot.get_extents(self, element, ranges, range_type) - class HistogramPlot(ColorbarPlot): """ HistogramPlot can plot DataHistograms and ViewMaps of @@ -300,7 +296,7 @@ def __init__(self, histograms, **params): self.center = False self.cyclic = False - super(HistogramPlot, self).__init__(histograms, **params) + super().__init__(histograms, **params) if self.invert_axes: self.axis_settings = ['ylabel', 'xlabel', 'yticks'] @@ -348,7 +344,7 @@ def initialize_plot(self, ranges=None): # Plot bars and make any adjustments legend = hist.label if self.show_legend else '' bars = self.plotfn(edges, hvals, widths, zorder=self.zorder, label=legend, align='edge', **style) - self.handles['artist'] = self._update_plot(self.keys[-1], hist, bars, lims, ranges) # Indexing top + self.handles['artist'] = self._update_plot(self.keys[-1], hist, bars, lims, ranges) ticks = self._compute_ticks(hist, edges, widths, lims) ax_settings = self._process_axsettings(hist, lims, ticks) @@ -376,7 +372,6 @@ def _process_hist(self, hist): widths = np.diff(edges) return edges[:-1], hist_vals, widths, xlim+ylim, is_datetime - def _compute_ticks(self, element, edges, widths, lims): """ Compute the ticks either as cyclic values in degrees or as roughly @@ -396,15 +391,13 @@ def _compute_ticks(self, element, edges, widths, lims): labels = [dim.pprint_value(v) for v in xvals] return [xvals, labels] - def get_extents(self, element, ranges, range_type='combined'): ydim = element.get_dimension(1) s0, s1 = ranges[ydim.name]['soft'] s0 = min(s0, 0) if isfinite(s0) else 0 s1 = max(s1, 0) if isfinite(s1) else 0 
ranges[ydim.name]['soft'] = (s0, s1) - return super(HistogramPlot, self).get_extents(element, ranges, range_type) - + return super().get_extents(element, ranges, range_type) def _process_axsettings(self, hist, lims, ticks): """ @@ -414,7 +407,6 @@ def _process_axsettings(self, hist, lims, ticks): axis_settings = dict(zip(self.axis_settings, [None, None, (None if self.overlaid else ticks)])) return axis_settings - def _update_plot(self, key, hist, bars, lims, ranges): """ Process bars can be subclassed to manually adjust bars @@ -422,7 +414,6 @@ def _update_plot(self, key, hist, bars, lims, ranges): """ return bars - def _update_artists(self, key, hist, edges, hvals, widths, lims, ranges): """ Update all the artists in the histogram. Subclassable to @@ -450,7 +441,6 @@ def update_handles(self, key, axis, element, ranges, style): return ax_settings - class SideHistogramPlot(AdjoinedPlot, HistogramPlot): bgcolor = param.Parameter(default=(1, 1, 1, 0), doc=""" @@ -466,19 +456,17 @@ def _process_hist(self, hist): """ Subclassed to offset histogram by defined amount. """ - edges, hvals, widths, lims, isdatetime = super(SideHistogramPlot, self)._process_hist(hist) + edges, hvals, widths, lims, isdatetime = super()._process_hist(hist) offset = self.offset * lims[3] - hvals *= 1-self.offset + hvals = hvals * (1-self.offset) hvals += offset lims = lims[0:3] + (lims[3] + offset,) return edges, hvals, widths, lims, isdatetime - def _update_artists(self, n, element, edges, hvals, widths, lims, ranges): - super(SideHistogramPlot, self)._update_artists(n, element, edges, hvals, widths, lims, ranges) + super()._update_artists(n, element, edges, hvals, widths, lims, ranges) self._update_plot(n, element, self.handles['artist'], lims, ranges) - def _update_plot(self, key, element, bars, lims, ranges): """ Process the bars and draw the offset line as necessary. 
If a @@ -495,20 +483,9 @@ def _update_plot(self, key, element, bars, lims, ranges): plot_type = Store.registry['matplotlib'].get(type(range_item)) if isinstance(plot_type, PlotSelector): plot_type = plot_type.get_plot_class(range_item) - opts = self.lookup_options(range_item, 'plot') - if plot_type and issubclass(plot_type, ColorbarPlot): - cidx = opts.options.get('color_index', None) - if cidx is None: - opts = self.lookup_options(range_item, 'style') - cidx = opts.kwargs.get('color', None) - if cidx not in range_item: - cidx = None - cdim = None if cidx is None else range_item.get_dimension(cidx) - else: - cdim = None # Get colormapping options - if isinstance(range_item, (HeatMap, Raster)) or cdim: + if isinstance(range_item, (HeatMap, Raster)): style = self.lookup_options(range_item, 'style')[self.cyclic_index] cmap = cm.get_cmap(style.get('cmap')) main_range = style.get('clims', main_range) @@ -522,11 +499,10 @@ def _update_plot(self, key, element, bars, lims, ranges): elif offset: self._update_separator(offset) - if cmap is not None: + if cmap is not None and main_range and (None not in main_range): self._colorize_bars(cmap, bars, element, main_range, dim) return bars - def _colorize_bars(self, cmap, bars, element, main_range, dim): """ Use the given cmap to color the bars, applying the correct @@ -540,7 +516,6 @@ def _colorize_bars(self, cmap, bars, element, main_range, dim): bar.set_facecolor(cmap(c)) bar.set_clip_on(False) - def _update_separator(self, offset): """ Compute colorbar offset and update separator line @@ -566,30 +541,6 @@ class PointPlot(ChartPlot, ColorbarPlot): show_grid = param.Boolean(default=False, doc=""" Whether to draw grid lines at the tick positions.""") - # Deprecated parameters - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`color=dim('color')`""") - - size_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of size style mapping, e.g. `size=dim('size')`""") - - scaling_method = param.ObjectSelector(default="area", - objects=["width", "area"], - doc=""" - Deprecated in favor of size style mapping, e.g. - size=dim('size')**2.""") - - scaling_factor = param.Number(default=1, bounds=(0, None), doc=""" - Scaling factor which is applied to either the width or area - of each point, depending on the value of `scaling_method`.""") - - size_fn = param.Callable(default=np.abs, doc=""" - Function applied to size values before applying scaling, - to remove values lower than zero.""") - style_opts = ['alpha', 'color', 'edgecolors', 'facecolors', 'linewidth', 'marker', 'size', 'visible', 'cmap', 'vmin', 'vmax', 'norm'] @@ -602,55 +553,11 @@ class PointPlot(ChartPlot, ColorbarPlot): def get_data(self, element, ranges, style): xs, ys = (element.dimension_values(i) for i in range(2)) - self._compute_styles(element, ranges, style) + style['edgecolors'] = style.pop('edgecolors', style.pop('edgecolor', 'none')) with abbreviated_exception(): style = self._apply_transforms(element, ranges, style) return (ys, xs) if self.invert_axes else (xs, ys), style, {} - - def _compute_styles(self, element, ranges, style): - cdim = element.get_dimension(self.color_index) - color = style.pop('color', None) - cmap = style.get('cmap', None) - - if cdim and ((isinstance(color, basestring) and color in element) or isinstance(color, dim)): - self.param.warning( - "Cannot declare style mapping for 'color' option and " - "declare a color_index; ignoring the color_index.") - cdim = None - if cdim and cmap: - cs = element.dimension_values(self.color_index) - # Check if numeric otherwise treat as categorical - if cs.dtype.kind in 'uif': - style['c'] = cs - else: - style['c'] = search_indices(cs, unique_array(cs)) - self._norm_kwargs(element, ranges, style, cdim) - 
elif color is not None: - style['color'] = color - style['edgecolors'] = style.pop('edgecolors', style.pop('edgecolor', 'none')) - - ms = style.get('s', mpl.rcParams['lines.markersize']) - sdim = element.get_dimension(self.size_index) - if sdim and ((isinstance(ms, basestring) and ms in element) or isinstance(ms, dim)): - self.param.warning( - "Cannot declare style mapping for 's' option and " - "declare a size_index; ignoring the size_index.") - sdim = None - if sdim: - sizes = element.dimension_values(self.size_index) - sizes = compute_sizes(sizes, self.size_fn, self.scaling_factor, - self.scaling_method, ms) - if sizes is None: - eltype = type(element).__name__ - self.param.warning( - '%s dimension is not numeric, cannot use to ' - 'scale %s size.' % (sdim.pprint_label, eltype)) - else: - style['s'] = sizes - style['edgecolors'] = style.pop('edgecolors', 'none') - - def update_handles(self, key, axis, element, ranges, style): paths = self.handles['artist'] (xs, ys), style, _ = self.get_data(element, ranges, style) @@ -704,7 +611,7 @@ class VectorFieldPlot(ColorbarPlot): they may be customized with the 'headlength' and 'headaxislength' style options.""") - magnitude = param.ClassSelector(class_=(basestring, dim), doc=""" + magnitude = param.ClassSelector(class_=(str, dim), doc=""" Dimension or dimension value transform that declares the magnitude of each vector. Magnitude is expected to be scaled between 0-1, by default the magnitudes are rescaled relative to the minimum @@ -717,25 +624,6 @@ class VectorFieldPlot(ColorbarPlot): Whether the lengths will be rescaled to take into account the smallest non-zero distance between two vectors.""") - # Deprecated parameters - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of dimension value transform on color option, - e.g. `color=dim('Magnitude')`. 
- """) - - size_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of the magnitude option, e.g. - `magnitude=dim('Magnitude')`. - """) - - normalize_lengths = param.Boolean(default=True, doc=""" - Deprecated in favor of rescaling length using dimension value - transforms using the magnitude option, e.g. - `dim('Magnitude').norm()`.""") - style_opts = ['alpha', 'color', 'edgecolors', 'facecolors', 'linewidth', 'marker', 'visible', 'cmap', 'scale', 'headlength', 'headaxislength', 'pivot', @@ -748,24 +636,14 @@ class VectorFieldPlot(ColorbarPlot): _plot_methods = dict(single='quiver') def _get_magnitudes(self, element, style, ranges): - size_dim = element.get_dimension(self.size_index) mag_dim = self.magnitude - if size_dim and mag_dim: - self.param.warning( - "Cannot declare style mapping for 'magnitude' option " - "and declare a size_index; ignoring the size_index.") - elif size_dim: - mag_dim = size_dim - elif isinstance(mag_dim, basestring): + if isinstance(mag_dim, str): mag_dim = element.get_dimension(mag_dim) if mag_dim is not None: if isinstance(mag_dim, dim): magnitudes = mag_dim.apply(element, flat=True) else: magnitudes = element.dimension_values(mag_dim) - _, max_magnitude = ranges[dimension_name(mag_dim)]['combined'] - if self.normalize_lengths and max_magnitude != 0: - magnitudes = magnitudes / max_magnitude else: magnitudes = np.ones(len(element)) return magnitudes @@ -788,21 +666,6 @@ def get_data(self, element, ranges, style): args = (xs, ys, magnitudes, [0.0] * len(element)) - # Compute color - cdim = element.get_dimension(self.color_index) - color = style.get('color', None) - if cdim and ((isinstance(color, basestring) and color in element) or isinstance(color, dim)): - self.param.warning( - "Cannot declare style mapping for 'color' option and " - "declare a color_index; ignoring the color_index.") - cdim = None - if cdim: - colors = element.dimension_values(self.color_index) - 
style['c'] = colors - cdim = element.get_dimension(self.color_index) - self._norm_kwargs(element, ranges, style, cdim) - style.pop('color', None) - # Process style with abbreviated_exception(): style = self._apply_transforms(element, ranges, style) @@ -907,7 +770,6 @@ def initialize_plot(self, ranges=None): return self._finalize_axis(key, ranges=ranges, element=element, dimensions=[xdims, vdim], **kwargs) - def _finalize_ticks(self, axis, element, xticks, yticks, zticks): """ Apply ticks with appropriate offsets. @@ -921,7 +783,7 @@ def _finalize_ticks(self, axis, element, xticks, yticks, zticks): xticks = ticks elif yticks: yticks = ticks - super(BarPlot, self)._finalize_ticks(axis, element, xticks, yticks, zticks) + super()._finalize_ticks(axis, element, xticks, yticks, zticks) if alignments: if xticks: for t, y in zip(axis.get_xticklabels(), alignments): @@ -930,7 +792,6 @@ def _finalize_ticks(self, axis, element, xticks, yticks, zticks): for t, x in zip(axis.get_yticklabels(), alignments): t.set_x(x) - def _create_bars(self, axis, element, ranges, style): # Get values dimensions, and style information (gdim, cdim, sdim), values = self._get_values(element, ranges) @@ -1033,10 +894,6 @@ class SpikesPlot(SpikesMixin, PathPlot, ColorbarPlot): explicit aspect ratio as width/height as well as 'square' and 'equal' options.""") - color_index = param.ClassSelector(default=None, allow_None=True, - class_=(basestring, int), doc=""" - Index of the dimension from which the color will the drawn""") - spike_length = param.Number(default=0.1, doc=""" The length of each spike if Spikes object is one dimensional.""") @@ -1052,6 +909,8 @@ def init_artists(self, ax, plot_args, plot_kwargs): plot_kwargs['array'] = plot_kwargs.pop('c') if 'vmin' in plot_kwargs and 'vmax' in plot_kwargs: plot_kwargs['clim'] = plot_kwargs.pop('vmin'), plot_kwargs.pop('vmax') + if not 'array' in plot_kwargs and 'cmap' in plot_kwargs: + del plot_kwargs['cmap'] line_segments = LineCollection(*plot_args, 
**plot_kwargs) ax.add_collection(line_segments) return {'artist': line_segments} @@ -1091,17 +950,6 @@ def get_data(self, element, ranges, style): cols.append(vs) clean_spikes.append(np.column_stack(cols)) - cdim = element.get_dimension(self.color_index) - color = style.get('color', None) - if cdim and ((isinstance(color, basestring) and color in element) or isinstance(color, dim)): - self.param.warning( - "Cannot declare style mapping for 'color' option and " - "declare a color_index; ignoring the color_index.") - cdim = None - if cdim: - style['array'] = element.dimension_values(cdim) - self._norm_kwargs(element, ranges, style, cdim) - if 'spike_length' in opts: axis_dims = (element.dimensions()[0], None) elif len(element.dimensions()) == 1: diff --git a/holoviews/plotting/mpl/chart3d.py b/holoviews/plotting/mpl/chart3d.py index 7213230158..fc770b4382 100644 --- a/holoviews/plotting/mpl/chart3d.py +++ b/holoviews/plotting/mpl/chart3d.py @@ -1,14 +1,9 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np import param -import matplotlib.cm as cm from mpl_toolkits.mplot3d.art3d import Line3DCollection from ...core import Dimension from ...core.options import abbreviated_exception -from ...core.util import basestring -from ..util import map_colors from .element import ColorbarPlot from .chart import PointPlot from .path import PathPlot @@ -84,8 +79,7 @@ def _finalize_axis(self, key, **kwargs): axis.set_axis_bgcolor(self.bgcolor) else: axis.set_facecolor(self.bgcolor) - return super(Plot3D, self)._finalize_axis(key, **kwargs) - + return super()._finalize_axis(key, **kwargs) def _draw_colorbar(self, element=None, dim=None, redraw=True): if element is None: @@ -96,10 +90,7 @@ def _draw_colorbar(self, element=None, dim=None, redraw=True): ax = self.handles['axis'] # Get colorbar label if dim is None: - if hasattr(self, 'color_index'): - dim = element.get_dimension(self.color_index) - else: - dim = element.get_dimension(2) + dim = 
element.get_dimension(2) elif not isinstance(dim, Dimension): dim = element.get_dimension(dim) label = dim.pprint_label @@ -116,19 +107,10 @@ class Scatter3DPlot(Plot3D, PointPlot): onto a particular Dimension of the data. """ - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the color will the drawn""") - - size_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the sizes will the drawn.""") - _plot_methods = dict(single='scatter') def get_data(self, element, ranges, style): xs, ys, zs = (element.dimension_values(i) for i in range(3)) - self._compute_styles(element, ranges, style) with abbreviated_exception(): style = self._apply_transforms(element, ranges, style) if style.get('edgecolors') == 'none': @@ -138,13 +120,6 @@ def get_data(self, element, ranges, style): def update_handles(self, key, axis, element, ranges, style): artist = self.handles['artist'] artist._offsets3d, style, _ = self.get_data(element, ranges, style) - cdim = element.get_dimension(self.color_index) - if cdim and 'cmap' in style: - clim = style['vmin'], style['vmax'] - cmap = cm.get_cmap(style['cmap']) - artist._facecolor3d = map_colors(style['c'], clim, cmap, hex=False) - if element.get_dimension(self.size_index): - artist.set_sizes(style['s']) class Path3DPlot(Plot3D, PathPlot): diff --git a/holoviews/plotting/mpl/element.py b/holoviews/plotting/mpl/element.py index 35fe949077..926df5577d 100644 --- a/holoviews/plotting/mpl/element.py +++ b/holoviews/plotting/mpl/element.py @@ -1,5 +1,4 @@ -from __future__ import absolute_import, division, unicode_literals - +import copy import math import warnings from types import FunctionType @@ -22,7 +21,7 @@ from ..plot import GenericElementPlot, GenericOverlayPlot from ..util import process_cmap, color_intervals, dim_range_key from .plot import MPLPlot, mpl_rc_context -from .util 
import mpl_version, validate, wrap_formatter +from .util import EqHistNormalize, mpl_version, validate, wrap_formatter class ElementPlot(GenericElementPlot, MPLPlot): @@ -55,15 +54,15 @@ class ElementPlot(GenericElementPlot, MPLPlot): Whether to apply log scaling to the y-axis of the Chart.""") xformatter = param.ClassSelector( - default=None, class_=(util.basestring, ticker.Formatter, FunctionType), doc=""" + default=None, class_=(str, ticker.Formatter, FunctionType), doc=""" Formatter for ticks along the x-axis.""") yformatter = param.ClassSelector( - default=None, class_=(util.basestring, ticker.Formatter, FunctionType), doc=""" + default=None, class_=(str, ticker.Formatter, FunctionType), doc=""" Formatter for ticks along the y-axis.""") zformatter = param.ClassSelector( - default=None, class_=(util.basestring, ticker.Formatter, FunctionType), doc=""" + default=None, class_=(str, ticker.Formatter, FunctionType), doc=""" Formatter for ticks along the z-axis.""") zaxis = param.Boolean(default=True, doc=""" @@ -92,7 +91,7 @@ class ElementPlot(GenericElementPlot, MPLPlot): _has_axes = True def __init__(self, element, **params): - super(ElementPlot, self).__init__(element, **params) + super().__init__(element, **params) check = self.hmap.last if isinstance(check, CompositeOverlay): check = check.values()[0] # Should check if any are 3D plots @@ -106,7 +105,6 @@ def __init__(self, element, **params): self.param.warning("Plotting hook %r could not be " "applied:\n\n %s" % (hook, e)) - def _finalize_axis(self, key, element=None, title=None, dimensions=None, ranges=None, xticks=None, yticks=None, zticks=None, xlabel=None, ylabel=None, zlabel=None): """ @@ -192,8 +190,7 @@ def _finalize_axis(self, key, element=None, title=None, dimensions=None, ranges= self._finalize_artist(element) self._execute_hooks(element) - return super(ElementPlot, self)._finalize_axis(key) - + return super()._finalize_axis(key) def _finalize_ticks(self, axis, dimensions, xticks, yticks, zticks): 
""" @@ -239,7 +236,6 @@ def _finalize_ticks(self, axis, dimensions, xticks, yticks, zticks): tick_fontsize = self._fontsize('%sticks' % ax,'labelsize',common=False) if tick_fontsize: ax_obj.set_tick_params(**tick_fontsize) - def _finalize_artist(self, element): """ Allows extending the _finalize_axis method with Element @@ -247,7 +243,6 @@ def _finalize_artist(self, element): """ pass - def _set_labels(self, axes, dimensions, xlabel=None, ylabel=None, zlabel=None): """ Sets the labels of the axes using the supplied list of dimensions. @@ -300,7 +295,7 @@ def _set_aspect(self, axes, aspect): if self.projection == '3d': return - if ((isinstance(aspect, util.basestring) and aspect != 'square') or + if ((isinstance(aspect, str) and aspect != 'square') or self.data_aspect): data_ratio = self.data_aspect or aspect else: @@ -417,12 +412,14 @@ def _set_axis_ticks(self, axis, ticks, log=False, rotation=0): if an integer number of ticks is supplied and setting a rotation for the ticks. """ + if isinstance(ticks, np.ndarray): + ticks = list(ticks) if isinstance(ticks, (list, tuple)) and all(isinstance(l, list) for l in ticks): axis.set_ticks(ticks[0]) axis.set_ticklabels(ticks[1]) elif isinstance(ticks, ticker.Locator): axis.set_major_locator(ticks) - elif not ticks and ticks is not None: + elif ticks is not None and not ticks: axis.set_ticks([]) elif isinstance(ticks, int): if log: @@ -453,6 +450,7 @@ def update_frame(self, key, ranges=None, element=None): using the last available frame. 
""" reused = isinstance(self.hmap, DynamicMap) and self.overlaid + self.prev_frame = self.current_frame if not reused and element is None: element = self._get_frame(key) elif element is not None: @@ -528,6 +526,9 @@ def init_artists(self, ax, plot_args, plot_kwargs): """ plot_method = self._plot_methods.get('batched' if self.batched else 'single') plot_fn = getattr(ax, plot_method) + if 'norm' in plot_kwargs: # vmin/vmax should now be exclusively in norm + plot_kwargs.pop('vmin', None) + plot_kwargs.pop('vmax', None) artist = plot_fn(*plot_args, **plot_kwargs) return {'artist': artist[0] if isinstance(artist, list) and len(artist) == 1 else artist} @@ -549,7 +550,7 @@ def update_handles(self, key, axis, element, ranges, style): def _apply_transforms(self, element, ranges, style): new_style = dict(style) for k, v in style.items(): - if isinstance(v, util.basestring): + if isinstance(v, str): if validate(k, v) == True: continue elif v in element or (isinstance(element, Graph) and v in element.nodes): @@ -666,11 +667,17 @@ class ColorbarPlot(ElementPlot): over the title key in colorbar_opts.""") clim = param.NumericTuple(default=(np.nan, np.nan), length=2, doc=""" - User-specified colorbar axis range limits for the plot, as a tuple (low,high). - If specified, takes precedence over data and dimension ranges.""") + User-specified colorbar axis range limits for the plot, as a + tuple (low,high). If specified, takes precedence over data + and dimension ranges.""") + + clim_percentile = param.ClassSelector(default=False, class_=(int, float, bool), doc=""" + Percentile value to compute colorscale robust to outliers. 
If + True, uses 2nd and 98th percentile; otherwise uses the specified + numerical percentile value.""") cformatter = param.ClassSelector( - default=None, class_=(util.basestring, ticker.Formatter, FunctionType), doc=""" + default=None, class_=(str, ticker.Formatter, FunctionType), doc=""" Formatter for ticks along the colorbar axis.""") colorbar = param.Boolean(default=False, doc=""" @@ -683,6 +690,9 @@ class ColorbarPlot(ElementPlot): Number of discrete colors to use when colormapping or a set of color intervals defining the range of values to map each color to.""") + cnorm = param.ObjectSelector(default='linear', objects=['linear', 'log', 'eq_hist'], doc=""" + Color normalization to be applied during colormapping.""") + clipping_colors = param.Dict(default={}, doc=""" Dictionary to specify colors for clipped values, allows setting color for NaN values and for values above and below @@ -717,7 +727,7 @@ class ColorbarPlot(ElementPlot): _default_nan = '#8b8b8b' def __init__(self, *args, **kwargs): - super(ColorbarPlot, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _adjust_cbar(self, cbar, label, dim): noalpha = math.floor(self.style[self.cyclic_index].get('alpha', 1)) == 1 @@ -850,7 +860,10 @@ def _norm_kwargs(self, element, ranges, opts, vdim, values=None, prefix=''): categorical = False elif values.dtype.kind in 'uif': if dim_name in ranges: - clim = ranges[dim_name]['combined'] + if self.clim_percentile and 'robust' in ranges[dim_name]: + clim = ranges[dim_name]['robust'] + else: + clim = ranges[dim_name]['combined'] elif isinstance(vdim, dim): if values.dtype.kind == 'M': clim = values.min(), values.max() @@ -885,7 +898,10 @@ def _norm_kwargs(self, element, ranges, opts, vdim, values=None, prefix=''): else: categorical = values.dtype.kind not in 'uif' - if self.logz: + if self.cnorm == 'eq_hist': + opts[prefix+'norm'] = EqHistNormalize( + vmin=clim[0], vmax=clim[1]) + if self.cnorm == 'log' or self.logz: if self.symmetric: norm = 
mpl_colors.SymLogNorm(vmin=clim[0], vmax=clim[1], linthresh=clim[1]/np.e) @@ -934,7 +950,7 @@ def _norm_kwargs(self, element, ranges, opts, vdim, values=None, prefix=''): elif isinstance(val, tuple): colors[k] = {'color': val[:3], 'alpha': val[3] if len(val) > 3 else 1} - elif isinstance(val, util.basestring): + elif isinstance(val, str): color = val alpha = 1 if color.startswith('#') and len(color) == 9: @@ -958,13 +974,14 @@ def _norm_kwargs(self, element, ranges, opts, vdim, values=None, prefix=''): if isinstance(self.color_levels, list): palette, (vmin, vmax) = color_intervals(palette, self.color_levels, clip=(vmin, vmax)) cmap = mpl_colors.ListedColormap(palette) + + cmap = copy.copy(cmap) if 'max' in colors: cmap.set_over(**colors['max']) if 'min' in colors: cmap.set_under(**colors['min']) if 'NaN' in colors: cmap.set_bad(**colors['NaN']) opts[prefix+'cmap'] = cmap - class LegendPlot(ElementPlot): show_legend = param.Boolean(default=True, doc=""" @@ -1005,7 +1022,6 @@ class LegendPlot(ElementPlot): 'bottom_right': dict(loc=4)} - class OverlayPlot(LegendPlot, GenericOverlayPlot): """ OverlayPlot supports compositors processing of Overlays across maps. 
@@ -1026,8 +1042,7 @@ class OverlayPlot(LegendPlot, GenericOverlayPlot): def __init__(self, overlay, ranges=None, **params): if 'projection' not in params: params['projection'] = self._get_projection(overlay) - super(OverlayPlot, self).__init__(overlay, ranges=ranges, **params) - + super().__init__(overlay, ranges=ranges, **params) def _finalize_artist(self, element): for subplot in self.subplots.values(): @@ -1088,7 +1103,6 @@ def _adjust_legend(self, overlay, axis): self.handles['bbox_extra_artists'].append(leg) self.handles['legend_data'] = data - @mpl_rc_context def initialize_plot(self, ranges=None): axis = self.handles['axis'] @@ -1108,11 +1122,11 @@ def initialize_plot(self, ranges=None): return self._finalize_axis(key, element=element, ranges=ranges, title=self._format_title(key)) - @mpl_rc_context def update_frame(self, key, ranges=None, element=None): axis = self.handles['axis'] reused = isinstance(self.hmap, DynamicMap) and self.overlaid + self.prev_frame = self.current_frame if element is None and not reused: element = self._get_frame(key) elif element is not None: diff --git a/holoviews/plotting/mpl/geometry.py b/holoviews/plotting/mpl/geometry.py index 83e02c94d5..ef5be05b30 100644 --- a/holoviews/plotting/mpl/geometry.py +++ b/holoviews/plotting/mpl/geometry.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np from matplotlib.collections import LineCollection, PatchCollection @@ -27,6 +25,8 @@ def init_artists(self, ax, plot_args, plot_kwargs): plot_kwargs['array'] = plot_kwargs.pop('c') if 'vmin' in plot_kwargs and 'vmax' in plot_kwargs: plot_kwargs['clim'] = plot_kwargs.pop('vmin'), plot_kwargs.pop('vmax') + if not 'array' in plot_kwargs and 'cmap' in plot_kwargs: + del plot_kwargs['cmap'] line_segments = LineCollection(*plot_args, **plot_kwargs) ax.add_collection(line_segments) return {'artist': line_segments} @@ -57,6 +57,8 @@ def init_artists(self, ax, plot_args, plot_kwargs): 
plot_kwargs['array'] = plot_kwargs.pop('c') if 'vmin' in plot_kwargs and 'vmax' in plot_kwargs: plot_kwargs['clim'] = plot_kwargs.pop('vmin'), plot_kwargs.pop('vmax') + if not 'array' in plot_kwargs and 'cmap' in plot_kwargs: + del plot_kwargs['cmap'] line_segments = PatchCollection(*plot_args, **plot_kwargs) ax.add_collection(line_segments) return {'artist': line_segments} diff --git a/holoviews/plotting/mpl/graphs.py b/holoviews/plotting/mpl/graphs.py index fe42846f1d..5d3e6abcc5 100644 --- a/holoviews/plotting/mpl/graphs.py +++ b/holoviews/plotting/mpl/graphs.py @@ -1,16 +1,14 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np from matplotlib.collections import LineCollection, PolyCollection from ...core.data import Dataset -from ...core.options import Cycle, abbreviated_exception -from ...core.util import basestring, unique_array, search_indices, is_number, isscalar +from ...core.options import abbreviated_exception +from ...core.util import is_number, isscalar from ...util.transform import dim from ..mixins import ChordMixin -from ..util import process_cmap, get_directed_graph_paths +from ..util import get_directed_graph_paths from .element import ColorbarPlot from .util import filter_styles @@ -25,17 +23,6 @@ class GraphPlot(ColorbarPlot): Whether to draw arrows on the graph edges to indicate the directionality of each edge.""") - # Deprecated options - - color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. `node_color=dim('color')`""") - - - edge_color_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Deprecated in favor of color style mapping, e.g. 
`edge_color=dim('color')`""") - style_opts = ['edge_alpha', 'edge_color', 'edge_linestyle', 'edge_linewidth', 'node_alpha', 'node_color', 'node_edgecolors', 'node_facecolors', 'node_linewidth', 'node_marker', 'node_size', 'visible', 'cmap', @@ -53,70 +40,17 @@ class GraphPlot(ColorbarPlot): def _compute_styles(self, element, ranges, style): elstyle = self.lookup_options(element, 'style') color = elstyle.kwargs.get('node_color') - cdim = element.nodes.get_dimension(self.color_index) - cmap = elstyle.kwargs.get('cmap', 'tab20') - if cdim: - cs = element.nodes.dimension_values(self.color_index) - # Check if numeric otherwise treat as categorical - if cs.dtype.kind == 'f': - style['node_c'] = cs - else: - factors = unique_array(cs) - cmap = color if isinstance(color, Cycle) else cmap - if isinstance(cmap, dict): - colors = [cmap.get(f, cmap.get('NaN', {'color': self._default_nan})['color']) - for f in factors] - else: - colors = process_cmap(cmap, len(factors)) - cs = search_indices(cs, factors) - style['node_facecolors'] = [colors[v%len(colors)] for v in cs] - style.pop('node_color', None) - if 'node_c' in style: - self._norm_kwargs(element.nodes, ranges, style, cdim) - elif color and 'node_color' in style: + if color and 'node_color' in style: style['node_facecolors'] = style.pop('node_color') style['node_edgecolors'] = style.pop('node_edgecolors', 'none') if is_number(style.get('node_size')): style['node_s'] = style.pop('node_size')**2 - edge_cdim = element.get_dimension(self.edge_color_index) - if not edge_cdim: - if not isscalar(style.get('edge_color')): - opt = 'edge_facecolors' if self.filled else 'edge_edgecolors' - style[opt] = style.pop('edge_color') - return style - - elstyle = self.lookup_options(element, 'style') - cycle = elstyle.kwargs.get('edge_color') - idx = element.get_dimension_index(edge_cdim) - cvals = element.dimension_values(edge_cdim) - if idx in [0, 1]: - factors = element.nodes.dimension_values(2, expanded=False) - elif idx == 2 and 
cvals.dtype.kind in 'uif': - factors = None - else: - factors = unique_array(cvals) - if factors is None or (factors.dtype.kind == 'f' and idx not in [0, 1]): - style['edge_c'] = cvals - else: - cvals = search_indices(cvals, factors) - factors = list(factors) - cmap = elstyle.kwargs.get('edge_cmap', 'tab20') - cmap = cycle if isinstance(cycle, Cycle) else cmap - if isinstance(cmap, dict): - colors = [cmap.get(f, cmap.get('NaN', {'color': self._default_nan})['color']) - for f in factors] - else: - colors = process_cmap(cmap, len(factors)) - style['edge_colors'] = [colors[v%len(colors)] for v in cvals] - style.pop('edge_color', None) - if 'edge_c' in style: - self._norm_kwargs(element, ranges, style, edge_cdim, prefix='edge_') - else: - style.pop('edge_cmap', None) + if not isscalar(style.get('edge_color')): + opt = 'edge_facecolors' if self.filled else 'edge_edgecolors' + style[opt] = style.pop('edge_color') return style - def get_data(self, element, ranges, style): with abbreviated_exception(): style = self._apply_transforms(element, ranges, style) @@ -144,11 +78,9 @@ def get_data(self, element, ranges, style): paths = [p[:, ::-1] for p in paths] return {'nodes': (pxs, pys), 'edges': paths}, style, {'dimensions': dims} - def get_extents(self, element, ranges, range_type='combined'): return super(GraphPlot, self).get_extents(element.nodes, ranges, range_type) - def init_artists(self, ax, plot_args, plot_kwargs): # Draw edges color_opts = ['c', 'cmap', 'vmin', 'vmax', 'norm'] @@ -177,7 +109,6 @@ def init_artists(self, ax, plot_args, plot_kwargs): return {'nodes': nodes, 'edges': edges} - def _update_nodes(self, element, data, style): nodes = self.handles['nodes'] xs, ys = data['nodes'] @@ -201,7 +132,6 @@ def _update_nodes(self, element, data, style): else: nodes.set_sizes(sizes) - def _update_edges(self, element, data, style): edges = self.handles['edges'] paths = data['edges'] @@ -225,7 +155,6 @@ def _update_edges(self, element, data, style): if 'edge_linewidth' in 
style: edges.set_linewidths(style['edge_linewidth']) - def update_handles(self, key, axis, element, ranges, style): data, style, axis_kwargs = self.get_data(element, ranges, style) self._update_nodes(element, data, style) @@ -233,7 +162,6 @@ def update_handles(self, key, axis, element, ranges, style): return axis_kwargs - class TriMeshPlot(GraphPlot): filled = param.Boolean(default=False, doc=""" @@ -243,38 +171,29 @@ class TriMeshPlot(GraphPlot): def get_data(self, element, ranges, style): edge_color = style.get('edge_color') - if edge_color not in element.nodes: - edge_color = self.edge_color_index simplex_dim = element.get_dimension(edge_color) vertex_dim = element.nodes.get_dimension(edge_color) - if not isinstance(self.edge_color_index, int) and vertex_dim and not simplex_dim: + if vertex_dim and not simplex_dim: simplices = element.array([0, 1, 2]) z = element.nodes.dimension_values(vertex_dim) z = z[simplices].mean(axis=1) element = element.add_dimension(vertex_dim, len(element.vdims), z, vdim=True) # Ensure the edgepaths for the triangles are generated before plotting element.edgepaths - return super(TriMeshPlot, self).get_data(element, ranges, style) - + return super().get_data(element, ranges, style) class ChordPlot(ChordMixin, GraphPlot): - labels = param.ClassSelector(class_=(basestring, dim), doc=""" + labels = param.ClassSelector(class_=(str, dim), doc=""" The dimension or dimension value transform used to draw labels from.""") - # Deprecated options - - label_index = param.ClassSelector(default=None, class_=(basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the node labels will be drawn""") - style_opts = GraphPlot.style_opts + ['text_font_size', 'label_offset'] _style_groups = ['edge', 'node', 'arc'] def get_data(self, element, ranges, style): - data, style, plot_kwargs = super(ChordPlot, self).get_data(element, ranges, style) + data, style, plot_kwargs = super().get_data(element, ranges, style) angles = 
element._angles paths = [] for i in range(len(element.nodes)): @@ -290,15 +209,8 @@ def get_data(self, element, ranges, style): style['arc_colors'] = style['node_facecolors'] style['arc_linewidth'] = 10 - label_dim = element.nodes.get_dimension(self.label_index) labels = self.labels - if label_dim and labels: - self.param.warning( - "Cannot declare style mapping for 'labels' option " - "and declare a label_index; ignoring the label_index.") - elif label_dim: - labels = label_dim - if isinstance(labels, basestring): + if isinstance(labels, str): labels = element.nodes.get_dimension(labels) if labels is None: @@ -334,7 +246,7 @@ def init_artists(self, ax, plot_args, plot_kwargs): ax.add_collection(edges) artists['arcs'] = edges - artists.update(super(ChordPlot, self).init_artists(ax, plot_args, plot_kwargs)) + artists.update(super().init_artists(ax, plot_args, plot_kwargs)) if 'text' in plot_args: fontsize = plot_kwargs.get('text_font_size', 8) labels = [] diff --git a/holoviews/plotting/mpl/hex_tiles.py b/holoviews/plotting/mpl/hex_tiles.py index 9f5752cb4a..73ca18ba34 100644 --- a/holoviews/plotting/mpl/hex_tiles.py +++ b/holoviews/plotting/mpl/hex_tiles.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np diff --git a/holoviews/plotting/mpl/path.py b/holoviews/plotting/mpl/path.py index d145a9934f..aea01cc229 100644 --- a/holoviews/plotting/mpl/path.py +++ b/holoviews/plotting/mpl/path.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np @@ -20,28 +18,33 @@ class PathPlot(ColorbarPlot): PathPlots axes usually define single space so aspect of Paths follows aspect in data coordinates by default.""") - color_index = param.ClassSelector(default=None, class_=(util.basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the color will the drawn""") - show_legend = param.Boolean(default=False, doc=""" Whether to 
show legend for the plot.""") style_opts = ['alpha', 'color', 'linestyle', 'linewidth', 'visible', 'cmap'] - def get_data(self, element, ranges, style): - cdim = element.get_dimension(self.color_index) + _collection = LineCollection + + def init_artists(self, ax, plot_args, plot_kwargs): + if 'c' in plot_kwargs: + plot_kwargs['array'] = plot_kwargs.pop('c') + if 'vmin' in plot_kwargs and 'vmax' in plot_kwargs: + plot_kwargs['clim'] = plot_kwargs.pop('vmin'), plot_kwargs.pop('vmax') + if not 'array' in plot_kwargs and 'cmap' in plot_kwargs: + del plot_kwargs['cmap'] + collection = self._collection(*plot_args, **plot_kwargs) + ax.add_collection(collection) + return {'artist': collection} + def get_data(self, element, ranges, style): with abbreviated_exception(): style = self._apply_transforms(element, ranges, style) - scalar = element.interface.isunique(element, cdim, per_geom=True) if cdim else False - style_mapping = any(isinstance(v, util.arraylike_types) and not (k == 'c' and scalar) - for k, v in style.items()) + style_mapping = any(True for v in style.values() if isinstance(v, util.arraylike_types)) dims = element.kdims xdim, ydim = dims generic_dt_format = Dimension.type_formatters[np.datetime64] - paths, cvals, dims = [], [], {} + paths, dims = [], {} for path in element.split(datatype='columns'): xarr, yarr = path[xdim.name], path[ydim.name] if util.isdatetime(xarr): @@ -53,41 +56,31 @@ def get_data(self, element, ranges, style): yarr = date2num(yarr) dims[1] = ydim(value_format=DateFormatter(dt_format)) arr = np.column_stack([xarr, yarr]) - if not (self.color_index is not None or style_mapping): + if not style_mapping: paths.append(arr) continue length = len(xarr) for (s1, s2) in zip(range(length-1), range(1, length+1)): - if cdim: - cvals.append(path[cdim.name]) paths.append(arr[s1:s2+1]) if self.invert_axes: paths = [p[::-1] for p in paths] - if not (self.color_index or style_mapping): - if cdim: - style['array'] = style.pop('c') - style['clim'] = 
style.pop('vmin', None), style.pop('vmax', None) + if not style_mapping: return (paths,), style, {'dimensions': dims} - if cdim: - self._norm_kwargs(element, ranges, style, cdim) - style['array'] = np.array(cvals) if 'c' in style: style['array'] = style.pop('c') if 'vmin' in style: style['clim'] = style.pop('vmin', None), style.pop('vmax', None) return (paths,), style, {'dimensions': dims} - def init_artists(self, ax, plot_args, plot_kwargs): - line_segments = LineCollection(*plot_args, **plot_kwargs) - ax.add_collection(line_segments) - return {'artist': line_segments} - def update_handles(self, key, axis, element, ranges, style): artist = self.handles['artist'] data, style, axis_kwargs = self.get_data(element, ranges, style) artist.set_paths(data[0]) if 'array' in style: artist.set_array(style['array']) + if 'vmin' in style and 'vmax' in style: + artist.set_clim((style['vmin'], style['vmax'])) + if 'clim' in style: artist.set_clim(style['clim']) if 'norm' in style: artist.set_norm(style['norm']) @@ -103,10 +96,6 @@ def update_handles(self, key, axis, element, ranges, style): class ContourPlot(PathPlot): - color_index = param.ClassSelector(default=0, class_=(util.basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the color will the drawn""") - def init_artists(self, ax, plot_args, plot_kwargs): line_segments = LineCollection(*plot_args, **plot_kwargs) ax.add_collection(line_segments) @@ -136,33 +125,6 @@ def get_data(self, element, ranges, style): elif isinstance(style.get('color'), np.ndarray): style[color_prop] = style.pop('color') - # Process deprecated color_index - if None not in [element.level, self.color_index]: - cdim = element.vdims[0] - elif 'array' not in style: - cidx = self.color_index+2 if isinstance(self.color_index, int) else self.color_index - cdim = element.get_dimension(cidx) - else: - cdim = None - - if cdim is None: - return (paths,), style, {} - - if element.level is not None: - array = np.full(len(paths), 
element.level) - else: - array = element.dimension_values(cdim, expanded=False) - if len(paths) != len(array): - # If there are multi-geometries the list of scalar values - # will not match the list of paths and has to be expanded - array = np.array([v for v, sps in zip(array, subpaths) - for _ in range(len(sps))]) - - if array.dtype.kind not in 'uif': - array = util.search_indices(array, util.unique_array(array)) - style['array'] = array - self._norm_kwargs(element, ranges, style, cdim) - style['clim'] = style.pop('vmin'), style.pop('vmax') return (paths,), style, {} @@ -182,7 +144,4 @@ class PolygonPlot(ContourPlot): 'hatch', 'linestyle', 'joinstyle', 'fill', 'capstyle', 'color'] - def init_artists(self, ax, plot_args, plot_kwargs): - polys = PatchCollection(*plot_args, **plot_kwargs) - ax.add_collection(polys) - return {'artist': polys} + _collection = PatchCollection diff --git a/holoviews/plotting/mpl/plot.py b/holoviews/plotting/mpl/plot.py index f4dd01973b..5b105e1ef8 100644 --- a/holoviews/plotting/mpl/plot.py +++ b/holoviews/plotting/mpl/plot.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from itertools import chain from contextlib import contextmanager @@ -16,7 +14,7 @@ GridSpace, Element, CompositeOverlay, Empty, Collator, GridMatrix, Layout) from ...core.options import Store, SkipRendering -from ...core.util import int_to_roman, int_to_alpha, basestring, wrap_tuple_streams +from ...core.util import int_to_roman, int_to_alpha, wrap_tuple_streams from ..plot import (DimensionedPlot, GenericLayoutPlot, GenericCompositePlot, GenericElementPlot, GenericAdjointLayoutPlot) from ..util import attach_streams, collate, displayable @@ -117,7 +115,7 @@ class MPLPlot(DimensionedPlot): def __init__(self, fig=None, axis=None, **params): self._create_fig = True - super(MPLPlot, self).__init__(**params) + super().__init__(**params) # List of handles to matplotlib objects for animation update self.fig_scale = self.fig_size/100. 
if isinstance(self.fig_inches, (tuple, list)): @@ -282,7 +280,7 @@ def _update_title(self, key): title = self._format_title(key) if self.show_title else '' if 'title' in self.handles: self.handles['title'].set_text(title) - else: + elif 'axis' in self.handles and self.handles['axis'].figure is not None: title = self.handles['axis'].set_title(title, **self._fontsize('title')) self.handles['title'] = title @@ -338,8 +336,8 @@ def __init__(self, layout, axis=None, create_axes=True, ranges=None, layout_num=1, keys=None, **params): if not isinstance(layout, GridSpace): raise Exception("GridPlot only accepts GridSpace.") - super(GridPlot, self).__init__(layout, layout_num=layout_num, - ranges=ranges, keys=keys, **params) + super().__init__(layout, layout_num=layout_num, + ranges=ranges, keys=keys, **params) # Compute ranges layoutwise grid_kwargs = {} if axis is not None: @@ -639,8 +637,7 @@ def __init__(self, layout, layout_type, subaxes, subplots, **params): # The supplied (axes, view) objects as indexed by position self.subaxes = {pos: ax for ax, pos in zip(subaxes, self.view_positions)} - super(AdjointLayoutPlot, self).__init__(subplots=subplots, **params) - + super().__init__(subplots=subplots, **params) @mpl_rc_context def initialize_plot(self, ranges=None): @@ -665,7 +662,6 @@ def initialize_plot(self, ranges=None): self.adjust_positions() self.drawn = True - def adjust_positions(self, redraw=True): """ Make adjustments to the positions of subplots (if available) @@ -713,7 +709,6 @@ def adjust_positions(self, redraw=True): if isinstance(subplot, GridPlot): ax.set_aspect('equal') - @mpl_rc_context def update_frame(self, key, ranges=None): for pos in self.view_positions: @@ -721,7 +716,6 @@ def update_frame(self, key, ranges=None): if subplot is not None: subplot.update_frame(key, ranges) - def __len__(self): return max([1 if self.keys is None else len(self.keys), 1]) @@ -767,14 +761,13 @@ class LayoutPlot(GenericLayoutPlot, CompositePlot): v17_layout_format = True 
def __init__(self, layout, keys=None, **params): - super(LayoutPlot, self).__init__(layout=layout, keys=keys, **params) + super().__init__(layout=layout, keys=keys, **params) with mpl.rc_context(rc=self.fig_rcparams): self.subplots, self.subaxes, self.layout = self._compute_gridspec(layout) if self.top_level: self.traverse(lambda x: attach_streams(self, x.hmap, 2), [GenericElementPlot]) - def _compute_gridspec(self, layout): """ Computes the tallest and widest cell for each row and column @@ -807,7 +800,7 @@ def _compute_gridspec(self, layout): main = layout_view.main main = main.last if isinstance(main, HoloMap) else main main_options = self.lookup_options(main, 'plot').options if main else {} - if main and not isinstance(main_options.get('aspect', 1), basestring): + if main and not isinstance(main_options.get('aspect', 1), str): main_aspect = np.nan if isinstance(main, Empty) else main_options.get('aspect', 1) main_aspect = self.aspect_weight*main_aspect + 1-self.aspect_weight else: @@ -838,7 +831,7 @@ def _compute_gridspec(self, layout): else: height_ratios = [4] - if not isinstance(main_aspect, (basestring, type(None))): + if not isinstance(main_aspect, (str, type(None))): width_ratios = [wratio * main_aspect for wratio in width_ratios] height_ratios = [hratio * inv_aspect for hratio in height_ratios] layout_shape = (len(width_ratios), len(height_ratios)) @@ -982,7 +975,6 @@ def _compute_gridspec(self, layout): return layout_subplots, layout_axes, collapsed_layout - def grid_situate(self, current_idx, layout_type, subgrid_width): """ Situate the current AdjointLayoutPlot in a LayoutPlot. 
The @@ -1016,7 +1008,6 @@ def grid_situate(self, current_idx, layout_type, subgrid_width): return start, inds - def _create_subplots(self, layout, positions, layout_dimensions, ranges, axes={}, num=1, create=True): """ Plot all the views contained in the AdjointLayout Object using axes diff --git a/holoviews/plotting/mpl/raster.py b/holoviews/plotting/mpl/raster.py index a2e2ea7777..14c3e5ffdf 100644 --- a/holoviews/plotting/mpl/raster.py +++ b/holoviews/plotting/mpl/raster.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np @@ -19,6 +17,11 @@ class RasterBasePlot(ElementPlot): Images by default but may be set to an explicit aspect ratio or to 'square'.""") + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. + If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + padding = param.ClassSelector(default=0, class_=(int, float, tuple)) show_legend = param.Boolean(default=False, doc=""" @@ -30,7 +33,7 @@ class RasterBasePlot(ElementPlot): _plot_methods = dict(single='imshow') def get_extents(self, element, ranges, range_type='combined'): - extents = super(RasterBasePlot, self).get_extents(element, ranges, range_type) + extents = super().get_extents(element, ranges, range_type) if self.situate_axes or range_type not in ('combined', 'data'): return extents else: @@ -97,7 +100,6 @@ def update_handles(self, key, axis, element, ranges, style): return axis_kwargs - class RGBPlot(RasterBasePlot): style_opts = ['alpha', 'interpolation', 'visible', 'filterrad'] @@ -123,11 +125,15 @@ def update_handles(self, key, axis, element, ranges, style): return axis_kwargs - class QuadMeshPlot(ColorbarPlot): clipping_colors = param.Dict(default={'NaN': 'transparent'}) + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. 
+ If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + padding = param.ClassSelector(default=0, class_=(int, float, tuple)) show_legend = param.Boolean(default=False, doc=""" @@ -141,6 +147,7 @@ class QuadMeshPlot(ColorbarPlot): def get_data(self, element, ranges, style): zdata = element.dimension_values(2, flat=False) data = np.ma.array(zdata, mask=np.logical_not(np.isfinite(zdata))) + expanded = element.interface.irregular(element, element.kdims[0]) edges = style.get('shading') != 'gouraud' coords = [element.interface.coords(element, d, ordered=True, @@ -156,7 +163,6 @@ def get_data(self, element, ranges, style): self._norm_kwargs(element, ranges, style, vdim) return tuple(cmesh_data), style, {} - def init_artists(self, ax, plot_args, plot_kwargs): locs = plot_kwargs.pop('locs', None) artist = ax.pcolormesh(*plot_args, **plot_kwargs) @@ -174,7 +180,6 @@ def init_artists(self, ax, plot_args, plot_kwargs): return {'artist': artist, 'locs': locs} - class RasterGridPlot(GridPlot, OverlayPlot): """ RasterGridPlot evenly spaces out plots of individual projections on diff --git a/holoviews/plotting/mpl/renderer.py b/holoviews/plotting/mpl/renderer.py index ec01c37b93..babb769c9a 100644 --- a/holoviews/plotting/mpl/renderer.py +++ b/holoviews/plotting/mpl/renderer.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import os import base64 diff --git a/holoviews/plotting/mpl/sankey.py b/holoviews/plotting/mpl/sankey.py index a1c2126f56..febc9a7b6e 100644 --- a/holoviews/plotting/mpl/sankey.py +++ b/holoviews/plotting/mpl/sankey.py @@ -1,11 +1,9 @@ -from __future__ import absolute_import, division, unicode_literals - import param from matplotlib.patches import Rectangle from matplotlib.collections import PatchCollection -from ...core.util import basestring, max_range +from ...core.util import max_range from ...util.transform import dim from .graphs import GraphPlot from 
.util import filter_styles @@ -13,7 +11,7 @@ class SankeyPlot(GraphPlot): - labels = param.ClassSelector(class_=(basestring, dim), doc=""" + labels = param.ClassSelector(class_=(str, dim), doc=""" The dimension or dimension value transform used to draw labels from.""") show_values = param.Boolean(default=True, doc=""" @@ -37,11 +35,11 @@ class SankeyPlot(GraphPlot): # Deprecated options - color_index = param.ClassSelector(default=2, class_=(basestring, int), + color_index = param.ClassSelector(default=2, class_=(str, int), allow_None=True, doc=""" Index of the dimension from which the node labels will be drawn""") - label_index = param.ClassSelector(default=2, class_=(basestring, int), + label_index = param.ClassSelector(default=2, class_=(str, int), allow_None=True, doc=""" Index of the dimension from which the node labels will be drawn""") @@ -70,7 +68,7 @@ def get_extents(self, element, ranges, range_type='combined'): return (x0, y0, x1, y1) def get_data(self, element, ranges, style): - data, style, axis_kwargs = super(SankeyPlot, self).get_data(element, ranges, style) + data, style, axis_kwargs = super().get_data(element, ranges, style) rects, labels = [], [] label_dim = element.nodes.get_dimension(self.label_index) @@ -82,7 +80,7 @@ def get_data(self, element, ranges, style): "and declare a label_index; ignoring the label_index.") elif label_dim: labels = label_dim - if isinstance(labels, basestring): + if isinstance(labels, str): labels = element.nodes.get_dimension(labels) if labels is None: @@ -139,7 +137,7 @@ def _update_labels(self, ax, data, style): def init_artists(self, ax, plot_args, plot_kwargs): fontsize = plot_kwargs.pop('label_text_font_size', 8) - artists = super(SankeyPlot, self).init_artists(ax, plot_args, plot_kwargs) + artists = super().init_artists(ax, plot_args, plot_kwargs) groups = [g for g in self._style_groups if g != 'node'] node_opts = filter_styles(plot_kwargs, 'node', groups, ('s', 'node_s')) rects = [Rectangle(**rect) for rect in 
plot_args['rects']] diff --git a/holoviews/plotting/mpl/stats.py b/holoviews/plotting/mpl/stats.py index 905dd64591..b0915427b7 100644 --- a/holoviews/plotting/mpl/stats.py +++ b/holoviews/plotting/mpl/stats.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np @@ -24,7 +22,6 @@ class DistributionPlot(AreaPlot): Whether the bivariate contours should be filled.""") - class BivariatePlot(PolygonPlot): """ Bivariate plot visualizes two-dimensional kernel density @@ -46,7 +43,6 @@ class BivariatePlot(PolygonPlot): A list of scalar values used to specify the contour levels.""") - class BoxPlot(ChartPlot): """ BoxPlot plots the ErrorBar Element type and supporting @@ -65,7 +61,7 @@ class BoxPlot(ChartPlot): _plot_methods = dict(single='boxplot') def get_extents(self, element, ranges, range_type='combined'): - return super(BoxPlot, self).get_extents( + return super().get_extents( element, ranges, range_type, 'categorical', element.vdims[0] ) @@ -102,7 +98,6 @@ def teardown_handles(self): v.remove() - class SideBoxPlot(AdjoinedPlot, BoxPlot): bgcolor = param.Parameter(default=(1, 1, 1, 0), doc=""" @@ -126,7 +121,7 @@ class SideBoxPlot(AdjoinedPlot, BoxPlot): 'right', 'bare' 'left-bare' and 'right-bare'.""") def __init__(self, *args, **kwargs): - super(SideBoxPlot, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if self.adjoined: self.invert_axes = not self.invert_axes diff --git a/holoviews/plotting/mpl/tabular.py b/holoviews/plotting/mpl/tabular.py index 0245182d95..c13115a62a 100644 --- a/holoviews/plotting/mpl/tabular.py +++ b/holoviews/plotting/mpl/tabular.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - from collections import defaultdict import param @@ -50,13 +48,12 @@ class TablePlot(ElementPlot): _has_axes = False def __init__(self, table, **params): - super(TablePlot, self).__init__(table, **params) + super().__init__(table, **params) if 
not self.dynamic: self.cell_widths = self._format_table() else: self.cell_widths = None - def _format_table(self): cell_widths = defaultdict(int) for key in self.keys: @@ -66,7 +63,6 @@ def _format_table(self): self._update_cell_widths(element, cell_widths) return cell_widths - def _update_cell_widths(self, element, cell_widths): # Mapping from the cell coordinates to the dictionary key. summarize = element.rows > self.max_rows @@ -86,7 +82,6 @@ def _update_cell_widths(self, element, cell_widths): if len(cell_text) + 2 > cell_widths[col]: cell_widths[col] = len(cell_text) + 2 - def _cell_value(self, element, row, col): summarize = element.rows > self.max_rows half_rows = self.max_rows//2 @@ -100,7 +95,6 @@ def _cell_value(self, element, row, col): cell_text = cell_text[:(self.max_value_len-3)]+'...' return cell_text - @mpl_rc_context def initialize_plot(self, ranges=None): @@ -115,7 +109,6 @@ def initialize_plot(self, ranges=None): axes.add_table(table) return self._finalize_axis(self.keys[-1], element=element) - def _render_table(self, element, axes): if self.dynamic: cell_widths = defaultdict(int) @@ -147,7 +140,6 @@ def _render_table(self, element, axes): table.auto_set_font_size(True) return table - def update_handles(self, key, axes, element, ranges, style): table = self._render_table(element, axes) self.handles['artist'].remove() diff --git a/holoviews/plotting/mpl/util.py b/holoviews/plotting/mpl/util.py index 5441dacb3c..acf8f149a1 100644 --- a/holoviews/plotting/mpl/util.py +++ b/holoviews/plotting/mpl/util.py @@ -1,20 +1,26 @@ -from __future__ import absolute_import, division, unicode_literals - +import inspect import re import warnings import numpy as np import matplotlib + from matplotlib import units as munits from matplotlib import ticker -from matplotlib.colors import cnames +from matplotlib.colors import Normalize, cnames from matplotlib.lines import Line2D from matplotlib.markers import MarkerStyle from matplotlib.patches import Path, PathPatch 
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D from matplotlib.rcsetup import ( - validate_capstyle, validate_fontsize, validate_fonttype, validate_hatch, - validate_joinstyle) + validate_fontsize, validate_fonttype, validate_hatch) + +try: # starting Matplotlib 3.4.0 + from matplotlib._enums import CapStyle as validate_capstyle + from matplotlib._enums import JoinStyle as validate_joinstyle +except: # before Matplotlib 3.4.0 + from matplotlib.rcsetup import ( + validate_capstyle, validate_joinstyle) try: from nc_time_axis import NetCDFTimeConverter, CalendarDateTime @@ -25,8 +31,8 @@ nc_axis_available = False from ...core.util import ( - LooseVersion, _getargspec, arraylike_types, basestring, - cftime_types, is_number,) + LooseVersion, arraylike_types, cftime_types, is_number +) from ...element import Raster, RGB, Polygons from ..util import COLOR_ALIASES, RGB_HEX_REGEX @@ -37,7 +43,7 @@ def is_color(color): """ Checks if supplied object is a valid color spec. """ - if not isinstance(color, basestring): + if not isinstance(color, str): return False elif RGB_HEX_REGEX.match(color): return True @@ -57,7 +63,7 @@ def is_color(color): 'joinstyle': validate_joinstyle, 'marker': lambda x: (x in Line2D.markers or isinstance(x, MarkerStyle) or isinstance(x, Path) or - (isinstance(x, basestring) and x.startswith('$') + (isinstance(x, str) and x.startswith('$') and x.endswith('$'))), 's': lambda x: is_number(x) and (x >= 0) } @@ -164,14 +170,14 @@ def wrap_formatter(formatter): if isinstance(formatter, ticker.Formatter): return formatter elif callable(formatter): - args = [arg for arg in _getargspec(formatter).args + args = [arg for arg in inspect.getfullargspec(formatter).args if arg != 'self'] wrapped = formatter if len(args) == 1: def wrapped(val, pos=None): return formatter(val) return ticker.FuncFormatter(wrapped) - elif isinstance(formatter, basestring): + elif isinstance(formatter, str): if re.findall(r"\{(\w+)\}", formatter): return 
ticker.StrMethodFormatter(formatter) else: @@ -393,7 +399,74 @@ def convert(cls, value, unit, axis): value = CalendarDateTime(value.datetime, value.calendar) elif isinstance(value, np.ndarray): value = np.array([CalendarDateTime(v.datetime, v.calendar) for v in value]) - return super(CFTimeConverter, cls).convert(value, unit, axis) + return super().convert(value, unit, axis) + + +class EqHistNormalize(Normalize): + + def __init__(self, vmin=None, vmax=None, clip=False, nbins=256**2, ncolors=256): + super().__init__(vmin, vmax, clip) + self._nbins = nbins + self._bin_edges = None + self._ncolors = ncolors + self._color_bins = np.linspace(0, 1, ncolors) + + def binning(self, data, n=256): + low = data.min() if self.vmin is None else self.vmin + high = data.max() if self.vmax is None else self.vmax + nbins = self._nbins + eq_bin_edges = np.linspace(low, high, nbins+1) + hist, _ = np.histogram(data, eq_bin_edges) + + eq_bin_centers = np.convolve(eq_bin_edges, [0.5, 0.5], mode='valid') + cdf = np.cumsum(hist) + cdf_max = cdf[-1] + norm_cdf = cdf/cdf_max + + # Iteratively find as many finite bins as there are colors + finite_bins = n-1 + binning = [] + iterations = 0 + guess = n*2 + while ((finite_bins != n) and (iterations < 4) and (finite_bins != 0)): + ratio = guess/finite_bins + if (ratio > 1000): + #Abort if distribution is extremely skewed + break + guess = np.round(max(n*ratio, n)) + + # Interpolate + palette_edges = np.arange(0, guess) + palette_cdf = norm_cdf*(guess-1) + binning = np.interp(palette_edges, palette_cdf, eq_bin_centers) + + # Evaluate binning + uniq_bins = np.unique(binning) + finite_bins = len(uniq_bins)-1 + iterations += 1 + if (finite_bins == 0): + binning = [low]+[high]*(n-1) + else: + binning = binning[-n:] + if (finite_bins != n): + warnings.warn("EqHistColorMapper warning: Histogram equalization did not converge.") + return binning + + def __call__(self, data, clip=None): + return self.process_value(data)[0] + + def process_value(self, 
data): + if isinstance(data, np.ndarray): + self._bin_edges = self.binning(data, self._ncolors) + isscalar = np.isscalar(data) + data = np.array([data]) if isscalar else data + interped = np.interp(data, self._bin_edges, self._color_bins) + return np.ma.array(interped), isscalar + + def inverse(self, value): + if self._bin_edges is None: + raise ValueError("Not invertible until eq_hist has been computed") + return np.interp([value], self._color_bins, self._bin_edges)[0] for cft in cftime_types: diff --git a/holoviews/plotting/plot.py b/holoviews/plotting/plot.py index 809a2a7438..44f0b147f4 100644 --- a/holoviews/plotting/plot.py +++ b/holoviews/plotting/plot.py @@ -3,8 +3,6 @@ plotting package or backend. Every plotting classes must be a subclass of this Plot baseclass. """ -from __future__ import absolute_import - import threading import uuid import warnings @@ -25,7 +23,7 @@ from ..selection import NoOpSelectionDisplay from ..core import OrderedDict from ..core import util, traversal -from ..core.data import Dataset +from ..core.data import Dataset, disable_pipeline from ..core.element import Element, Element3D from ..core.overlay import Overlay, CompositeOverlay from ..core.layout import Empty, NdLayout, Layout @@ -33,7 +31,7 @@ from ..core.overlay import NdOverlay from ..core.spaces import HoloMap, DynamicMap from ..core.util import stream_parameters, isfinite -from ..element import Table, Graph, Contours +from ..element import Table, Graph from ..streams import Stream, RangeXY, RangeX, RangeY from ..util.transform import dim from .util import ( @@ -63,7 +61,7 @@ class Plot(param.Parameterized): def __init__(self, renderer=None, root=None, **params): params = {k: v for k, v in params.items() if k in self.param} - super(Plot, self).__init__(**params) + super().__init__(**params) self.renderer = renderer if renderer else Store.renderers[self.backend].instance() self._force = False self._comm = None @@ -200,11 +198,9 @@ def cleanup(self): 
util.get_method_owner(subscriber) not in plots ] - def _session_destroy(self, session_context): self.cleanup() - def refresh(self, **kwargs): """ Refreshes the plot by rerendering it and then pushing @@ -256,7 +252,6 @@ def _trigger_refresh(self, key): with unlocked(): self.update(key) - def push(self): """ Pushes plot updates to the frontend. @@ -274,19 +269,16 @@ def push(self): 'embedded' not in root.tags and self.document and self.comm): push(self.document, self.comm) - @property def id(self): return self.comm.id if self.comm else id(self.state) - def __len__(self): """ Returns the total number of available frames. """ raise NotImplementedError - @classmethod def lookup_options(cls, obj, group): return lookup_options(obj, group, cls.backend) @@ -331,12 +323,10 @@ def _define_interface(self, plots, allow_mismatch): plot_params = {p: v for params in parameters for p, v in params.items()} return [s for style in styles for s in style], plot_params - def __call__(self, obj, **kwargs): plot_class = self.get_plot_class(obj) return plot_class(obj, **kwargs) - def get_plot_class(self, obj): key = self.selector(obj) if key not in self.plot_classes: @@ -344,10 +334,9 @@ def get_plot_class(self, obj): raise Exception(msg % (key, ', '.join(self.plot_classes.keys()))) return self.plot_classes[key] - def __setattr__(self, label, value): try: - return super(PlotSelector, self).__setattr__(label, value) + return super().__setattr__(label, value) except: raise Exception("Please set class parameters directly on classes %s" % ', '.join(str(cls) for cls in self.__dict__['plot_classes'].values())) @@ -431,8 +420,7 @@ def __init__(self, keys=None, dimensions=None, layout_dimensions=None, self.current_key = None self.ranges = {} self._updated = False # Whether the plot should be marked as updated - super(DimensionedPlot, self).__init__(**params) - + super().__init__(**params) def __getitem__(self, frame): """ @@ -446,7 +434,6 @@ def __getitem__(self, frame): self.update_frame(frame) 
return self.state - def _get_frame(self, key): """ Required on each MPLPlot type to get the data corresponding @@ -454,7 +441,6 @@ def _get_frame(self, key): """ pass - def matches(self, spec): """ Matches a specification against the current Plot. @@ -490,7 +476,6 @@ def traverse(self, fn=None, specs=None, full_breadth=True): if not full_breadth: break return accumulator - def _frame_title(self, key, group_size=2, separator='\n'): """ Returns the formatted dimension group strings @@ -509,7 +494,6 @@ def _frame_title(self, key, group_size=2, separator='\n'): for i in range(len(dimension_labels))] return util.bytes_to_unicode(separator.join(g for g in groups if g)) - def _format_title(self, key, dimensions=True, separator='\n'): if self.title_format: self.param.warning('title_format is deprecated. Please use title instead') @@ -534,7 +518,6 @@ def _format_title(self, key, dimensions=True, separator='\n'): ) return title.strip(' \n') - def _format_title_components(self, key, dimensions=True, separator='\n'): """ Determine components of title as used by _format_title method. @@ -545,7 +528,6 @@ def _format_title_components(self, key, dimensions=True, separator='\n'): """ return (self.label, self.group, type(self).__name__, '') - def _get_fontsize_defaults(self): """ Should returns default fontsize for the following keywords: @@ -562,7 +544,6 @@ def _get_fontsize_defaults(self): """ return {} - def _fontsize(self, key, label='fontsize', common=True): if not self.fontsize and not self.fontscale: return {} @@ -594,7 +575,6 @@ def _fontsize(self, key, label='fontsize', common=True): return {label: scale_fontsize(size, self.fontscale)} - def compute_ranges(self, obj, key, ranges): """ Given an object, a specific key, and the normalization options, @@ -604,6 +584,7 @@ def compute_ranges(self, obj, key, ranges): over the whole animation) and finally compute the dimension ranges in each group. The new set of ranges is returned. 
""" + prev_frame = getattr(self, 'prev_frame', None) all_table = all(isinstance(el, Table) for el in obj.traverse(lambda x: x, [Element])) if obj is None or not self.normalize or all_table: return OrderedDict() @@ -618,7 +599,7 @@ def compute_ranges(self, obj, key, ranges): # at this level, and ranges for the group have not # been supplied from a composite plot return_fn = lambda x: x if isinstance(x, Element) else None - for group, (axiswise, framewise) in norm_opts.items(): + for group, (axiswise, framewise, robust) in norm_opts.items(): axiswise = (not getattr(self, 'shared_axes', True)) or (axiswise) elements = [] # Skip if ranges are cached or already computed by a @@ -629,16 +610,17 @@ def compute_ranges(self, obj, key, ranges): elif key is not None: # Traverse to get elements for each frame frame = self._get_frame(key) elements = [] if frame is None else frame.traverse(return_fn, [group]) - + # Only compute ranges if not axiswise on a composite plot # or not framewise on a Overlay or ElementPlot if (not (axiswise and not isinstance(obj, HoloMap)) or (not framewise and isinstance(obj, HoloMap))): - self._compute_group_range(group, elements, ranges, framewise, self.top_level) + self._compute_group_range(group, elements, ranges, framewise, + axiswise, robust, self.top_level, + prev_frame) self.ranges.update(ranges) return ranges - def _get_norm_opts(self, obj): """ Gets the normalization options for a LabelledData object by @@ -676,29 +658,56 @@ def _get_norm_opts(self, obj): for i in range(1, 4)) if applies and 'norm' in opts.groups: nopts = opts['norm'].options - if 'axiswise' in nopts or 'framewise' in nopts: + popts = opts['plot'].options + if 'axiswise' in nopts or 'framewise' in nopts or 'clim_percentile' in popts: norm_opts.update({path: (nopts.get('axiswise', False), - nopts.get('framewise', False))}) + nopts.get('framewise', False), + popts.get('clim_percentile', False))}) element_specs = [spec for _, spec in element_specs] - norm_opts.update({spec: 
(False, False) for spec in element_specs + norm_opts.update({spec: (False, False, False) for spec in element_specs if not any(spec[:i] in norm_opts.keys() for i in range(1, 4))}) return norm_opts + @classmethod + def _merge_group_ranges(cls, ranges): + hard_range = util.max_range(ranges['hard'], combined=False) + soft_range = util.max_range(ranges['soft']) + robust_range = util.max_range(ranges.get('robust', [])) + data_range = util.max_range(ranges['data']) + combined = util.dimension_range(data_range[0], data_range[1], + hard_range, soft_range) + dranges = {'data': data_range, 'hard': hard_range, + 'soft': soft_range, 'combined': combined, + 'robust': robust_range, 'values': ranges} + if 'factors' in ranges: + all_factors = ranges['factors'] + factor_dtypes = {fs.dtype for fs in all_factors} if all_factors else [] + dtype = list(factor_dtypes)[0] if len(factor_dtypes) == 1 else None + expanded = [v for fctrs in all_factors for v in fctrs] + if dtype is not None: + try: + # Try to keep the same dtype + expanded = np.array(expanded, dtype=dtype) + except Exception: + pass + dranges['factors'] = util.unique_array(expanded) + return dranges @classmethod - def _compute_group_range(cls, group, elements, ranges, framewise, top_level): + def _compute_group_range(cls, group, elements, ranges, framewise, + axiswise, robust, top_level, prev_frame): # Iterate over all elements in a normalization group # and accumulate their ranges into the supplied dictionary. 
elements = [el for el in elements if el is not None] + data_ranges = {} + robust_ranges = {} categorical_dims = [] for el in elements: for el_dim in el.dimensions('ranges'): if hasattr(el, 'interface'): if isinstance(el, Graph) and el_dim in el.nodes.dimensions(): dtype = el.nodes.interface.dtype(el.nodes, el_dim) - elif isinstance(el, Contours) and el.level is not None: - dtype = np.array([el.level]).dtype # Remove when deprecating level else: dtype = el.interface.dtype(el, el_dim) elif hasattr(el, '__len__') and len(el): @@ -719,9 +728,15 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): data_range = el.range(el_dim, dimension_range=False) data_ranges[(el, el_dim)] = data_range - - if (any(isinstance(r, util.basestring) for r in data_range) or - (el_dim.type is not None and issubclass(el_dim.type, util.basestring)) or + if dtype is not None and dtype.kind == 'uif' and robust: + percentile = 2 if isinstance(robust, bool) else robust + robust_ranges[(el, el_dim)] = ( + dim(el_dim, np.nanpercentile, percentile).apply(el), + dim(el_dim, np.nanpercentile, percentile).apply(el) + ) + + if (any(isinstance(r, str) for r in data_range) or + (el_dim.type is not None and issubclass(el_dim.type, str)) or (dtype is not None and dtype.kind in 'SU')): categorical_dims.append(el_dim) @@ -759,7 +774,9 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): except: factors = util.unique_array(values) if dim_name not in group_ranges: - group_ranges[dim_name] = {'data': [], 'hard': [], 'soft': []} + group_ranges[dim_name] = { + 'id': [], 'data': [], 'hard': [], 'soft': [] + } if factors is not None: if 'factors' not in group_ranges[dim_name]: @@ -767,6 +784,7 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): group_ranges[dim_name]['factors'].append(factors) else: group_ranges[dim_name]['data'].append(drange) + group_ranges[dim_name]['id'].append(id(el)) # Compute dimension normalization for el_dim 
in el.dimensions('ranges'): @@ -775,10 +793,14 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): continue data_range = data_ranges[(el, el_dim)] if dim_name not in group_ranges: - group_ranges[dim_name] = {'data': [], 'hard': [], 'soft': []} + group_ranges[dim_name] = { + 'id': [], 'data': [], 'hard': [], 'soft': [], 'robust': [] + } group_ranges[dim_name]['data'].append(data_range) group_ranges[dim_name]['hard'].append(el_dim.range) group_ranges[dim_name]['soft'].append(el_dim.soft_range) + if (el, el_dim) in robust_ranges: + group_ranges[dim_name]['robust'].append(robust_ranges[(el, el_dim)]) if el_dim in categorical_dims: if 'factors' not in group_ranges[dim_name]: group_ranges[dim_name]['factors'] = [] @@ -797,12 +819,14 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): values = np.concatenate(values) if len(values) else [] factors = util.unique_array(values) group_ranges[dim_name]['factors'].append(factors) + group_ranges[dim_name]['id'].append(id(el)) + # Avoid merging ranges with non-matching types group_dim_ranges = defaultdict(dict) for gdim, values in group_ranges.items(): matching = True for t, rs in values.items(): - if t == 'factors': + if t in ('factors', 'id'): continue matching &= ( len({'date' if isinstance(v, util.datetime_types) else 'number' @@ -811,29 +835,37 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): if matching: group_dim_ranges[gdim] = values + # Merge ranges across elements dim_ranges = [] for gdim, values in group_dim_ranges.items(): - hard_range = util.max_range(values['hard'], combined=False) - soft_range = util.max_range(values['soft']) - data_range = util.max_range(values['data']) - combined = util.dimension_range(data_range[0], data_range[1], - hard_range, soft_range) - dranges = {'data': data_range, 'hard': hard_range, - 'soft': soft_range, 'combined': combined} - if 'factors' in values: - all_factors = values['factors'] - 
factor_dtypes = {fs.dtype for fs in all_factors} if all_factors else [] - dtype = list(factor_dtypes)[0] if len(factor_dtypes) == 1 else None - expanded = [v for fctrs in all_factors for v in fctrs] - if dtype is not None: - try: - # Try to keep the same dtype - expanded = np.array(expanded, dtype=dtype) - except Exception: - pass - dranges['factors'] = util.unique_array(expanded) + dranges = cls._merge_group_ranges(values) dim_ranges.append((gdim, dranges)) - if prev_ranges and not (framewise and top_level): + + # Merge local ranges into global range dictionary + if prev_ranges and not (top_level or axiswise) and framewise and prev_frame is not None: + # Partially update global ranges with local changes + prev_ids = prev_frame.traverse(lambda o: id(o)) + for d, dranges in dim_ranges: + values = prev_ranges.get(d, {}).get('values', None) + + if values is None or 'id' not in values: + for g, drange in dranges.items(): + if d not in prev_ranges: + prev_ranges[d] = {} + prev_ranges[d][g] = drange + continue + + ids = values.get('id') + + # Filter out ranges of updated elements and append new ranges + merged = {} + for g, drange in dranges['values'].items(): + filtered = [r for i, r in zip(ids, values[g]) if i not in prev_ids] + filtered += drange + merged[g] = filtered + prev_ranges[d] = cls._merge_group_ranges(merged) + elif prev_ranges and not (framewise and (top_level or axiswise)): + # Combine local with global range for d, dranges in dim_ranges: for g, drange in dranges.items(): prange = prev_ranges.get(d, {}).get(g, None) @@ -841,15 +873,15 @@ def _compute_group_range(cls, group, elements, ranges, framewise, top_level): if d not in prev_ranges: prev_ranges[d] = {} prev_ranges[d][g] = drange - elif g == 'factors': + elif g in ('factors', 'values'): prev_ranges[d][g] = drange else: prev_ranges[d][g] = util.max_range([prange, drange], combined=g=='hard') else: + # Override global range ranges[group] = OrderedDict(dim_ranges) - @classmethod def 
_traverse_options(cls, obj, opt_type, opts, specs=None, keyfn=None, defaults=True): """ @@ -896,7 +928,6 @@ def lookup(x): options[key][opt] = v return options if keyfn else options[None] - def _get_projection(cls, obj): """ Uses traversal to find the appropriate projection @@ -920,7 +951,6 @@ def _get_projection(cls, obj): raise Exception("An axis may only be assigned one projection type") return custom_projs[0] if custom_projs else None - def update(self, key): if len(self) == 1 and ((key == 0) or (key == self.keys[0])) and not self.drawn: return self.initialize_plot() @@ -928,7 +958,6 @@ def update(self, key): self.traverse(lambda x: setattr(x, '_updated', True)) return item - def __len__(self): """ Returns the total number of available frames. @@ -936,7 +965,6 @@ def __len__(self): return len(self.keys) - class CallbackPlot(object): backend = None @@ -946,6 +974,7 @@ def _construct_callbacks(self): Initializes any callbacks for streams which have defined the plotted object as a source. 
""" + source_streams = [] cb_classes = set() registry = list(Stream.registry.items()) callbacks = Stream._callbacks[self.backend] @@ -961,8 +990,11 @@ def _construct_callbacks(self): sorted_cbs = sorted(cb_classes, key=lambda x: id(x[0])) for cb, group in groupby(sorted_cbs, lambda x: x[0]): cb_streams = [s for _, s in group] + for cb_stream in cb_streams: + if cb_stream not in source_streams: + source_streams.append(cb_stream) cbs.append(cb(self, cb_streams, source)) - return cbs + return cbs, source_streams @property def link_sources(self): @@ -1166,9 +1198,8 @@ def __init__(self, element, keys=None, ranges=None, dimensions=None, plot_opts.update(**{k: v[0] for k, v in inherited.items() if k not in plot_opts}) - super(GenericElementPlot, self).__init__(keys=keys, dimensions=dimensions, - dynamic=dynamic, - **dict(params, **plot_opts)) + super().__init__(keys=keys, dimensions=dimensions, + dynamic=dynamic, **dict(params, **plot_opts)) self.streams = get_nested_streams(self.hmap) if streams is None else streams # Attach streams if not overlaid and not a batched ElementPlot @@ -1185,7 +1216,6 @@ def __init__(self, element, keys=None, ranges=None, dimensions=None, else: self.ordering = [] - def get_zorder(self, overlay, key, el): """ Computes the z-order of element in the NdOverlay @@ -1194,14 +1224,12 @@ def get_zorder(self, overlay, key, el): spec = util.get_overlay_spec(overlay, key, el) return self.ordering.index(spec) - def _updated_zorders(self, overlay): specs = [util.get_overlay_spec(overlay, key, el) for key, el in overlay.data.items()] self.ordering = sorted(set(self.ordering+specs)) return [self.ordering.index(spec) for spec in specs] - def _get_frame(self, key): if isinstance(self.hmap, DynamicMap) and self.overlaid and self.current_frame: self.current_key = key @@ -1220,7 +1248,6 @@ def _get_frame(self, key): self.current_key = key return frame - def _execute_hooks(self, element): """ Executes finalize hooks @@ -1241,13 +1268,11 @@ def 
_execute_hooks(self, element): self.param.warning("Plotting hook %r could not be " "applied:\n\n %s" % (hook, e)) - def get_aspect(self, xspan, yspan): """ Should define the aspect ratio of the plot. """ - def get_padding(self, obj, extents): """ Computes padding along the axes taking into account the plot aspect. @@ -1281,7 +1306,6 @@ def get_padding(self, obj, extents): ypad = tuple(yp*aspect for yp in ypad) if isinstance(ypad, tuple) else ypad*aspect return xpad, ypad, zpad - def _get_range_extents(self, element, ranges, range_type, xdim, ydim, zdim): dims = element.dimensions() ndims = len(dims) @@ -1296,27 +1320,17 @@ def _get_range_extents(self, element, ranges, range_type, xdim, ydim, zdim): (y0, y1), ysrange, yhrange = get_range(element, ranges, ydim) (z0, z1), zsrange, zhrange = get_range(element, ranges, zdim) + trigger = False if not self.overlaid and not self.batched: xspan, yspan, zspan = (v/2. for v in get_axis_padding(self.default_span)) mx0, mx1 = get_minimum_span(x0, x1, xspan) - - # If auto-padding is enabled ensure RangeXY dependent plots - # are recomputed before initial render if x0 != mx0 or x1 != mx1: - for stream in self.streams: - if isinstance(stream, (RangeX, RangeXY)): - stream.update(x_range=(mx0, mx1)) - if stream not in self._trigger: - self._trigger.append(stream) x0, x1 = mx0, mx1 + trigger = True my0, my1 = get_minimum_span(y0, y1, yspan) if y0 != my0 or y1 != my1: - for stream in self.streams: - if isinstance(stream, (RangeY, RangeXY)): - stream.update(y_range=(my0, my1)) - if stream not in self._trigger: - self._trigger.append(stream) y0, y1 = my0, my1 + trigger = True mz0, mz1 = get_minimum_span(z0, z1, zspan) xpad, ypad, zpad = self.get_padding(element, (x0, y0, z0, x1, y1, z1)) @@ -1353,8 +1367,14 @@ def _get_range_extents(self, element, ranges, range_type, xdim, ydim, zdim): elif zdim is None: z0, z1 = np.NaN, np.NaN return (x0, y0, z0, x1, y1, z1) - return (x0, y0, x1, y1) + if not self.drawn: + for stream in getattr(self, 
'source_streams', []): + if (isinstance(stream, (RangeX, RangeY, RangeXY)) and + trigger and stream not in self._trigger): + self._trigger.append(stream) + + return (x0, y0, x1, y1) def get_extents(self, element, ranges, range_type='combined', xdim=None, ydim=None, zdim=None): """ @@ -1415,12 +1435,27 @@ def get_extents(self, element, ranges, range_type='combined', xdim=None, ydim=No x0, x1 = util.dimension_range(x0, x1, self.xlim, (None, None)) y0, y1 = util.dimension_range(y0, y1, self.ylim, (None, None)) + + if not self.drawn: + x_range, y_range = ((y0, y1), (x0, x1)) if self.invert_axes else ((x0, x1), (y0, y1)) + for stream in getattr(self, 'source_streams', []): + if isinstance(stream, RangeX): + params = {'x_range': x_range} + elif isinstance(stream, RangeY): + params = {'y_range': y_range} + elif isinstance(stream, RangeXY): + params = {'x_range': x_range, 'y_range': y_range} + else: + continue + stream.update(**params) + if stream not in self._trigger and (self.xlim or self.ylim): + self._trigger.append(stream) + if self.projection == '3d': z0, z1 = util.dimension_range(z0, z1, self.zlim, (None, None)) return (x0, y0, z0, x1, y1, z1) return (x0, y0, x1, y1) - def _get_axis_labels(self, dimensions, xlabel=None, ylabel=None, zlabel=None): if self.xlabel is not None: xlabel = self.xlabel @@ -1440,7 +1475,6 @@ def _get_axis_labels(self, dimensions, xlabel=None, ylabel=None, zlabel=None): zlabel = dim_axis_label(dimensions[2]) if dimensions[2] else '' return xlabel, ylabel, zlabel - def _format_title_components(self, key, dimensions=True, separator='\n'): frame = self._get_frame(key) if frame is None: @@ -1456,7 +1490,6 @@ def _format_title_components(self, key, dimensions=True, separator='\n'): return (label, group, type_name, dim_title) - def update_frame(self, key, ranges=None): """ Set the plot(s) to the given frame number. 
Operates by @@ -1498,8 +1531,8 @@ def __init__(self, overlay, ranges=None, batched=True, keys=None, group_counter= if 'projection' not in params: params['projection'] = self._get_projection(overlay) - super(GenericOverlayPlot, self).__init__(overlay, ranges=ranges, keys=keys, - batched=batched, **params) + super().__init__(overlay, ranges=ranges, keys=keys, + batched=batched, **params) # Apply data collapse self.hmap = self._apply_compositor(self.hmap, ranges, self.keys) @@ -1515,7 +1548,6 @@ def __init__(self, overlay, ranges=None, batched=True, keys=None, group_counter= self.traverse(lambda x: attach_streams(self, x.hmap, 2), [GenericElementPlot]) - def _apply_compositor(self, holomap, ranges=None, keys=None, dimensions=None): """ Given a HoloMap compute the appropriate (mapwise or framewise) @@ -1536,8 +1568,9 @@ def _apply_compositor(self, holomap, ranges=None, keys=None, dimensions=None): for key in holomap.data.keys()]) ranges = frame_ranges.values() - return Compositor.collapse(holomap, (ranges, frame_ranges.keys()), mode='display') - + with disable_pipeline(): + collapsed = Compositor.collapse(holomap, (ranges, frame_ranges.keys()), mode='display') + return collapsed def _create_subplots(self, ranges): # Check if plot should be batched @@ -1581,7 +1614,6 @@ def _create_subplots(self, ranges): "in the Overlay." 
% self.renderer.backend) return subplots - def _create_subplot(self, key, obj, streams, ranges): registry = Store.registry[self.renderer.backend] ordering = util.layer_sort(self.hmap) @@ -1695,7 +1727,6 @@ def _create_dynamic_subplots(self, key, items, ranges, **init_kwargs): subplot.update_frame(key, ranges, element=obj) self.dynamic_subplots.append(subplot) - def _update_subplot(self, subplot, spec): """ Updates existing subplots when the subplot has been assigned @@ -1719,7 +1750,6 @@ def _update_subplot(self, subplot, spec): new_dims = zip(subplot.overlay_dims, odim_key) subplot.overlay_dims = util.OrderedDict(new_dims) - def _get_subplot_extents(self, overlay, ranges, range_type): """ Iterates over all subplots and collects the extents of each. @@ -1759,7 +1789,6 @@ def _get_subplot_extents(self, overlay, ranges, range_type): extents[rt].append(extent) return extents - def get_extents(self, overlay, ranges, range_type='combined'): subplot_extents = self._get_subplot_extents(overlay, ranges, range_type) zrange = self.projection == '3d' @@ -1824,10 +1853,8 @@ def __init__(self, layout, keys=None, dimensions=None, **params): if unbounded: initialize_unbounded(layout, dimensions, keys[0]) self.layout = layout - super(GenericCompositePlot, self).__init__(keys=keys, - dynamic=dynamic, - dimensions=dimensions, - **params) + super().__init__(keys=keys, dynamic=dynamic, + dimensions=dimensions, **params) nested_streams = layout.traverse(lambda x: get_nested_streams(x), [DynamicMap]) self.streams = list(set([s for streams in nested_streams for s in streams])) @@ -1888,7 +1915,7 @@ def __init__(self, layout, **params): if len(layout.values()) == 0: raise SkipRendering(warn=False) - super(GenericLayoutPlot, self).__init__(layout, **params) + super().__init__(layout, **params) self.subplots = {} self.rows, self.cols = layout.shape[::-1] if self.transpose else layout.shape self.coords = list(product(range(self.rows), diff --git a/holoviews/plotting/plotly/__init__.py 
b/holoviews/plotting/plotly/__init__.py index af055e45d4..e19b2fba34 100644 --- a/holoviews/plotting/plotly/__init__.py +++ b/holoviews/plotting/plotly/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import plotly from param import concrete_descendents @@ -65,7 +63,6 @@ Surface: SurfacePlot, Path3D: Path3DPlot, TriSurface: TriSurfacePlot, - Trisurface: TriSurfacePlot, # Alias, remove in 2.0 # Tabular Table: TablePlot, @@ -103,7 +100,7 @@ for plot in concrete_descendents(ElementPlot).values(): plot.padding = 0 -dflt_cmap = 'fire' +dflt_cmap = config.default_cmap dflt_shape_line_color = '#2a3f5f' # Line color of default plotly template point_size = np.sqrt(6) # Matches matplotlib default @@ -113,18 +110,18 @@ # Charts options.Curve = Options('style', color=Cycle(), line_width=2) options.ErrorBars = Options('style', color='black') -options.Scatter = Options('style', color=Cycle()) -options.Points = Options('style', color=Cycle()) +options.Scatter = Options('style', color=Cycle(), cmap=dflt_cmap) +options.Points = Options('style', color=Cycle(), cmap=dflt_cmap) options.Area = Options('style', color=Cycle(), line_width=2) options.Spread = Options('style', color=Cycle(), line_width=2) -options.TriSurface = Options('style', cmap='viridis') +options.TriSurface = Options('style', cmap=dflt_cmap) options.Histogram = Options('style', color=Cycle(), line_width=1, line_color='black') # Rasters -options.Image = Options('style', cmap=dflt_cmap) -options.Raster = Options('style', cmap=dflt_cmap) -options.QuadMesh = Options('style', cmap=dflt_cmap) -options.HeatMap = Options('style', cmap='RdBu_r') +options.Image = Options('style', cmap=config.default_gridded_cmap) +options.Raster = Options('style', cmap=config.default_gridded_cmap) +options.QuadMesh = Options('style', cmap=config.default_gridded_cmap) +options.HeatMap = Options('style', cmap=config.default_heatmap_cmap) # Disable padding for image-like elements options.Image = 
Options("plot", padding=0) diff --git a/holoviews/plotting/plotly/annotation.py b/holoviews/plotting/plotly/annotation.py index 549e3582f3..56843e5912 100644 --- a/holoviews/plotting/plotly/annotation.py +++ b/holoviews/plotting/plotly/annotation.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param from .chart import ScatterPlot diff --git a/holoviews/plotting/plotly/callbacks.py b/holoviews/plotting/plotly/callbacks.py index 5690a3e52d..c783137c09 100644 --- a/holoviews/plotting/plotly/callbacks.py +++ b/holoviews/plotting/plotly/callbacks.py @@ -21,13 +21,13 @@ class PlotlyCallbackMetaClass(type): """ def __init__(cls, name, bases, attrs): - super(PlotlyCallbackMetaClass, cls).__init__(name, bases, attrs) + super().__init__(name, bases, attrs) # Create weak-value dictionary to hold instances of the class cls.instances = WeakValueDictionary() def __call__(cls, *args, **kwargs): - inst = super(PlotlyCallbackMetaClass, cls).__call__(*args, **kwargs) + inst = super().__call__(*args, **kwargs) # Store weak reference to the callback instance in the _instances # WeakValueDictionary. 
This will allow instances to be garbage collected and diff --git a/holoviews/plotting/plotly/chart.py b/holoviews/plotting/plotly/chart.py index 2086e48504..4ca1bdde58 100644 --- a/holoviews/plotting/plotly/chart.py +++ b/holoviews/plotting/plotly/chart.py @@ -1,14 +1,12 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np from .selection import PlotlyOverlaySelectionDisplay -from ...core import util from ...operation import interpolate_curve from ...element import Tiles from ..mixins import AreaMixin, BarsMixin from .element import ElementPlot, ColorbarPlot +from .selection import PlotlyOverlaySelectionDisplay class ChartPlot(ElementPlot): @@ -36,10 +34,6 @@ def get_data(self, element, ranges, style, is_geo=False, **kwargs): class ScatterPlot(ChartPlot, ColorbarPlot): - color_index = param.ClassSelector(default=None, class_=(util.basestring, int), - allow_None=True, doc=""" - Index of the dimension from which the color will the drawn""") - style_opts = [ 'visible', 'marker', @@ -51,12 +45,12 @@ class ScatterPlot(ChartPlot, ColorbarPlot): 'selectedpoints', ] + selection_display = PlotlyOverlaySelectionDisplay() + _nonvectorized_styles = ['visible', 'cmap', 'alpha', 'sizemin', 'selectedpoints'] _style_key = 'marker' - selection_display = PlotlyOverlaySelectionDisplay() - _supports_geo = True @classmethod @@ -68,12 +62,6 @@ def trace_kwargs(cls, is_geo=False, **kwargs): def graph_options(self, element, ranges, style, **kwargs): opts = super(ScatterPlot, self).graph_options(element, ranges, style, **kwargs) - cdim = element.get_dimension(self.color_index) - if cdim: - copts = self.get_color_opts(cdim, element, ranges, style) - copts['color'] = element.dimension_values(cdim) - opts['marker'].update(copts) - # If cmap was present and applicable, it was processed by get_color_opts above. 
# Remove it now to avoid plotly validation error opts.get('marker', {}).pop('cmap', None) @@ -111,7 +99,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): def get_data(self, element, ranges, style, **kwargs): if 'steps' in self.interpolation: element = interpolate_curve(element, interpolation=self.interpolation) - return super(CurvePlot, self).get_data(element, ranges, style, **kwargs) + return super().get_data(element, ranges, style, **kwargs) class AreaPlot(AreaMixin, ChartPlot): @@ -129,7 +117,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): def get_data(self, element, ranges, style, **kwargs): x, y = ('y', 'x') if self.invert_axes else ('x', 'y') if len(element.vdims) == 1: - kwargs = super(AreaPlot, self).get_data(element, ranges, style, **kwargs)[0] + kwargs = super().get_data(element, ranges, style, **kwargs)[0] kwargs['fill'] = 'tozero'+y return [kwargs] xs = element.dimension_values(0) @@ -285,7 +273,7 @@ def get_data(self, element, ranges, style, **kwargs): return bars def init_layout(self, key, element, ranges, **kwargs): - layout = super(BarPlot, self).init_layout(key, element, ranges) + layout = super().init_layout(key, element, ranges) stack_dim = None if element.ndims > 1 and self.stacked: stack_dim = element.get_dimension(1) @@ -328,6 +316,6 @@ def get_data(self, element, ranges, style, **kwargs): return [{'x': xs, 'y': ys, 'width': binwidth, 'orientation': orientation}] def init_layout(self, key, element, ranges, **kwargs): - layout = super(HistogramPlot, self).init_layout(key, element, ranges) + layout = super().init_layout(key, element, ranges) layout['barmode'] = 'overlay' return layout diff --git a/holoviews/plotting/plotly/chart3d.py b/holoviews/plotting/plotly/chart3d.py index f8553485d8..6d870d3d15 100644 --- a/holoviews/plotting/plotly/chart3d.py +++ b/holoviews/plotting/plotly/chart3d.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np @@ -49,7 +47,7 @@ def 
trace_kwargs(cls, is_geo=False, **kwargs): return {'type': 'surface'} def graph_options(self, element, ranges, style, **kwargs): - opts = super(SurfacePlot, self).graph_options(element, ranges, style, **kwargs) + opts = super().graph_options(element, ranges, style, **kwargs) copts = self.get_color_opts(element.vdims[0], element, ranges, style) return dict(opts, **copts) @@ -85,7 +83,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): return {'type': 'scatter3d', 'mode': 'lines'} def graph_options(self, element, ranges, style, **kwargs): - opts = super(Path3DPlot, self).graph_options(element, ranges, style, **kwargs) + opts = super().graph_options(element, ranges, style, **kwargs) opts['line'].pop('showscale', None) return opts @@ -113,7 +111,7 @@ def get_data(self, element, ranges, style, **kwargs): return [dict(x=x, y=y, z=z, simplices=simplices)] def graph_options(self, element, ranges, style, **kwargs): - opts = super(TriSurfacePlot, self).graph_options( + opts = super().graph_options( element, ranges, style, **kwargs ) copts = self.get_color_opts(element.dimensions()[2], element, ranges, style) diff --git a/holoviews/plotting/plotly/dash.py b/holoviews/plotting/plotly/dash.py index cc903b7c73..eb4b95d3da 100644 --- a/holoviews/plotting/plotly/dash.py +++ b/holoviews/plotting/plotly/dash.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - # standard library imports import uuid import copy @@ -62,7 +60,9 @@ def get_layout_ranges(plot): return layout_ranges -def plot_to_figure(plot, reset_nclicks=0, layout_ranges=None, responsive=True): +def plot_to_figure( + plot, reset_nclicks=0, layout_ranges=None, responsive=True, use_ranges=True +): """ Convert a HoloViews plotly plot to a plotly.py Figure. 
@@ -82,7 +82,7 @@ def plot_to_figure(plot, reset_nclicks=0, layout_ranges=None, responsive=True): fig_dict['layout']['uirevision'] = "reset-" + str(reset_nclicks) # Remove range specification so plotly.js autorange + uirevision is in control - if layout_ranges: + if layout_ranges and use_ranges: for k in fig_dict['layout']: if k.startswith('xaxis') or k.startswith('yaxis'): fig_dict['layout'][k].pop('range', None) @@ -103,7 +103,7 @@ def plot_to_figure(plot, reset_nclicks=0, layout_ranges=None, responsive=True): # Pass to figure constructor to expand magic underscore notation fig = go.Figure(fig_dict) - if layout_ranges: + if layout_ranges and use_ranges: fig.update_layout(layout_ranges) return fig @@ -284,7 +284,7 @@ def decode_store_data(store_data): def to_dash( app, hvobjs, reset_button=False, graph_class=dcc.Graph, - button_class=html.Button, responsive="width", + button_class=html.Button, responsive="width", use_ranges=True, ): """ Build Dash components and callbacks from a collection of HoloViews objects @@ -304,6 +304,9 @@ def to_dash( HoloViews size. If "width" (default), the width is responsive but height matches the HoloViews size. If "height", the height is responsive but the width matches the HoloViews size. + use_ranges: If True, initialize graphs with the dimension ranges specified + in the HoloViews objects. If False, allow Dash to perform its own + auto-range calculations. 
Returns: DashComponents named tuple with properties: - graphs: List of graph components (with type matching the input @@ -366,7 +369,8 @@ def to_dash( layout_ranges.append(get_layout_ranges(plot)) fig = plot_to_figure( - plot, reset_nclicks=0, layout_ranges=layout_ranges[-1], responsive=responsive + plot, reset_nclicks=0, layout_ranges=layout_ranges[-1], + responsive=responsive, use_ranges=use_ranges, ).to_dict() initial_fig_dicts.append(fig) @@ -591,7 +595,8 @@ def update_figure(*args): plot = PlotlyRenderer.get_plot(hvobj) fig = plot_to_figure( plot, reset_nclicks=reset_nclicks, - layout_ranges=layout_ranges[fig_ind], responsive=responsive + layout_ranges=layout_ranges[fig_ind], responsive=responsive, + use_ranges=use_ranges, ).to_dict() figs[fig_ind] = fig diff --git a/holoviews/plotting/plotly/element.py b/holoviews/plotting/plotly/element.py index 66efd8f9dc..c03cda0d76 100644 --- a/holoviews/plotting/plotly/element.py +++ b/holoviews/plotting/plotly/element.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import uuid import numpy as np import param @@ -109,10 +107,10 @@ class ElementPlot(PlotlyPlot, GenericElementPlot): _nonvectorized_styles = [] def __init__(self, element, plot=None, **params): - super(ElementPlot, self).__init__(element, **params) + super().__init__(element, **params) self.trace_uid = str(uuid.uuid4()) self.static = len(self.hmap) == 1 and len(self.keys) == len(self.hmap) - self.callbacks = self._construct_callbacks() + self.callbacks, self.source_streams = self._construct_callbacks() @classmethod def trace_kwargs(cls, **kwargs): @@ -134,8 +132,11 @@ def initialize_plot(self, ranges=None, is_geo=False): def generate_plot(self, key, ranges, element=None, is_geo=False): + self.prev_frame = self.current_frame if element is None: element = self._get_frame(key) + else: + self.current_frame = element if is_geo and not self._supports_geo: raise ValueError( @@ -333,7 +334,7 @@ def _get_axis_dims(self, 
element): def _apply_transforms(self, element, ranges, style): new_style = dict(style) for k, v in dict(style).items(): - if isinstance(v, util.basestring): + if isinstance(v, str): if k == 'marker' and v in 'xsdo': continue elif v in element: @@ -553,7 +554,7 @@ def _get_ticks(self, axis, ticker): if isinstance(ticker, (tuple, list)): if all(isinstance(t, tuple) for t in ticker): ticks, labels = zip(*ticker) - labels = [l if isinstance(l, util.basestring) else str(l) + labels = [l if isinstance(l, str) else str(l) for l in labels] axis_props['tickvals'] = ticks axis_props['ticktext'] = labels @@ -572,8 +573,14 @@ def update_frame(self, key, ranges=None, element=None, is_geo=False): class ColorbarPlot(ElementPlot): clim = param.NumericTuple(default=(np.nan, np.nan), length=2, doc=""" - User-specified colorbar axis range limits for the plot, as a tuple (low,high). - If specified, takes precedence over data and dimension ranges.""") + User-specified colorbar axis range limits for the plot, as a + tuple (low,high). If specified, takes precedence over data + and dimension ranges.""") + + clim_percentile = param.ClassSelector(default=False, class_=(int, float, bool), doc=""" + Percentile value to compute colorscale robust to outliers. 
If + True, uses 2nd and 98th percentile; otherwise uses the specified + numerical percentile value.""") colorbar = param.Boolean(default=False, doc=""" Whether to display a colorbar.""") @@ -608,7 +615,10 @@ def get_color_opts(self, eldim, element, ranges, style): if util.isfinite(self.clim).all(): cmin, cmax = self.clim elif dim_name in ranges: - cmin, cmax = ranges[dim_name]['combined'] + if self.clim_percentile and 'robust' in ranges[dim_name]: + low, high = ranges[dim_name]['robust'] + else: + cmin, cmax = ranges[dim_name]['combined'] elif isinstance(eldim, dim): cmin, cmax = np.nan, np.nan auto = True @@ -714,8 +724,8 @@ def generate_plot(self, key, ranges, element=None, is_geo=False): return figure def update_frame(self, key, ranges=None, element=None, is_geo=False): - reused = isinstance(self.hmap, DynamicMap) and self.overlaid + self.prev_frame = self.current_frame if not reused and element is None: element = self._get_frame(key) elif element is not None: diff --git a/holoviews/plotting/plotly/images.py b/holoviews/plotting/plotly/images.py index 2f55b9dc12..c4d513794d 100644 --- a/holoviews/plotting/plotly/images.py +++ b/holoviews/plotting/plotly/images.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import numpy as np from plotly.graph_objs.layout import Image as _Image diff --git a/holoviews/plotting/plotly/plot.py b/holoviews/plotting/plotly/plot.py index bd42f679da..31e313d595 100644 --- a/holoviews/plotting/plotly/plot.py +++ b/holoviews/plotting/plotly/plot.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param from holoviews.plotting.util import attach_streams @@ -68,7 +66,7 @@ class LayoutPlot(PlotlyPlot, GenericLayoutPlot): disabled switches axiswise normalization option on globally.""") def __init__(self, layout, **params): - super(LayoutPlot, self).__init__(layout, **params) + super().__init__(layout, **params) self.layout, self.subplots, self.paths = 
self._init_layout(layout) if self.top_level: @@ -252,8 +250,7 @@ def __init__(self, layout, layout_type, subplots, **params): self.view_positions = self.layout_dict[self.layout_type]['positions'] # The supplied (axes, view) objects as indexed by position - super(AdjointLayoutPlot, self).__init__(subplots=subplots, **params) - + super().__init__(subplots=subplots, **params) def initialize_plot(self, ranges=None, is_geo=False): """ @@ -265,7 +262,6 @@ def initialize_plot(self, ranges=None, is_geo=False): """ return self.generate_plot(self.keys[-1], ranges, is_geo=is_geo) - def generate_plot(self, key, ranges=None, is_geo=False): adjoined_plots = [] for pos in ['main', 'right', 'top']: @@ -298,7 +294,7 @@ class GridPlot(PlotlyPlot, GenericCompositePlot): def __init__(self, layout, ranges=None, layout_num=1, **params): if not isinstance(layout, GridSpace): raise Exception("GridPlot only accepts GridSpace.") - super(GridPlot, self).__init__(layout=layout, layout_num=layout_num, + super().__init__(layout=layout, layout_num=layout_num, ranges=ranges, **params) self.cols, self.rows = layout.shape self.subplots, self.layout = self._create_subplots(layout, ranges) diff --git a/holoviews/plotting/plotly/raster.py b/holoviews/plotting/plotly/raster.py index dd361c2138..554779fc91 100644 --- a/holoviews/plotting/plotly/raster.py +++ b/holoviews/plotting/plotly/raster.py @@ -11,6 +11,11 @@ class RasterPlot(ColorbarPlot): + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. 
+ If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + padding = param.ClassSelector(default=0, class_=(int, float, tuple)) style_opts = ['visible', 'cmap', 'alpha'] @@ -20,7 +25,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): return {'type': 'heatmap'} def graph_options(self, element, ranges, style, **kwargs): - opts = super(RasterPlot, self).graph_options(element, ranges, style, **kwargs) + opts = super().graph_options(element, ranges, style, **kwargs) copts = self.get_color_opts(element.vdims[0], element, ranges, style) opts['zmin'] = copts.pop('cmin') opts['zmax'] = copts.pop('cmax') @@ -41,13 +46,14 @@ def get_data(self, element, ranges, style, **kwargs): if self.invert_axes: x0, y0, dx, dy = y0, x0, dy, dx array = array.T + return [dict(x0=x0, y0=y0, dx=dx, dy=dy, z=array)] class HeatMapPlot(HeatMapMixin, RasterPlot): def init_layout(self, key, element, ranges, **kwargs): - layout = super(HeatMapPlot, self).init_layout(key, element, ranges) + layout = super().init_layout(key, element, ranges) gridded = element.gridded xdim, ydim = gridded.dimensions()[:2] @@ -101,6 +107,11 @@ def get_data(self, element, ranges, style, **kwargs): class QuadMeshPlot(RasterPlot): + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. 
+ If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + def get_data(self, element, ranges, style, **kwargs): x, y, z = element.dimensions()[:3] irregular = element.interface.irregular(element, x) @@ -113,4 +124,5 @@ def get_data(self, element, ranges, style, **kwargs): if self.invert_axes: y, x = 'x', 'y' zdata = zdata.T + return [{x: xc, y: yc, 'z': zdata}] diff --git a/holoviews/plotting/plotly/renderer.py b/holoviews/plotting/plotly/renderer.py index a2df048a43..1290e89ea0 100644 --- a/holoviews/plotting/plotly/renderer.py +++ b/holoviews/plotting/plotly/renderer.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import base64 from io import BytesIO @@ -77,7 +75,7 @@ def get_plot_state(self_or_cls, obj, doc=None, renderer=None, **kwargs): Given a HoloViews Viewable return a corresponding figure dictionary. Allows cleaning the dictionary of any internal properties that were added """ - fig_dict = super(PlotlyRenderer, self_or_cls).get_plot_state(obj, renderer, **kwargs) + fig_dict = super().get_plot_state(obj, renderer, **kwargs) config = fig_dict.get('config', {}) # Remove internal properties (e.g. 
'_id', '_dim') diff --git a/holoviews/plotting/plotly/shapes.py b/holoviews/plotting/plotly/shapes.py index 71b733ec9a..8a3d854742 100644 --- a/holoviews/plotting/plotly/shapes.py +++ b/holoviews/plotting/plotly/shapes.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param import numpy as np diff --git a/holoviews/plotting/plotly/stats.py b/holoviews/plotting/plotly/stats.py index 8dd8bb9af7..9dfde60e88 100644 --- a/holoviews/plotting/plotly/stats.py +++ b/holoviews/plotting/plotly/stats.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param from .selection import PlotlyOverlaySelectionDisplay @@ -24,7 +22,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): return {'type': 'histogram2dcontour'} def graph_options(self, element, ranges, style, **kwargs): - opts = super(BivariatePlot, self).graph_options(element, ranges, style, **kwargs) + opts = super().graph_options(element, ranges, style, **kwargs) copts = self.get_color_opts(element.vdims[0], element, ranges, style) if self.ncontours: @@ -99,7 +97,7 @@ def get_data(self, element, ranges, style, **kwargs): return plots def get_extents(self, element, ranges, range_type='combined'): - return super(MultiDistributionPlot, self).get_extents( + return super().get_extents( element, ranges, range_type, 'categorical', element.vdims[0] ) @@ -135,7 +133,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): return {'type': 'box'} def graph_options(self, element, ranges, style, **kwargs): - options = super(BoxWhiskerPlot, self).graph_options(element, ranges, style, **kwargs) + options = super().graph_options(element, ranges, style, **kwargs) options['boxmean'] = self.mean options['jitter'] = self.jitter return options @@ -143,7 +141,6 @@ def graph_options(self, element, ranges, style, **kwargs): class ViolinPlot(MultiDistributionPlot): - box = param.Boolean(default=True, doc=""" Whether to draw a boxplot inside the violin""") @@ -161,7 
+158,7 @@ def trace_kwargs(cls, is_geo=False, **kwargs): return {'type': 'violin'} def graph_options(self, element, ranges, style, **kwargs): - options = super(ViolinPlot, self).graph_options( + options = super().graph_options( element, ranges, style, **kwargs ) options['meanline'] = {'visible': self.meanline} diff --git a/holoviews/plotting/plotly/tabular.py b/holoviews/plotting/plotly/tabular.py index 26889ed01e..ce4245de5e 100644 --- a/holoviews/plotting/plotly/tabular.py +++ b/holoviews/plotting/plotly/tabular.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, unicode_literals - import param from ...selection import ColorListSelectionDisplay @@ -29,7 +27,7 @@ def get_data(self, element, ranges, style, **kwargs): return [{'header': header, 'cells': cells}] def graph_options(self, element, ranges, style, **kwargs): - opts = super(TablePlot, self).graph_options(element, ranges, style, **kwargs) + opts = super().graph_options(element, ranges, style, **kwargs) # Transpose fill_color array so values apply by rows not column if 'fill' in opts.get('cells', {}): diff --git a/holoviews/plotting/plotly/tiles.py b/holoviews/plotting/plotly/tiles.py index 6cefeef26d..fea92d4092 100644 --- a/holoviews/plotting/plotly/tiles.py +++ b/holoviews/plotting/plotly/tiles.py @@ -45,7 +45,7 @@ def graph_options(self, element, ranges, style, **kwargs): return opts def get_extents(self, element, ranges, range_type='combined'): - extents = super(TilePlot, self).get_extents(element, ranges, range_type) + extents = super().get_extents(element, ranges, range_type) if (not self.overlaid and all(e is None or not np.isfinite(e) for e in extents) and range_type in ('combined', 'data')): x0, x1 = (-20037508.342789244, 20037508.342789244) @@ -61,4 +61,4 @@ def generate_plot(self, key, ranges, element=None, is_geo=False): """ Override to force is_geo to True """ - return super(TilePlot, self).generate_plot(key, ranges, element, is_geo=True) + return super().generate_plot(key, 
ranges, element, is_geo=True) diff --git a/holoviews/plotting/plotly/util.py b/holoviews/plotting/plotly/util.py index ae53217299..9f98aae56a 100644 --- a/holoviews/plotting/plotly/util.py +++ b/holoviews/plotting/plotly/util.py @@ -1,5 +1,3 @@ -from __future__ import division - import copy import re @@ -896,7 +894,7 @@ def configure_matching_axes_from_dims(fig, matching_prop='_dim'): def clean_internal_figure_properties(fig): """ Remove all HoloViews internal properties (those with leading underscores) from the - inupt figure. + input figure. Note: This function mutates the input figure diff --git a/holoviews/plotting/renderer.py b/holoviews/plotting/renderer.py index e87f3a3c1d..76572cd514 100644 --- a/holoviews/plotting/renderer.py +++ b/holoviews/plotting/renderer.py @@ -3,8 +3,6 @@ Public API for all plotting renderers supported by HoloViews, regardless of plotting package or backend. """ -from __future__ import unicode_literals, absolute_import - import base64 import os @@ -26,15 +24,17 @@ from panel import config from panel.io.notebook import ipywidget, load_notebook, render_model, render_mimebundle from panel.io.state import state +from panel.models.comm_manager import CommManager as PnCommManager from panel.pane import HoloViews as HoloViewsPane from panel.widgets.player import PlayerBase from panel.viewable import Viewable from pyviz_comms import CommManager, JupyterCommManager from ..core import Layout, HoloMap, AdjointLayout, DynamicMap +from ..core.data import disable_pipeline from ..core.io import Exporter from ..core.options import Store, StoreOptions, SkipRendering, Compositor -from ..core.util import basestring, unbound_dimensions, LooseVersion +from ..core.util import unbound_dimensions, LooseVersion from ..streams import Stream from . 
import Plot from .util import displayable, collate, initialize_dynamic @@ -108,7 +108,7 @@ class Renderer(Exporter): backend = param.String(doc=""" The full, lowercase name of the rendering backend or third - part plotting package used e.g 'matplotlib' or 'cairo'.""") + part plotting package used e.g. 'matplotlib' or 'cairo'.""") dpi = param.Integer(None, doc=""" The render resolution in dpi (dots per inch)""") @@ -183,8 +183,7 @@ class Renderer(Exporter): def __init__(self, **params): self.last_plot = None - super(Renderer, self).__init__(**params) - + super().__init__(**params) def __call__(self, obj, fmt='auto', **kwargs): plot, fmt = self._validate(obj, fmt) @@ -201,7 +200,6 @@ def __call__(self, obj, fmt='auto', **kwargs): data = self._apply_post_render_hooks(data, obj, fmt) return data, info - @bothmethod def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): """ @@ -227,7 +225,9 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): if not displayable(obj): obj = collate(obj) initialize_dynamic(obj) - obj = Compositor.map(obj, mode='data', backend=self_or_cls.backend) + + with disable_pipeline(): + obj = Compositor.map(obj, mode='data', backend=self_or_cls.backend) plot_opts = dict(self_or_cls.plot_options(obj, self_or_cls.size), **kwargs) if isinstance(obj, AdjointLayout): @@ -241,6 +241,18 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): else: plot = obj + # Trigger streams which were marked as requiring an update + triggers = [] + for p in plot.traverse(): + if not hasattr(p, '_trigger'): + continue + for trigger in p._trigger: + if trigger not in triggers: + triggers.append(trigger) + p._trigger = [] + for trigger in triggers: + Stream.trigger([trigger]) + if isinstance(self_or_cls, Renderer): self_or_cls.last_plot = plot @@ -253,7 +265,6 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): plot.document = doc return plot - @bothmethod def 
get_plot_state(self_or_cls, obj, renderer=None, **kwargs): """ @@ -263,7 +274,6 @@ def get_plot_state(self_or_cls, obj, renderer=None, **kwargs): obj = self_or_cls.get_plot(obj, renderer, **kwargs) return obj.state - def _validate(self, obj, fmt, **kwargs): """ Helper method to be used in the __call__ method to get a @@ -305,7 +315,6 @@ def _validate(self, obj, fmt, **kwargs): self.last_plot = plot return plot, fmt - def _apply_post_render_hooks(self, data, obj, fmt): """ Apply the post-render hooks to the data. @@ -319,7 +328,6 @@ def _apply_post_render_hooks(self, data, obj, fmt): "be applied:\n\n %s" % (hook, e)) return data - def html(self, obj, fmt=None, css=None, resources='CDN', **kwargs): """ Renders plot or data structure and wraps the output in HTML. @@ -328,7 +336,7 @@ def html(self, obj, fmt=None, css=None, resources='CDN', **kwargs): """ plot, fmt = self._validate(obj, fmt) figdata, _ = self(plot, fmt, **kwargs) - if isinstance(resources, basestring): + if isinstance(resources, str): resources = resources.lower() if css is None: css = self.css @@ -360,7 +368,6 @@ def html(self, obj, fmt=None, css=None, resources='CDN', **kwargs): html = tag.format(src=src, mime_type=mime_type, css=css) return html - def components(self, obj, fmt=None, comm=True, **kwargs): """ Returns data and metadata dictionaries containing HTML and JS @@ -372,70 +379,63 @@ def components(self, obj, fmt=None, comm=True, **kwargs): else: plot, fmt = self._validate(obj, fmt) - data, metadata = {}, {} - if isinstance(plot, Viewable): - registry = list(Stream.registry.items()) - objects = plot.object.traverse(lambda x: x) - dynamic, streams = False, False - for source in objects: - dynamic |= isinstance(source, DynamicMap) - streams |= any( - src is source or (src._plot_id is not None and src._plot_id == source._plot_id) - for src, streams in registry for s in streams - ) - embed = (not (dynamic or streams or self.widget_mode == 'live') or config.embed) - - # This part should be factored 
out in Panel and then imported - # here for HoloViews 2.0, which will be able to require a - # recent Panel version. - if embed or config.comms == 'default': - comm = self.comm_manager.get_server_comm() if comm else None - doc = Document() - with config.set(embed=embed): - model = plot.layout._render_model(doc, comm) - if embed: - return render_model(model, comm) - args = (model, doc, comm) - if panel_version > '0.9.3': - from panel.models.comm_manager import CommManager - ref = model.ref['id'] - manager = CommManager(comm_id=comm.id, plot_id=ref) - client_comm = self.comm_manager.get_client_comm( - on_msg=partial(plot._on_msg, ref, manager), - on_error=partial(plot._on_error, ref), - on_stdout=partial(plot._on_stdout, ref) - ) - manager.client_comm_id = client_comm.id - args = args + (manager,) - return render_mimebundle(*args) - - # Handle rendering object as ipywidget - widget = ipywidget(plot, combine_events=True) - if hasattr(widget, '_repr_mimebundle_'): - return widget._repr_mimebundle() - plaintext = repr(widget) - if len(plaintext) > 110: - plaintext = plaintext[:110] + '…' - data = { - 'text/plain': plaintext, - } - if widget._view_name is not None: - data['application/vnd.jupyter.widget-view+json'] = { - 'version_major': 2, - 'version_minor': 0, - 'model_id': widget._model_id - } - if config.comms == 'vscode': - # Unfortunately VSCode does not yet handle _repr_mimebundle_ - from IPython.display import display - display(data, raw=True) - return {'text/html': '
'}, {} - return data, {} - else: + if not isinstance(plot, Viewable): html = self._figure_data(plot, fmt, as_script=True, **kwargs) - data['text/html'] = html - - return (data, {MIME_TYPES['jlab-hv-exec']: metadata}) + return {'text/html': html}, {MIME_TYPES['jlab-hv-exec']: {}} + + registry = list(Stream.registry.items()) + objects = plot.object.traverse(lambda x: x) + dynamic, streams = False, False + for source in objects: + dynamic |= isinstance(source, DynamicMap) + streams |= any( + src is source or (src._plot_id is not None and src._plot_id == source._plot_id) + for src, streams in registry for s in streams + ) + embed = (not (dynamic or streams or self.widget_mode == 'live') or config.embed) + + if embed or config.comms == 'default': + return self._render_panel(plot, embed, comm) + return self._render_ipywidget(plot) + + def _render_panel(self, plot, embed=False, comm=True): + comm = self.comm_manager.get_server_comm() if comm else None + doc = Document() + with config.set(embed=embed): + model = plot.layout._render_model(doc, comm) + if embed: + return render_model(model, comm) + ref = model.ref['id'] + manager = PnCommManager(comm_id=comm.id, plot_id=ref) + client_comm = self.comm_manager.get_client_comm( + on_msg=partial(plot._on_msg, ref, manager), + on_error=partial(plot._on_error, ref), + on_stdout=partial(plot._on_stdout, ref) + ) + manager.client_comm_id = client_comm.id + return render_mimebundle(model, doc, comm, manager) + + def _render_ipywidget(self, plot): + # Handle rendering object as ipywidget + widget = ipywidget(plot, combine_events=True) + if hasattr(widget, '_repr_mimebundle_'): + return widget._repr_mimebundle() + plaintext = repr(widget) + if len(plaintext) > 110: + plaintext = plaintext[:110] + '…' + data = {'text/plain': plaintext} + if widget._view_name is not None: + data['application/vnd.jupyter.widget-view+json'] = { + 'version_major': 2, + 'version_minor': 0, + 'model_id': widget._model_id + } + if config.comms == 'vscode': + # 
Unfortunately VSCode does not yet handle _repr_mimebundle_ + from IPython.display import display + display(data, raw=True) + return {'text/html': '
'}, {} + return data, {} def static_html(self, obj, fmt=None, template=None): """ @@ -448,7 +448,6 @@ def static_html(self, obj, fmt=None, template=None): html_bytes.seek(0) return html_bytes.read() - @bothmethod def get_widget(self_or_cls, plot, widget_type, **kwargs): if widget_type == 'scrubber': @@ -464,7 +463,6 @@ def get_widget(self_or_cls, plot, widget_type, **kwargs): player.interval = interval return layout - @bothmethod def export_widgets(self_or_cls, obj, filename, fmt=None, template=None, json=False, json_path='', **kwargs): @@ -481,7 +479,6 @@ def export_widgets(self_or_cls, obj, filename, fmt=None, template=None, "registered widget types.") self_or_cls.get_widget(obj, fmt).save(filename) - @bothmethod def _widget_kwargs(self_or_cls): if self_or_cls.holomap in ('auto', 'widgets'): @@ -492,7 +489,6 @@ def _widget_kwargs(self_or_cls): loc = self_or_cls.widget_location or 'bottom' return {'widget_location': loc, 'widget_type': widget_type, 'center': True} - @bothmethod def app(self_or_cls, plot, show=False, new_window=False, websocket_origin=None, port=0): """ @@ -518,7 +514,6 @@ def app(self_or_cls, plot, show=False, new_window=False, websocket_origin=None, kwargs = {'notebook_url': websocket_origin} if websocket_origin else {} return pane.app(port=port, **kwargs) - @bothmethod def server_doc(self_or_cls, obj, doc=None): """ @@ -531,7 +526,6 @@ def server_doc(self_or_cls, obj, doc=None): **self_or_cls._widget_kwargs()) return obj.layout.server_doc(doc) - @classmethod def plotting_class(cls, obj): """ @@ -551,16 +545,6 @@ class needed to render it with the current renderer. 
"found".format(element_type.__name__)) return plotclass - - @classmethod - def html_assets(cls, core=True, extras=True, backends=None, script=False): - """ - Deprecated: No longer needed - """ - param.main.warning("Renderer.html_assets is deprecated as all " - "JS and CSS dependencies are now handled by " - "Panel.") - @classmethod def plot_options(cls, obj, percent_size): """ @@ -573,7 +557,6 @@ def plot_options(cls, obj, percent_size): """ raise NotImplementedError - @bothmethod def save(self_or_cls, obj, basename, fmt='auto', key={}, info={}, options=None, resources='inline', title=None, **kwargs): @@ -601,7 +584,7 @@ def save(self_or_cls, obj, basename, fmt='auto', key={}, info={}, resources = CDN elif resources.lower() == 'inline': resources = INLINE - if isinstance(basename, basestring): + if isinstance(basename, str): if title is None: title = os.path.basename(basename) if fmt in MIME_TYPES: @@ -629,7 +612,6 @@ def _save_prefix(self_or_cls, ext): "Hook to prefix content for instance JS when saving HTML" return - @bothmethod def get_size(self_or_cls, plot): """ @@ -650,7 +632,6 @@ def state(cls): """ yield - @classmethod def validate(cls, options): """ @@ -658,7 +639,6 @@ def validate(cls, options): """ return options - @classmethod def load_nb(cls, inline=True): """ @@ -677,7 +657,6 @@ def load_nb(cls, inline=True): cls.comm_manager = JupyterCommManager state._comm_manager = JupyterCommManager - @classmethod def _delete_plot(cls, plot_id): """ diff --git a/holoviews/plotting/util.py b/holoviews/plotting/util.py index 52c9a71c64..e1de7e8866 100644 --- a/holoviews/plotting/util.py +++ b/holoviews/plotting/util.py @@ -1,5 +1,3 @@ -from __future__ import unicode_literals, absolute_import, division - import re import traceback import warnings @@ -10,22 +8,27 @@ import numpy as np import param -from ..core import (HoloMap, DynamicMap, CompositeOverlay, Layout, - Overlay, GridSpace, NdLayout, NdOverlay, AdjointLayout) +from ..core import ( + HoloMap, DynamicMap, 
CompositeOverlay, Layout, Overlay, GridSpace, + NdLayout, NdOverlay, AdjointLayout +) from ..core.options import CallbackError, Cycle +from ..core.operation import Operation from ..core.ndmapping import item_check from ..core.spaces import get_nested_streams -from ..core.util import (match_spec, wrap_tuple, basestring, get_overlay_spec, - unique_iterator, closest_match, is_number, isfinite, - python2sort, disable_constant, arraylike_types) -from ..streams import LinkedStream +from ..core.util import ( + match_spec, wrap_tuple, get_overlay_spec, unique_iterator, + closest_match, is_number, isfinite, python2sort, disable_constant, + arraylike_types +) +from ..streams import LinkedStream, Params from ..util.transform import dim def displayable(obj): """ Predicate that returns whether the object is displayable or not - (i.e whether the object obeys the nesting hierarchy + (i.e. whether the object obeys the nesting hierarchy) """ if isinstance(obj, Overlay) and any(isinstance(o, (HoloMap, GridSpace, AdjointLayout)) for o in obj): @@ -180,7 +183,8 @@ def compute_overlayable_zorders(obj, path=[]): # If object branches but does not declare inputs (e.g. 
user defined # DynamicMaps returning (Nd)Overlay) add the items on the DynamicMap.last found = any(isinstance(p, DynamicMap) and p.callback._is_overlay for p in path) - linked = any(isinstance(s, LinkedStream) and s.linked for s in obj.streams) + linked = any(isinstance(s, (LinkedStream, Params)) and s.linked + for s in obj.streams) if (found or linked) and isoverlay and not isdynoverlay: offset = max(zorder_map.keys()) for z, o in enumerate(obj.last): @@ -410,7 +414,7 @@ def get_sideplot_ranges(plot, element, main, ranges): """ key = plot.current_key dims = element.dimensions() - dim = dims[0] if 'frequency' in dims[1].name else dims[1] + dim = dims[0] if 'frequency' in dims[1].name or 'count' in dims[1].name else dims[1] range_item = main if isinstance(main, HoloMap): if issubclass(main.type, CompositeOverlay): @@ -670,7 +674,7 @@ def _list_cmaps(provider=None, records=False): """ if provider is None: provider = providers - elif isinstance(provider, basestring): + elif isinstance(provider, str): if provider not in providers: raise ValueError('Colormap provider %r not recognized, must ' 'be one of %r' % (provider, providers)) @@ -690,14 +694,16 @@ def info(provider,names): else: mpl_cmaps = list(cm.cmaps_listed)+list(cm.datad) cmaps += info('matplotlib', mpl_cmaps) - cmaps += info('matplotlib', [cmap+'_r' for cmap in mpl_cmaps]) + cmaps += info('matplotlib', [cmap+'_r' for cmap in mpl_cmaps + if not cmap.endswith('_r')]) except: pass if 'bokeh' in provider: try: from bokeh import palettes cmaps += info('bokeh', palettes.all_palettes) - cmaps += info('bokeh', [p+'_r' for p in palettes.all_palettes]) + cmaps += info('bokeh', [p+'_r' for p in palettes.all_palettes + if not p.endswith('_r')]) except: pass if 'colorcet' in provider: @@ -705,8 +711,8 @@ def info(provider,names): from colorcet import palette_n, glasbey_hv cet_maps = palette_n.copy() cet_maps['glasbey_hv'] = glasbey_hv # Add special hv-specific map - cmaps += info('colorcet', cet_maps) - cmaps += 
info('colorcet', [p+'_r' for p in cet_maps]) + cmaps += info('colorcet', cet_maps) + cmaps += info('colorcet', [p+'_r' for p in cet_maps if not p.endswith('_r')]) except: pass return sorted(unique_iterator(cmaps)) @@ -893,7 +899,7 @@ def process_cmap(cmap, ncolors=None, provider=None, categorical=False): palette = list(cmap) elif isinstance(cmap, list): palette = cmap - elif isinstance(cmap, basestring): + elif isinstance(cmap, str): mpl_cmaps = _list_cmaps('matplotlib') bk_cmaps = _list_cmaps('bokeh') cet_cmaps = _list_cmaps('colorcet') @@ -982,7 +988,7 @@ def scale_fontsize(size, scaling): Scales a numeric or string font size. """ ext = None - if isinstance(size, basestring): + if isinstance(size, str): match = re.match(r"[-+]?\d*\.\d+|\d+", size) if match: value = match.group() @@ -1096,6 +1102,39 @@ def hex2rgb(hex): return [int(hex[i:i+2], 16) for i in range(1,6,2)] +class apply_nodata(Operation): + + nodata = param.Integer(default=None, doc=""" + Optional missing-data value for integer data. 
+ If non-None, data with this value will be replaced with NaN so + that it is transparent (by default) when plotted.""") + + def _replace_value(self, data): + "Replace `nodata` value in data with NaN, if specified in opts" + data = data.astype('float64') + mask = data!=self.p.nodata + if hasattr(data, 'where'): + return data.where(mask, np.NaN) + return np.where(mask, data, np.NaN) + + def _process(self, element, key=None): + if self.p.nodata is None: + return element + if hasattr(element, 'interface'): + vdim = element.vdims[0] + dtype = element.interface.dtype(element, vdim) + if dtype.kind not in 'iu': + return element + transform = dim(vdim, self._replace_value) + return element.transform(**{vdim.name: transform}) + else: + array = element.dimension_values(2, flat=False).T + if array.dtype.kind not in 'iu': + return element + array = array.astype('float64') + return element.clone(self._replace_value(array)) + + RGB_HEX_REGEX = re.compile(r'^#(?:[0-9a-fA-F]{3}){1,2}$') COLOR_ALIASES = { diff --git a/holoviews/selection.py b/holoviews/selection.py index 998aeb01dd..e748b8b82e 100644 --- a/holoviews/selection.py +++ b/holoviews/selection.py @@ -10,6 +10,7 @@ from .core.element import Element, Layout from .core.options import CallbackError, Store from .core.overlay import NdOverlay, Overlay +from .core.layout import AdjointLayout from .core.spaces import GridSpace from .streams import ( Stream, SelectionExprSequence, CrossFilterSet, @@ -31,7 +32,7 @@ class _SelectionExprLayers(Derived): exprs = param.List(constant=True) def __init__(self, expr_override, cross_filter_set, **params): - super(_SelectionExprLayers, self).__init__( + super().__init__( [expr_override, cross_filter_set], exclusive=True, **params ) @@ -46,8 +47,6 @@ def transform_function(cls, stream_values, constants): return {"exprs": [True, cross_filter_set_values["selection_expr"]]} - - _Styles = Stream.define('Styles', colors=[], alpha=1.) 
_RegionElement = Stream.define("RegionElement", region_element=None) @@ -78,7 +77,7 @@ class _base_link_selections(param.ParameterizedFunction): @bothmethod def instance(self_or_cls, **params): - inst = super(_base_link_selections, self_or_cls).instance(**params) + inst = super().instance(**params) # Init private properties inst._cross_filter_stream = CrossFilterSet(mode=inst.cross_filter_mode) @@ -106,13 +105,16 @@ def _register(self, hvobj): Register an Element or DynamicMap that may be capable of generating selection expressions in response to user interaction events """ + from .element import Table + # Create stream that produces element that displays region of selection selection_expr_seq = SelectionExprSequence( - hvobj, mode=self.selection_mode, include_region=self.show_regions, + hvobj, mode=self.selection_mode, + include_region=self.show_regions, index_cols=self.index_cols ) self._selection_expr_streams[hvobj] = selection_expr_seq - self._cross_filter_stream.append_input_stream(self._selection_expr_streams[hvobj]) + self._cross_filter_stream.append_input_stream(selection_expr_seq) self._plot_reset_streams[hvobj] = PlotReset(source=hvobj) @@ -122,8 +124,9 @@ def clear_stream_history(resetting, stream=selection_expr_seq.history_stream): stream.clear_history() stream.event() - mode_stream = SelectMode(source=hvobj) - mode_stream.param.watch(self._update_mode, 'mode') + if not isinstance(hvobj, Table): + mode_stream = SelectMode(source=hvobj) + mode_stream.param.watch(self._update_mode, 'mode') self._plot_reset_streams[hvobj].param.watch( clear_stream_history, ['resetting'] @@ -170,6 +173,10 @@ def _selection_transform(self, hvobj, operations=()): self._selection_transform(el, operations=operations) for el in callback.inputs ]).collate() + elif getattr(hvobj.callback, "name", None) == "dynamic_operation": + obj = callback.inputs[0] + return self._selection_transform(obj, operations=operations).apply( + callback.operation) else: # This is a DynamicMap that we 
don't know how to recurse into. return hvobj @@ -184,7 +191,7 @@ def _selection_transform(self, hvobj, operations=()): self._selection_expr_streams.get(element, None), cache=self._cache ) return hvobj - elif isinstance(hvobj, (Layout, Overlay, NdOverlay, GridSpace)): + elif isinstance(hvobj, (Layout, Overlay, NdOverlay, GridSpace, AdjointLayout)): data = OrderedDict([(k, self._selection_transform(v, operations)) for k, v in hvobj.items()]) if isinstance(hvobj, NdOverlay): @@ -269,7 +276,7 @@ class link_selections(_base_link_selections): @bothmethod def instance(self_or_cls, **params): - inst = super(link_selections, self_or_cls).instance(**params) + inst = super().instance(**params) # Initialize private properties inst._obj_selections = {} @@ -319,6 +326,27 @@ def selection_param(self, data): self._datasets.append((pipe, data, raw)) return pipe.param.data + def filter(self, data, selection_expr=None): + """ + Filters the provided data based on the current state of the + current selection expression. + + Args: + data: A Dataset type or data which can be cast to a Dataset + selection_expr: Optionally provide your own selection expression + + Returns: + The filtered data + """ + expr = self.selection_expr if selection_expr is None else selection_expr + if expr is None: + return data + is_dataset = isinstance(data, Dataset) + if not is_dataset: + data = Dataset(data) + filtered = data[expr.apply(data)] + return filtered if is_dataset else filtered.data + @bothmethod def _install_param_callbacks(self_or_cls, inst): def update_selection_mode(*_): diff --git a/holoviews/streams.py b/holoviews/streams.py index f303344898..20152f1d31 100644 --- a/holoviews/streams.py +++ b/holoviews/streams.py @@ -4,6 +4,7 @@ server-side or in Javascript in the Jupyter notebook (client-side). 
""" +import sys import weakref from numbers import Number from collections import defaultdict @@ -19,7 +20,7 @@ from .core.ndmapping import UniformNdMapping # Types supported by Pointer derived streams -pointer_types = (Number, util.basestring, tuple)+util.datetime_types +pointer_types = (Number, str, tuple)+util.datetime_types class _SkipTrigger(): pass @@ -37,13 +38,25 @@ def triggering_streams(streams): stream._triggering = True try: yield - except: - raise finally: for stream in streams: stream._triggering = False +def streams_list_from_dict(streams): + "Converts a streams dictionary into a streams list" + params = {} + for k, v in streams.items(): + if 'panel' in sys.modules: + from panel.depends import param_value_if_widget + v = param_value_if_widget(v) + if isinstance(v, param.Parameter) and v.owner is not None: + params[k] = v + else: + raise TypeError('Cannot handle value %r in streams dictionary' % v) + return Params.from_params(params) + + class Stream(param.Parameterized): """ A Stream is simply a parameterized object with parameters that @@ -258,7 +271,7 @@ def __init__(self, rename={}, source=None, subscribers=[], linked=False, # indicate where the event originated from self._metadata = {} - super(Stream, self).__init__(**params) + super().__init__(**params) self._rename = self._validate_rename(rename) if source is not None: if source in self.registry: @@ -466,7 +479,7 @@ class Pipe(Stream): Arbitrary data being streamed to a DynamicMap callback.""") def __init__(self, data=None, memoize=False, **params): - super(Pipe, self).__init__(data=data, **params) + super().__init__(data=data, **params) self._memoize_counter = 0 def send(self, data): @@ -507,6 +520,9 @@ class Buffer(Pipe): is allowed while streaming. 
""" + data = param.Parameter(default=None, constant=True, doc=""" + Arbitrary data being streamed to a DynamicMap callback.""") + def __init__(self, data, length=1000, index=True, following=True, **params): if (util.pd and isinstance(data, util.pd.DataFrame)): example = data @@ -542,7 +558,7 @@ def __init__(self, data, length=1000, index=True, following=True, **params): if index and (util.pd and isinstance(example, util.pd.DataFrame)): example = example.reset_index() params['data'] = example - super(Buffer, self).__init__(**params) + super().__init__(**params) self.length = length self.following = following self._chunk_length = 0 @@ -633,7 +649,7 @@ def update(self, **kwargs): self.verify(data) kwargs['data'] = self._concat(data) self._count += 1 - return super(Buffer, self).update(**kwargs) + return super().update(**kwargs) @property @@ -676,15 +692,20 @@ def __init__(self, parameterized=None, parameters=None, watch=True, watch_only=F rename.update({(o, k): v for o in owners}) params['rename'] = rename + if 'linked' not in params: + for p in parameters: + if isinstance(p.owner, (LinkedStream, Params)) and p.owner.linked: + params['linked'] = True + self._watch_only = watch_only - super(Params, self).__init__(parameterized=parameterized, parameters=parameters, **params) + super().__init__(parameterized=parameterized, parameters=parameters, **params) self._memoize_counter = 0 self._events = [] self._watchers = [] if watch: # Subscribe to parameters keyfn = lambda x: id(x.owner) - for _, group in groupby(sorted(parameters, key=keyfn)): + for _, group in groupby(sorted(parameters, key=keyfn), key=keyfn): group = list(group) watcher = group[0].owner.param.watch(self._watcher, [p.name for p in group]) self._watchers.append(watcher) @@ -695,7 +716,6 @@ def unwatch(self): watcher.inst.param.unwatch(watcher) self._watchers.clear() - @classmethod def from_params(cls, params, **kwargs): """Returns Params streams given a dictionary of parameters @@ -711,7 +731,7 @@ def 
from_params(cls, params, **kwargs): for _, group in groupby(sorted(params.items(), key=key_fn), key_fn): group = list(group) inst = [p.owner for _, p in group][0] - if not isinstance(inst, param.Parameterized): + if inst is None: continue names = [p.name for _, p in group] rename = {p.name: n for n, p in group} @@ -758,8 +778,21 @@ def reset(self): pass def update(self, **kwargs): - for k, v in kwargs.items(): - setattr(self.parameterized, k, v) + if self._rename: + owner_updates = defaultdict(dict) + for (owner, pname), rname in self._rename.items(): + if rname in kwargs: + owner_updates[owner][pname] = kwargs[rname] + for owner, updates in owner_updates.items(): + if isinstance(owner, Stream): + owner.update(**updates) + else: + owner.param.set_param(**updates) + elif isinstance(self.parameterized, Stream): + self.parameterized.update(**kwargs) + return + else: + self.parameterized.param.set_param(**kwargs) @property def contents(self): @@ -778,6 +811,14 @@ class ParamMethod(Params): change. 
""" + parameterized = param.ClassSelector(class_=(param.Parameterized, + param.parameterized.ParameterizedMetaclass), + constant=True, allow_None=True, doc=""" + Parameterized instance to watch for parameter changes.""") + + parameters = param.List([], constant=True, doc=""" + Parameters on the parameterized to watch.""") + def __init__(self, parameterized, parameters=None, watch=True, **params): if not util.is_param_method(parameterized): raise ValueError('ParamMethod stream expects a method on a ' @@ -789,7 +830,7 @@ def __init__(self, parameterized, parameters=None, watch=True, **params): parameters = [p.pobj for p in parameterized.param.params_depended_on(method.__name__)] params['watch_only'] = True - super(ParamMethod, self).__init__(parameterized, parameters, watch, **params) + super().__init__(parameterized, parameters, watch, **params) class Derived(Stream): @@ -800,7 +841,7 @@ class Derived(Stream): If exclusive=True, then all streams except the most recently updated are cleared. 
""" def __init__(self, input_streams, exclusive=False, **params): - super(Derived, self).__init__(**params) + super().__init__(**params) self.input_streams = [] self._updating = set() self._register_streams(input_streams) @@ -897,7 +938,7 @@ class History(Stream): List containing the historical values of the input stream""") def __init__(self, input_stream, **params): - super(History, self).__init__(**params) + super().__init__(**params) self.input_stream = input_stream self._register_input_stream() # Trigger event on input stream after registering so that current value is @@ -954,7 +995,7 @@ def __init__(self, source, include_region=True, **params): ) input_streams = self._build_selection_streams(source) - super(SelectionExpr, self).__init__( + super().__init__( source=source, input_streams=input_streams, exclusive=True, **params ) @@ -992,7 +1033,7 @@ def transform(self): if (isinstance(stream, Selection1D) and stream._triggering and not self._index_cols): return - return super(SelectionExpr, self).transform() + return super().transform() @classmethod def transform_function(cls, stream_values, constants): @@ -1080,7 +1121,7 @@ def __init__( self.history_stream = History(sel_expr) input_streams = [self.history_stream] - super(SelectionExprSequence, self).__init__( + super().__init__( source=source, input_streams=input_streams, **params ) @@ -1094,7 +1135,7 @@ def constants(self): def reset(self): self.input_streams[0].clear_history() - super(SelectionExprSequence, self).reset() + super().reset() @classmethod def transform_function(cls, stream_values, constants): @@ -1152,7 +1193,7 @@ def __init__(self, selection_streams=(), mode="intersection", index_cols=None, * self._index_cols = index_cols input_streams = list(selection_streams) exclusive = mode == "overwrite" - super(CrossFilterSet, self).__init__( + super().__init__( input_streams, exclusive=exclusive, **params ) @@ -1175,7 +1216,7 @@ def constants(self): } def reset(self): - super(CrossFilterSet, 
self).reset() + super().reset() for stream in self.input_streams: stream.reset() @@ -1215,7 +1256,7 @@ class LinkedStream(Stream): """ def __init__(self, linked=True, **params): - super(LinkedStream, self).__init__(linked=linked, **params) + super().__init__(linked=linked, **params) class PointerX(LinkedStream): @@ -1273,6 +1314,13 @@ class Draw(PointerXY): A series of updating x/y-positions when drawing, together with the current stroke count """ + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") stroke_count = param.Integer(default=0, constant=True, doc=""" The current drawing stroke count. Increments every time a new @@ -1283,32 +1331,74 @@ class SingleTap(PointerXY): The x/y-position of a single tap or click in data coordinates. """ + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class Tap(PointerXY): """ The x/y-position of a tap or click in data coordinates. """ + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class DoubleTap(PointerXY): """ The x/y-position of a double-tap or -click in data coordinates. 
""" + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class PressUp(PointerXY): """ The x/y position of a mouse pressup event in data coordinates. """ + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class PanEnd(PointerXY): """The x/y position of a the end of a pan event in data coordinates. """ + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class MouseEnter(PointerXY): """ The x/y-position where the mouse/cursor entered the plot area in data coordinates. """ + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class MouseLeave(PointerXY): @@ -1316,6 +1406,13 @@ class MouseLeave(PointerXY): The x/y-position where the mouse/cursor entered the plot area in data coordinates. 
""" + x = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the x-axis in data coordinates""") + + y = param.ClassSelector(class_=pointer_types, default=None, + constant=True, doc=""" + Pointer position along the y-axis in data coordinates""") class PlotSize(LinkedStream): @@ -1403,6 +1500,10 @@ class SelectionXY(BoundsXY): selections. """ + bounds = param.Tuple(default=None, constant=True, length=4, + allow_None=True, doc=""" + Bounds defined as (left, bottom, right, top) tuple.""") + x_selection = param.ClassSelector(class_=(tuple, list), allow_None=True, constant=True, doc=""" The current selection along the x-axis, either a numerical range @@ -1454,7 +1555,7 @@ class PlotReset(LinkedStream): Whether a reset event is being signalled.""") def __init__(self, *args, **params): - super(PlotReset, self).__init__(self, *args, **dict(params, transient=True)) + super().__init__(self, *args, **dict(params, transient=True)) class CDSStream(LinkedStream): @@ -1494,6 +1595,12 @@ class PointDraw(CDSStream): An optional tooltip to override the default """ + data = param.Dict(constant=True, doc=""" + Data synced from Bokeh ColumnDataSource supplied as a + dictionary of columns, where each column is a list of values + (for point-like data) or list of lists of values (for + path-like data).""") + def __init__(self, empty_value=None, add=True, drag=True, num_objects=0, styles={}, tooltip=None, **params): self.add = add @@ -1503,7 +1610,7 @@ def __init__(self, empty_value=None, add=True, drag=True, num_objects=0, self.styles = styles self.tooltip = tooltip self.styles = styles - super(PointDraw, self).__init__(**params) + super().__init__(**params) @property def element(self): @@ -1532,6 +1639,12 @@ class CurveEdit(PointDraw): An optional tooltip to override the default """ + data = param.Dict(constant=True, doc=""" + Data synced from Bokeh ColumnDataSource supplied as a + dictionary of columns, where each column is a list of 
values + (for point-like data) or list of lists of values (for + path-like data).""") + def __init__(self, style={}, tooltip=None, **params): self.style = style or {'size': 10} self.tooltip = tooltip @@ -1569,6 +1682,12 @@ class PolyDraw(CDSStream): line_alpha, size, etc. """ + data = param.Dict(constant=True, doc=""" + Data synced from Bokeh ColumnDataSource supplied as a + dictionary of columns, where each column is a list of values + (for point-like data) or list of lists of values (for + path-like data).""") + def __init__(self, empty_value=None, drag=True, num_objects=0, show_vertices=False, vertex_style={}, styles={}, tooltip=None, **params): @@ -1579,7 +1698,7 @@ def __init__(self, empty_value=None, drag=True, num_objects=0, self.vertex_style = vertex_style self.styles = styles self.tooltip = tooltip - super(PolyDraw, self).__init__(**params) + super().__init__(**params) @property def element(self): @@ -1622,12 +1741,18 @@ class FreehandDraw(CDSStream): An optional tooltip to override the default """ + data = param.Dict(constant=True, doc=""" + Data synced from Bokeh ColumnDataSource supplied as a + dictionary of columns, where each column is a list of values + (for point-like data) or list of lists of values (for + path-like data).""") + def __init__(self, empty_value=None, num_objects=0, styles={}, tooltip=None, **params): self.empty_value = empty_value self.num_objects = num_objects self.styles = styles self.tooltip = tooltip - super(FreehandDraw, self).__init__(**params) + super().__init__(**params) @property def element(self): @@ -1670,12 +1795,18 @@ class BoxEdit(CDSStream): An optional tooltip to override the default """ + data = param.Dict(constant=True, doc=""" + Data synced from Bokeh ColumnDataSource supplied as a + dictionary of columns, where each column is a list of values + (for point-like data) or list of lists of values (for + path-like data).""") + def __init__(self, empty_value=None, num_objects=0, styles={}, tooltip=None, **params): 
self.empty_value = empty_value self.num_objects = num_objects self.styles = styles self.tooltip = tooltip - super(BoxEdit, self).__init__(**params) + super().__init__(**params) @property def element(self): @@ -1726,6 +1857,12 @@ class PolyEdit(PolyDraw): line_alpha, size, etc. """ + data = param.Dict(constant=True, doc=""" + Data synced from Bokeh ColumnDataSource supplied as a + dictionary of columns, where each column is a list of values + (for point-like data) or list of lists of values (for + path-like data).""") + def __init__(self, vertex_style={}, shared=True, **params): self.shared = shared - super(PolyEdit, self).__init__(vertex_style=vertex_style, **params) + super().__init__(vertex_style=vertex_style, **params) diff --git a/holoviews/tests/core/data/base.py b/holoviews/tests/core/data/base.py index e77ce2ba0a..d5d531735f 100644 --- a/holoviews/tests/core/data/base.py +++ b/holoviews/tests/core/data/base.py @@ -83,7 +83,7 @@ def init_data(self): class HomogeneousColumnTests(object): """ Tests for data formats that require all dataset to have the same - type (e.g numpy arrays) + type (e.g. 
numpy arrays) """ __test__ = False @@ -429,7 +429,7 @@ def init_column_data(self): 'weight':self.weight, 'height':self.height}, kdims=self.alias_kdims, vdims=self.alias_vdims) - super(HeterogeneousColumnTests, self).init_column_data() + super().init_column_data() self.ys = np.linspace(0, 1, 11) self.zs = np.sin(self.xs) self.dataset_ht = Dataset({'x':self.xs, 'y':self.ys}, @@ -530,6 +530,10 @@ def test_dataset_mixed_type_range(self): ds = Dataset((['A', 'B', 'C', None],), 'A') self.assertEqual(ds.range(0), ('A', 'C')) + def test_dataset_nodata_range(self): + table = self.table.clone(vdims=[Dimension('Weight', nodata=10), 'Height']) + self.assertEqual(table.range('Weight'), (15, 18)) + def test_dataset_sort_vdim_ht(self): dataset = Dataset({'x':self.xs, 'y':-self.ys}, kdims=['x'], vdims=['y']) @@ -687,28 +691,28 @@ def test_dataset_index_row_gender_female(self): 'Weight':[10], 'Height':[0.8]}, kdims=self.kdims, vdims=self.vdims) row = self.table['F',:] - self.assertEquals(row, indexed) + self.assertEqual(row, indexed) def test_dataset_index_rows_gender_male(self): row = self.table['M',:] indexed = Dataset({'Gender':['M', 'M'], 'Age':[10, 16], 'Weight':[15,18], 'Height':[0.8,0.6]}, kdims=self.kdims, vdims=self.vdims) - self.assertEquals(row, indexed) + self.assertEqual(row, indexed) def test_dataset_select_rows_gender_male(self): row = self.table.select(Gender='M') indexed = Dataset({'Gender':['M', 'M'], 'Age':[10, 16], 'Weight':[15,18], 'Height':[0.8,0.6]}, kdims=self.kdims, vdims=self.vdims) - self.assertEquals(row, indexed) + self.assertEqual(row, indexed) def test_dataset_select_rows_gender_male_expr(self): row = self.table.select(selection_expr=dim('Gender') == 'M') indexed = Dataset({'Gender': ['M', 'M'], 'Age': [10, 16], 'Weight': [15, 18], 'Height': [0.8,0.6]}, kdims=self.kdims, vdims=self.vdims) - self.assertEquals(row, indexed) + self.assertEqual(row, indexed) def test_dataset_select_rows_gender_male_alias(self): row = self.alias_table.select(Gender='M') @@ 
-716,26 +720,26 @@ def test_dataset_select_rows_gender_male_alias(self): indexed = Dataset({'gender':['M', 'M'], 'age':[10, 16], 'weight':[15,18], 'height':[0.8,0.6]}, kdims=self.alias_kdims, vdims=self.alias_vdims) - self.assertEquals(row, indexed) - self.assertEquals(alias_row, indexed) + self.assertEqual(row, indexed) + self.assertEqual(alias_row, indexed) def test_dataset_index_row_age(self): indexed = Dataset({'Gender':['F'], 'Age':[12], 'Weight':[10], 'Height':[0.8]}, kdims=self.kdims, vdims=self.vdims) - self.assertEquals(self.table[:, 12], indexed) + self.assertEqual(self.table[:, 12], indexed) def test_dataset_index_item_table(self): indexed = Dataset({'Gender':['F'], 'Age':[12], 'Weight':[10], 'Height':[0.8]}, kdims=self.kdims, vdims=self.vdims) - self.assertEquals(self.table['F', 12], indexed) + self.assertEqual(self.table['F', 12], indexed) def test_dataset_index_value1(self): - self.assertEquals(self.table['F', 12, 'Weight'], 10) + self.assertEqual(self.table['F', 12, 'Weight'], 10) def test_dataset_index_value2(self): - self.assertEquals(self.table['F', 12, 'Height'], 0.8) + self.assertEqual(self.table['F', 12, 'Height'], 0.8) def test_dataset_index_column_ht(self): self.compare_arrays(self.dataset_ht['y'], self.ys) @@ -745,18 +749,18 @@ def test_dataset_boolean_index(self): indexed = Dataset({'Gender':['M', 'M'], 'Age':[10, 16], 'Weight':[15,18], 'Height':[0.8,0.6]}, kdims=self.kdims, vdims=self.vdims) - self.assertEquals(row, indexed) + self.assertEqual(row, indexed) def test_dataset_value_dim_index(self): row = self.table[:, :, 'Weight'] indexed = Dataset({'Gender':['M', 'M', 'F'], 'Age':[10, 16, 12], 'Weight':[15,18, 10]}, kdims=self.kdims, vdims=self.vdims[:1]) - self.assertEquals(row, indexed) + self.assertEqual(row, indexed) def test_dataset_value_dim_scalar_index(self): row = self.table['M', 10, 'Weight'] - self.assertEquals(row, 15) + self.assertEqual(row, 15) # Tabular indexing @@ -983,6 +987,10 @@ def test_select_tuple(self): ) 
self.assertEqual(self.dataset_grid.select(y=(0, 0.25)), ds) + def test_nodata_range(self): + ds = self.dataset_grid.clone(vdims=[Dimension('z', nodata=0)]) + self.assertEqual(ds.range('z'), (1, 5)) + def test_dataset_ndloc_index(self): xs, ys = np.linspace(0.12, 0.81, 10), np.linspace(0.12, 0.391, 5) arr = np.arange(10)*np.arange(5)[np.newaxis].T diff --git a/holoviews/tests/core/data/testarrayinterface.py b/holoviews/tests/core/data/test_arrayinterface.py similarity index 100% rename from holoviews/tests/core/data/testarrayinterface.py rename to holoviews/tests/core/data/test_arrayinterface.py diff --git a/holoviews/tests/core/data/testbinneddatasets.py b/holoviews/tests/core/data/test_binneddatasets.py similarity index 99% rename from holoviews/tests/core/data/testbinneddatasets.py rename to holoviews/tests/core/data/test_binneddatasets.py index da20b08d61..ea0fbcf792 100644 --- a/holoviews/tests/core/data/testbinneddatasets.py +++ b/holoviews/tests/core/data/test_binneddatasets.py @@ -167,7 +167,7 @@ def test_qmesh_transform_replace_vdim(self): expected = QuadMesh((self.xs, self.ys, self.zs*2)) self.assertEqual(expected, transformed) - + class Irregular2DBinsTest(ComparisonTestCase): diff --git a/holoviews/tests/core/data/testcudfinterface.py b/holoviews/tests/core/data/test_cudfinterface.py similarity index 99% rename from holoviews/tests/core/data/testcudfinterface.py rename to holoviews/tests/core/data/test_cudfinterface.py index 89c0b4c3c4..a8195856a2 100644 --- a/holoviews/tests/core/data/testcudfinterface.py +++ b/holoviews/tests/core/data/test_cudfinterface.py @@ -26,7 +26,7 @@ class cuDFInterfaceTests(HeterogeneousColumnTests, InterfaceTests): __test__ = True def setUp(self): - super(cuDFInterfaceTests, self).setUp() + super().setUp() logging.getLogger('numba.cuda.cudadrv.driver').setLevel(30) def test_dataset_2D_aggregate_spread_fn_with_duplicates(self): diff --git a/holoviews/tests/core/data/testdaskinterface.py 
b/holoviews/tests/core/data/test_daskinterface.py similarity index 98% rename from holoviews/tests/core/data/testdaskinterface.py rename to holoviews/tests/core/data/test_daskinterface.py index 5997dc6af6..ea4dd0ec05 100644 --- a/holoviews/tests/core/data/testdaskinterface.py +++ b/holoviews/tests/core/data/test_daskinterface.py @@ -11,7 +11,7 @@ from holoviews.core.data import Dataset from holoviews.util.transform import dim -from .testpandasinterface import BasePandasInterfaceTests +from .test_pandasinterface import BasePandasInterfaceTests class DaskDatasetTest(BasePandasInterfaceTests): diff --git a/holoviews/tests/core/data/testdictinterface.py b/holoviews/tests/core/data/test_dictinterface.py similarity index 96% rename from holoviews/tests/core/data/testdictinterface.py rename to holoviews/tests/core/data/test_dictinterface.py index 3bbdb039a2..002be888c3 100644 --- a/holoviews/tests/core/data/testdictinterface.py +++ b/holoviews/tests/core/data/test_dictinterface.py @@ -1,5 +1,3 @@ -import sys - from collections import OrderedDict import numpy as np @@ -27,7 +25,7 @@ def test_dataset_simple_dict_sorted(self): def test_dataset_dataset_ht_dtypes(self): ds = self.table - str_type = '= 3 else 'S1' + str_type = '') def test_partial_name(self): - py2match = '') def test_generator_name(self): @@ -249,7 +244,7 @@ def fn(A,B): return Scatter([(A,2)], label=A) regexp="Callable 'fn' accepts more positional arguments than there are kdims and stream parameters" - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(fn, kdims=['A']) @@ -298,7 +293,7 @@ def fn(x=1, y=2, B='default'): xy = streams.PointerXY(x=1, y=2) regexp = "Callback 'fn' signature over (.+?) 
does not accommodate required kdims" - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(fn, kdims=['A'], streams=[xy]) def test_dynamic_split_mismatched_kdims(self): @@ -322,7 +317,7 @@ def fn(x, y, B): xy = streams.PointerXY(x=1, y=2) regexp = ("Unmatched positional kdim arguments only allowed " "at the start of the signature") - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(fn, kdims=['A'], streams=[xy]) def test_dynamic_split_args_and_kwargs(self): diff --git a/holoviews/tests/core/testcollation.py b/holoviews/tests/core/test_collation.py similarity index 100% rename from holoviews/tests/core/testcollation.py rename to holoviews/tests/core/test_collation.py diff --git a/holoviews/tests/core/testcomposites.py b/holoviews/tests/core/test_composites.py similarity index 99% rename from holoviews/tests/core/testcomposites.py rename to holoviews/tests/core/test_composites.py index f042700adb..305b75146c 100644 --- a/holoviews/tests/core/testcomposites.py +++ b/holoviews/tests/core/test_composites.py @@ -1,5 +1,5 @@ """ -Test cases for the composite types built with + and * i.e Layout +Test cases for the composite types built with + and *, i.e. Layout and Overlay (does *not* test HoloMaps). 
""" @@ -26,7 +26,7 @@ def test_element_init(self): class LayoutTestCase(ElementTestCase): def setUp(self): - super(LayoutTestCase, self).setUp() + super().setUp() def test_layouttree_keys_1(self): t = self.el1 + self.el2 @@ -198,9 +198,6 @@ class OverlayTestCase(ElementTestCase): associated with * instead of the + operator) """ - def setUp(self): - super(OverlayTestCase, self).setUp() - def test_overlay_keys(self): t = self.el1 * self.el2 self.assertEqual(t.keys(), [('Element', 'I'), ('Element', 'II')]) diff --git a/holoviews/tests/core/testdatasetproperty.py b/holoviews/tests/core/test_datasetproperty.py similarity index 99% rename from holoviews/tests/core/testdatasetproperty.py rename to holoviews/tests/core/test_datasetproperty.py index a9dd880efc..1c6324a75f 100644 --- a/holoviews/tests/core/testdatasetproperty.py +++ b/holoviews/tests/core/test_datasetproperty.py @@ -337,8 +337,9 @@ def test_iloc_curve(self): class NdlocTestCase(DatasetPropertyTestCase): + def setUp(self): - super(NdlocTestCase, self).setUp() + super().setUp() self.ds_grid = Dataset( (np.arange(4), np.arange(3), @@ -631,7 +632,7 @@ def test_add_dimension_dataset(self): class HistogramTestCase(DatasetPropertyTestCase): def setUp(self): - super(HistogramTestCase, self).setUp() + super().setUp() self.hist = self.ds.hist('a', adjoin=False, normed=False) def test_construction(self): @@ -709,7 +710,7 @@ def test_hist_to_curve(self): class DistributionTestCase(DatasetPropertyTestCase): def setUp(self): - super(DistributionTestCase, self).setUp() + super().setUp() self.distribution = self.ds.to(Distribution, kdims='a', groupby=[]) def test_distribution_dataset(self): @@ -727,7 +728,7 @@ class DatashaderTestCase(DatasetPropertyTestCase): def setUp(self): if None in (rasterize, datashade, dynspread): raise SkipTest('Datashader could not be imported and cannot be tested.') - super(DatashaderTestCase, self).setUp() + super().setUp() def test_rasterize_curve(self): img = rasterize( diff --git 
a/holoviews/tests/core/testdecollation.py b/holoviews/tests/core/test_decollation.py similarity index 98% rename from holoviews/tests/core/testdecollation.py rename to holoviews/tests/core/test_decollation.py index 8af371c828..460e832152 100644 --- a/holoviews/tests/core/testdecollation.py +++ b/holoviews/tests/core/test_decollation.py @@ -27,7 +27,7 @@ class XY(Stream): class TestDecollation(ComparisonTestCase): def setUp(self): - from holoviews.tests.teststreams import Sum, Val + from holoviews.tests.test_streams import Sum, Val # kdims: a and b self.dmap_ab = DynamicMap( @@ -309,7 +309,7 @@ def perform_decollate_dmap_container_streams(self, ContainerType): self.assertEqual(expected, result) def test_traverse_derived_streams(self): - from holoviews.tests.teststreams import Val + from holoviews.tests.test_streams import Val decollated = self.dmap_derived.decollate() # Check decollated types diff --git a/holoviews/tests/core/testdimensioned.py b/holoviews/tests/core/test_dimensioned.py similarity index 76% rename from holoviews/tests/core/testdimensioned.py rename to holoviews/tests/core/test_dimensioned.py index ac18417f92..14fe2f07bc 100644 --- a/holoviews/tests/core/testdimensioned.py +++ b/holoviews/tests/core/test_dimensioned.py @@ -5,9 +5,11 @@ from holoviews.core.options import Store, Keywords, Options, OptionTree from ..utils import LoggingComparisonTestCase -class TestObj(Element): + +class ExampleElement(Element): pass + class MockRenderer(object): def __init__(self, backend): @@ -20,14 +22,14 @@ class CustomBackendTestCase(LoggingComparisonTestCase): """ def setUp(self): - super(CustomBackendTestCase, self).setUp() + super().setUp() self.current_backend = Store.current_backend - self.register_custom(TestObj, 'backend_1', ['plot_custom1']) - self.register_custom(TestObj, 'backend_2', ['plot_custom2']) + self.register_custom(ExampleElement, 'backend_1', ['plot_custom1']) + self.register_custom(ExampleElement, 'backend_2', ['plot_custom2']) 
Store.set_current_backend('backend_1') def tearDown(self): - super(CustomBackendTestCase, self).tearDown() + super().tearDown() Store._weakrefs = {} Store._options.pop('backend_1') Store._options.pop('backend_2') @@ -57,115 +59,115 @@ def register_custom(cls, objtype, backend, custom_plot=[], custom_style=[]): class TestDimensioned_options(CustomBackendTestCase): def test_apply_options_current_backend_style(self): - obj = TestObj([]).options(style_opt1='A') + obj = ExampleElement([]).options(style_opt1='A') opts = Store.lookup_options('backend_1', obj, 'style') assert opts.options == {'style_opt1': 'A'} def test_apply_options_current_backend_style_invalid(self): - err = ("Unexpected option 'style_opt3' for TestObj type " + err = ("Unexpected option 'style_opt3' for ExampleElement type " "across all extensions. Similar options for current " "extension \('backend_1'\) are: \['style_opt1', 'style_opt2'\]\.") - with self.assertRaisesRegexp(ValueError, err): - TestObj([]).options(style_opt3='A') + with self.assertRaisesRegex(ValueError, err): + ExampleElement([]).options(style_opt3='A') def test_apply_options_current_backend_style_invalid_no_match(self): - err = ("Unexpected option 'zxy' for TestObj type across all extensions\. " + err = ("Unexpected option 'zxy' for ExampleElement type across all extensions\. " "No similar options found\.") - with self.assertRaisesRegexp(ValueError, err): - TestObj([]).options(zxy='A') + with self.assertRaisesRegex(ValueError, err): + ExampleElement([]).options(zxy='A') def test_apply_options_explicit_backend_style_invalid_cross_backend(self): - err = ("Unexpected option 'style_opt3' for TestObj type when " + err = ("Unexpected option 'style_opt3' for ExampleElement type when " "using the 'backend_2' extension. 
Similar options are: " "\['style_opt1', 'style_opt2'\]\.") - with self.assertRaisesRegexp(ValueError, err): - TestObj([]).options(style_opt3='A', backend='backend_2') + with self.assertRaisesRegex(ValueError, err): + ExampleElement([]).options(style_opt3='A', backend='backend_2') def test_apply_options_explicit_backend_style_invalid_no_match(self): - err = ("Unexpected option 'zxy' for TestObj type when using the " - "'backend_2' extension. No similar options founds\.") - with self.assertRaisesRegexp(ValueError, err): - TestObj([]).options(zxy='A', backend='backend_2') + err = ("Unexpected option 'zxy' for ExampleElement type when using the " + "'backend_2' extension. No similar options found\.") + with self.assertRaisesRegex(ValueError, err): + ExampleElement([]).options(zxy='A', backend='backend_2') def test_apply_options_current_backend_style_invalid_cross_backend_match(self): - TestObj([]).options(plot_custom2='A') - substr = ("Option 'plot_custom2' for TestObj type not valid for " + ExampleElement([]).options(plot_custom2='A') + substr = ("Option 'plot_custom2' for ExampleElement type not valid for " "selected backend ('backend_1'). Option only applies to " "following backends: ['backend_2']") self.log_handler.assertEndsWith('WARNING', substr) def test_apply_options_explicit_backend_style_invalid(self): - err = ("Unexpected option 'style_opt3' for TestObj type when " + err = ("Unexpected option 'style_opt3' for ExampleElement type when " "using the 'backend_2' extension. 
Similar options are: " "\['style_opt1', 'style_opt2'\]\.") - with self.assertRaisesRegexp(ValueError, err): - TestObj([]).options(style_opt3='A', backend='backend_2') + with self.assertRaisesRegex(ValueError, err): + ExampleElement([]).options(style_opt3='A', backend='backend_2') def test_apply_options_current_backend_style_multiple(self): - obj = TestObj([]).options(style_opt1='A', style_opt2='B') + obj = ExampleElement([]).options(style_opt1='A', style_opt2='B') opts = Store.lookup_options('backend_1', obj, 'style') assert opts.options == {'style_opt1': 'A', 'style_opt2': 'B'} def test_apply_options_current_backend_plot(self): - obj = TestObj([]).options(plot_opt1='A') + obj = ExampleElement([]).options(plot_opt1='A') opts = Store.lookup_options('backend_1', obj, 'plot') assert opts.options == {'plot_opt1': 'A'} def test_apply_options_current_backend_plot_multiple(self): - obj = TestObj([]).options(plot_opt1='A', plot_opt2='B') + obj = ExampleElement([]).options(plot_opt1='A', plot_opt2='B') opts = Store.lookup_options('backend_1', obj, 'plot') assert opts.options == {'plot_opt1': 'A', 'plot_opt2': 'B'} def test_apply_options_current_backend_plot_and_style(self): - obj = TestObj([]).options(style_opt1='A', plot_opt1='B') + obj = ExampleElement([]).options(style_opt1='A', plot_opt1='B') plot_opts = Store.lookup_options('backend_1', obj, 'plot') assert plot_opts.options == {'plot_opt1': 'B'} style_opts = Store.lookup_options('backend_1', obj, 'style') assert style_opts.options == {'style_opt1': 'A'} def test_apply_options_explicit_backend_style(self): - obj = TestObj([]).options(style_opt1='A', backend='backend_2') + obj = ExampleElement([]).options(style_opt1='A', backend='backend_2') opts = Store.lookup_options('backend_2', obj, 'style') assert opts.options == {'style_opt1': 'A'} def test_apply_options_explicit_backend_style_multiple(self): - obj = TestObj([]).options(style_opt1='A', style_opt2='B', backend='backend_2') + obj = 
ExampleElement([]).options(style_opt1='A', style_opt2='B', backend='backend_2') opts = Store.lookup_options('backend_2', obj, 'style') assert opts.options == {'style_opt1': 'A', 'style_opt2': 'B'} def test_apply_options_explicit_backend_plot(self): - obj = TestObj([]).options(plot_opt1='A', backend='backend_2') + obj = ExampleElement([]).options(plot_opt1='A', backend='backend_2') opts = Store.lookup_options('backend_2', obj, 'plot') assert opts.options == {'plot_opt1': 'A'} def test_apply_options_explicit_backend_plot_multiple(self): - obj = TestObj([]).options(plot_opt1='A', plot_opt2='B', backend='backend_2') + obj = ExampleElement([]).options(plot_opt1='A', plot_opt2='B', backend='backend_2') opts = Store.lookup_options('backend_2', obj, 'plot') assert opts.options == {'plot_opt1': 'A', 'plot_opt2': 'B'} def test_apply_options_explicit_backend_plot_and_style(self): - obj = TestObj([]).options(style_opt1='A', plot_opt1='B', backend='backend_2') + obj = ExampleElement([]).options(style_opt1='A', plot_opt1='B', backend='backend_2') plot_opts = Store.lookup_options('backend_2', obj, 'plot') assert plot_opts.options == {'plot_opt1': 'B'} style_opts = Store.lookup_options('backend_2', obj, 'style') assert style_opts.options == {'style_opt1': 'A'} def test_apply_options_not_cloned(self): - obj1 = TestObj([]) + obj1 = ExampleElement([]) obj2 = obj1.options(style_opt1='A', clone=False) opts = Store.lookup_options('backend_1', obj1, 'style') assert opts.options == {'style_opt1': 'A'} assert obj1 is obj2 def test_apply_options_cloned(self): - obj1 = TestObj([]) + obj1 = ExampleElement([]) obj2 = obj1.options(style_opt1='A') opts = Store.lookup_options('backend_1', obj2, 'style') assert opts.options == {'style_opt1': 'A'} assert obj1 is not obj2 def test_apply_options_explicit_backend_persist_old_backend(self): - obj = TestObj([]) + obj = ExampleElement([]) obj.opts(style_opt1='A', plot_opt1='B', backend='backend_1') obj.opts(style_opt1='C', plot_opt1='D', 
backend='backend_2') plot_opts = Store.lookup_options('backend_1', obj, 'plot') @@ -178,7 +180,7 @@ def test_apply_options_explicit_backend_persist_old_backend(self): assert style_opts.options == {'style_opt1': 'C'} def test_apply_options_explicit_backend_persists_other_backend_inverted(self): - obj = TestObj([]) + obj = ExampleElement([]) obj.opts(style_opt1='A', plot_opt1='B', backend='backend_2') obj.opts(style_opt1='C', plot_opt1='D', backend='backend_1') plot_opts = Store.lookup_options('backend_1', obj, 'plot') @@ -191,7 +193,7 @@ def test_apply_options_explicit_backend_persists_other_backend_inverted(self): assert style_opts.options == {'style_opt1': 'A'} def test_apply_options_when_backend_switched(self): - obj = TestObj([]) + obj = ExampleElement([]) Store.current_backend = 'backend_2' obj.opts(style_opt1='A', plot_opt1='B') Store.current_backend = 'backend_1' @@ -206,13 +208,13 @@ def test_apply_options_when_backend_switched(self): class TestOptionsCleanup(CustomBackendTestCase): def test_opts_resassignment_cleans_unused_tree(self): - obj = TestObj([]).opts(style_opt1='A').opts(plot_opt1='B') + obj = ExampleElement([]).opts(style_opt1='A').opts(plot_opt1='B') custom_options = Store._custom_options['backend_1'] self.assertIn(obj.id, custom_options) self.assertEqual(len(custom_options), 1) def test_opts_multiple_resassignment_cleans_unused_tree(self): - obj = HoloMap({0: TestObj([]), 1: TestObj([])}).opts(style_opt1='A').opts(plot_opt1='B') + obj = HoloMap({0: ExampleElement([]), 1: ExampleElement([])}).opts(style_opt1='A').opts(plot_opt1='B') custom_options = Store._custom_options['backend_1'] self.assertIn(obj.last.id, custom_options) self.assertEqual(len(custom_options), 2) @@ -221,7 +223,7 @@ def test_opts_multiple_resassignment_cleans_unused_tree(self): self.assertEqual(len(custom_options), 0) def test_opts_resassignment_cleans_unused_tree_cross_backend(self): - obj = TestObj([]).opts(style_opt1='A').opts(plot_opt1='B', backend='backend_2') + obj = 
ExampleElement([]).opts(style_opt1='A').opts(plot_opt1='B', backend='backend_2') custom_options = Store._custom_options['backend_1'] self.assertIn(obj.id, custom_options) self.assertEqual(len(custom_options), 1) @@ -230,14 +232,14 @@ def test_opts_resassignment_cleans_unused_tree_cross_backend(self): self.assertEqual(len(custom_options), 1) def test_garbage_collect_cleans_unused_tree(self): - obj = TestObj([]).opts(style_opt1='A') + obj = ExampleElement([]).opts(style_opt1='A') del obj gc.collect() custom_options = Store._custom_options['backend_1'] self.assertEqual(len(custom_options), 0) def test_partial_garbage_collect_does_not_clear_tree(self): - obj = HoloMap({0: TestObj([]), 1: TestObj([])}).opts(style_opt1='A') + obj = HoloMap({0: ExampleElement([]), 1: ExampleElement([])}).opts(style_opt1='A') obj.pop(0) gc.collect() custom_options = Store._custom_options['backend_1'] @@ -248,6 +250,6 @@ def test_partial_garbage_collect_does_not_clear_tree(self): self.assertEqual(len(custom_options), 0) def test_opts_clear_cleans_unused_tree(self): - TestObj([]).opts(style_opt1='A').opts.clear() + ExampleElement([]).opts(style_opt1='A').opts.clear() custom_options = Store._custom_options['backend_1'] self.assertEqual(len(custom_options), 0) diff --git a/holoviews/tests/core/testdimensions.py b/holoviews/tests/core/test_dimensions.py similarity index 96% rename from holoviews/tests/core/testdimensions.py rename to holoviews/tests/core/test_dimensions.py index a37c2573d5..4516398961 100644 --- a/holoviews/tests/core/testdimensions.py +++ b/holoviews/tests/core/test_dimensions.py @@ -15,7 +15,7 @@ class DimensionNameLabelTest(LoggingComparisonTestCase): def setUp(self): - super(DimensionNameLabelTest, self).setUp() + super().setUp() def test_dimension_name(self): dim = Dimension('test') @@ -46,12 +46,12 @@ def test_dimension_label_kwarg_and_tuple(self): def test_dimension_invalid_name(self): regexp = 'Dimension name must only be passed as the positional argument' - with 
self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): Dimension('test', name='something else') def test_dimension_invalid_name_tuple(self): regexp = 'Dimension name must only be passed as the positional argument' - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): Dimension(('test', 'test dimension'), name='something else') @@ -230,12 +230,12 @@ class DimensionDefaultTest(ComparisonTestCase): def test_validate_default_against_values(self): msg = "Dimension\('A'\) default 1\.1 not found in declared values: \[0, 1\]" - with self.assertRaisesRegexp(ValueError, msg): + with self.assertRaisesRegex(ValueError, msg): Dimension('A', values=[0, 1], default=1.1) def test_validate_default_against_range(self): msg = "Dimension\('A'\) default 1\.1 not in declared range: \(0, 1\)" - with self.assertRaisesRegexp(ValueError, msg): + with self.assertRaisesRegex(ValueError, msg): Dimension('A', range=(0, 1), default=1.1) @@ -265,7 +265,7 @@ def test_dimensioned_redim_dict_label(self): def test_dimensioned_redim_dict_label_existing_error(self): dimensioned = Dimensioned('Arbitrary Data', kdims=[('x', 'Test1')]) - with self.assertRaisesRegexp(ValueError, 'Cannot override an existing Dimension label'): + with self.assertRaisesRegex(ValueError, 'Cannot override an existing Dimension label'): dimensioned.redim.label(x='Test2') def test_dimensioned_redim_dimension(self): diff --git a/holoviews/tests/core/testdynamic.py b/holoviews/tests/core/test_dynamic.py similarity index 93% rename from holoviews/tests/core/testdynamic.py rename to holoviews/tests/core/test_dynamic.py index a3a2b196ad..51fb80ba35 100644 --- a/holoviews/tests/core/testdynamic.py +++ b/holoviews/tests/core/test_dynamic.py @@ -1,6 +1,8 @@ import uuid import time +import sys from collections import deque +from unittest import SkipTest import param import numpy as np @@ -10,12 +12,12 @@ from holoviews.element import Image, Scatter, Curve, 
Text, Points from holoviews.operation import histogram from holoviews.plotting.util import initialize_dynamic -from holoviews.streams import Stream, LinkedStream, PointerXY, PointerX, PointerY, RangeX, Buffer +from holoviews.streams import Stream, LinkedStream, PointerXY, PointerX, PointerY, RangeX, Buffer, pointer_types from holoviews.util import Dynamic from holoviews.element.comparison import ComparisonTestCase from ..utils import LoggingComparisonTestCase -from .testdimensioned import CustomBackendTestCase, TestObj +from .test_dimensioned import CustomBackendTestCase, ExampleElement XY = Stream.define('XY', x=0,y=0) X = Stream.define('X', x=0) @@ -28,7 +30,8 @@ def sine_array(phase, freq): return np.sin(phase + (freq*x**2+freq*y**2)) - +class ExampleParameterized(param.Parameterized): + example = param.Number(default=1) class DynamicMapConstructor(ComparisonTestCase): @@ -38,33 +41,55 @@ def test_simple_constructor_kdims(self): def test_simple_constructor_invalid_no_kdims(self): regexp = ("Callable '' accepts more positional arguments than there are " "kdims and stream parameters") - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(lambda x: x) def test_simple_constructor_invalid(self): regexp = ("Callback '' signature over \['x'\] does not accommodate " "required kdims \['x', 'y'\]") - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(lambda x: x, kdims=['x','y']) def test_simple_constructor_streams(self): DynamicMap(lambda x: x, streams=[PointerX()]) + def test_simple_constructor_streams_dict(self): + pointerx = PointerX() + DynamicMap(lambda x: x, streams=dict(x=pointerx.param.x)) + + def test_simple_constructor_streams_dict_panel_widget(self): + if 'panel' not in sys.modules: + raise SkipTest('Panel not available') + import panel + DynamicMap(lambda x: x, streams=dict(x=panel.widgets.FloatSlider())) + + def 
test_simple_constructor_streams_dict_parameter(self): + test = ExampleParameterized() + DynamicMap(lambda x: x, streams=dict(x=test.param.example)) + + def test_simple_constructor_streams_dict_class_parameter(self): + DynamicMap(lambda x: x, streams=dict(x=ExampleParameterized.param.example)) + + def test_simple_constructor_streams_dict_invalid(self): + regexp = "Cannot handle value 3 in streams dictionary" + with self.assertRaisesRegex(TypeError, regexp): + DynamicMap(lambda x: x, streams=dict(x=3)) + def test_simple_constructor_streams_invalid_uninstantiated(self): regexp = ("The supplied streams list contains objects " "that are not Stream instances:(.+?)") - with self.assertRaisesRegexp(TypeError, regexp): + with self.assertRaisesRegex(TypeError, regexp): DynamicMap(lambda x: x, streams=[PointerX]) def test_simple_constructor_streams_invalid_type(self): regexp = ("The supplied streams list contains objects " "that are not Stream instances:(.+?)") - with self.assertRaisesRegexp(TypeError, regexp): + with self.assertRaisesRegex(TypeError, regexp): DynamicMap(lambda x: x, streams=[3]) def test_simple_constructor_streams_invalid_mismatch(self): regexp = "Callable '' missing keywords to accept stream parameters: y" - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(lambda x: x, streams=[PointerXY()]) def test_simple_constructor_positional_stream_args(self): @@ -74,7 +99,7 @@ def test_simple_constructor_streams_invalid_mismatch_named(self): def foo(x): return x regexp = "Callable 'foo' missing keywords to accept stream parameters: y" - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): DynamicMap(foo, streams=[PointerXY()]) @@ -382,7 +407,7 @@ def history_callback(x, y, history=deque(maxlen=10)): dmap = DynamicMap(history_callback, kdims=['x', 'y']) exception = ("DynamicMap does not allow dropping dimensions, " "reindex may only be used to reorder dimensions.") - 
with self.assertRaisesRegexp(ValueError, exception): + with self.assertRaisesRegex(ValueError, exception): dmap.reindex(['x']) def test_dynamic_groupby_kdims_and_streams(self): @@ -457,13 +482,13 @@ def cb(i): class DynamicMapOptionsTests(CustomBackendTestCase): def test_dynamic_options(self): - dmap = DynamicMap(lambda X: TestObj(None), kdims=['X']).redim.range(X=(0,10)) + dmap = DynamicMap(lambda X: ExampleElement(None), kdims=['X']).redim.range(X=(0,10)) dmap = dmap.options(plot_opt1='red') opts = Store.lookup_options('backend_1', dmap[0], 'plot') self.assertEqual(opts.options, {'plot_opt1': 'red'}) def test_dynamic_options_no_clone(self): - dmap = DynamicMap(lambda X: TestObj(None), kdims=['X']).redim.range(X=(0,10)) + dmap = DynamicMap(lambda X: ExampleElement(None), kdims=['X']).redim.range(X=(0,10)) dmap.options(plot_opt1='red', clone=False) opts = Store.lookup_options('backend_1', dmap[0], 'plot') self.assertEqual(opts.options, {'plot_opt1': 'red'}) @@ -471,7 +496,7 @@ def test_dynamic_options_no_clone(self): def test_dynamic_opts_link_inputs(self): stream = LinkedStream() inputs = [DynamicMap(lambda: None, streams=[stream])] - dmap = DynamicMap(Callable(lambda X: TestObj(None), inputs=inputs), + dmap = DynamicMap(Callable(lambda X: ExampleElement(None), inputs=inputs), kdims=['X']).redim.range(X=(0,10)) styled_dmap = dmap.options(plot_opt1='red', clone=False) opts = Store.lookup_options('backend_1', dmap[0], 'plot') @@ -570,9 +595,15 @@ def apply_label(self, obj): def test_dynamic_util_inherits_dim_streams_clash(self): exception = ("The supplied stream objects PointerX\(x=None\) and " "PointerX\(x=0\) clash on the following parameters: \['x'\]") - with self.assertRaisesRegexp(Exception, exception): + with self.assertRaisesRegex(Exception, exception): Dynamic(self.dmap, streams=[PointerX]) + def test_dynamic_util_inherits_dim_streams_clash_dict(self): + exception = ("The supplied stream objects PointerX\(x=None\) and " + "PointerX\(x=0\) clash on the 
following parameters: \['x'\]") + with self.assertRaisesRegex(Exception, exception): + Dynamic(self.dmap, streams=dict(x=PointerX.param.x)) + class DynamicTestOperation(ComparisonTestCase): @@ -604,6 +635,14 @@ def test_dynamic_operation_on_element(self): self.assertEqual(element, Image(sine_array(0,5)*2+1)) self.assertEqual(dmap_with_fn.streams, [posxy]) + def test_dynamic_operation_on_element_dict(self): + img = Image(sine_array(0,5)) + posxy = PointerXY(x=3, y=1) + dmap_with_fn = Dynamic(img, operation=lambda obj, x, y: obj.clone(obj.data*x+y), + streams=dict(x=posxy.param.x, y=posxy.param.y)) + element = dmap_with_fn[()] + self.assertEqual(element, Image(sine_array(0,5)*3+1)) + def test_dynamic_operation_with_kwargs(self): fn = lambda i: Image(sine_array(0,i)) dmap=DynamicMap(fn, kdims=['i']) @@ -700,7 +739,7 @@ def fn(x1, y1): dmap = DynamicMap(fn, kdims=[], streams=[xy]) regexp = '(.+?)do not correspond to stream parameters' - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): dmap.event(x=1, y=2) @@ -876,6 +915,7 @@ def history_callback(x, history=deque(maxlen=10)): return Curve(list(history)) class NoMemoize(PointerX): + x = param.ClassSelector(class_=pointer_types, default=None, constant=True) @property def hashkey(self): return {'hash': uuid.uuid4().hex} @@ -980,7 +1020,7 @@ def callback(x, y): stream = PointerXY() cb_callable = Callable(callback) dmap = DynamicMap(cb_callable, kdims=[], streams=[stream]) - with self.assertRaisesRegexp(ValueError, 'The following streams are set to be automatically linked'): + with self.assertRaisesRegex(ValueError, 'The following streams are set to be automatically linked'): dmap.collate() def test_dynamic_collate_layout_raise_ambiguous_remapping_error(self): @@ -989,7 +1029,7 @@ def callback(x, y): stream = PointerXY() cb_callable = Callable(callback, stream_mapping={'Image': [stream]}) dmap = DynamicMap(cb_callable, kdims=[], streams=[stream]) - with 
self.assertRaisesRegexp(ValueError, 'The stream_mapping supplied on the Callable is ambiguous'): + with self.assertRaisesRegex(ValueError, 'The stream_mapping supplied on the Callable is ambiguous'): dmap.collate() def test_dynamic_collate_layout_with_integer_stream_mapping(self): @@ -1111,7 +1151,7 @@ def callback(i): layout = dmap.collate() dmap1, dmap2 = layout.values() err = 'Collated DynamicMaps must return GridSpace with consistent number of items.' - with self.assertRaisesRegexp(ValueError, err): + with self.assertRaisesRegex(ValueError, err): dmap1[4] self.log_handler.assertContains('WARNING', err) @@ -1124,6 +1164,6 @@ def callback(i): dmap1, dmap2 = layout.values() err = ('The objects in a GridSpace returned by a DynamicMap must ' 'consistently return the same number of items of the same type.') - with self.assertRaisesRegexp(ValueError, err): + with self.assertRaisesRegex(ValueError, err): dmap1[3] self.log_handler.assertContains('WARNING', err) diff --git a/holoviews/tests/core/testelement.py b/holoviews/tests/core/test_element.py similarity index 100% rename from holoviews/tests/core/testelement.py rename to holoviews/tests/core/test_element.py diff --git a/holoviews/tests/core/testimportexport.py b/holoviews/tests/core/test_importexport.py similarity index 100% rename from holoviews/tests/core/testimportexport.py rename to holoviews/tests/core/test_importexport.py diff --git a/holoviews/tests/core/testlayers.py b/holoviews/tests/core/test_layers.py similarity index 100% rename from holoviews/tests/core/testlayers.py rename to holoviews/tests/core/test_layers.py diff --git a/holoviews/tests/core/testlayouts.py b/holoviews/tests/core/test_layouts.py similarity index 98% rename from holoviews/tests/core/testlayouts.py rename to holoviews/tests/core/test_layouts.py index 3a26071035..d6e2ae175a 100644 --- a/holoviews/tests/core/testlayouts.py +++ b/holoviews/tests/core/test_layouts.py @@ -2,12 +2,10 @@ """ Tests of Layout and related classes """ - -import 
sys from holoviews import AdjointLayout, NdLayout, GridSpace, Layout, Element, HoloMap, Overlay from holoviews.element import HLine, Curve from holoviews.element.comparison import ComparisonTestCase -from unittest import SkipTest + class CompositeTest(ComparisonTestCase): "For testing of basic composite element types" @@ -25,9 +23,8 @@ def setUp(self): def test_add_operator(self): self.assertEqual(type(self.view1 + self.view2), Layout) - def test_add_unicode_py3(self): + def test_add_unicode(self): "Test to avoid regression of #3403 where unicode characters don't capitalize" - if sys.version_info.major == 2: raise SkipTest layout = Curve([-1,-2,-3]) + Curve([1,2,3]) .relabel('𝜗_1 vs th_2') elements = list(layout) self.assertEqual(len(elements), 2) diff --git a/holoviews/tests/core/testndmapping.py b/holoviews/tests/core/test_ndmapping.py similarity index 96% rename from holoviews/tests/core/testndmapping.py rename to holoviews/tests/core/test_ndmapping.py index c157965791..22ce8261c0 100644 --- a/holoviews/tests/core/testndmapping.py +++ b/holoviews/tests/core/test_ndmapping.py @@ -87,10 +87,10 @@ def test_idxmapping_nested_update(self): ndmap_list = [(0.5, ndmap1), (1.5, ndmap2)] nested_ndmap = MultiDimensionalMapping(ndmap_list, kdims=[self.dim2]) nested_ndmap[(0.5,)].update(dict([(0, 'c'), (1, 'd')])) - self.assertEquals(list(nested_ndmap[0.5].values()), ['c', 'd']) + self.assertEqual(list(nested_ndmap[0.5].values()), ['c', 'd']) nested_ndmap[1.5] = ndmap3 - self.assertEquals(list(nested_ndmap[1.5].values()), ['e', 'f']) + self.assertEqual(list(nested_ndmap[1.5].values()), ['e', 'f']) def test_ndmapping_slice_lower_bound_inclusive_int(self): ndmap = NdMapping(self.init_item_odict, kdims=[self.dim1, self.dim2]) @@ -127,19 +127,19 @@ def test_ndmapping_slice_upper_bound_exclusive2_float(self): def test_idxmapping_unsorted(self): data = [('B', 1), ('C', 2), ('A', 3)] ndmap = MultiDimensionalMapping(data, sort=False) - self.assertEquals(ndmap.keys(), ['B', 'C', 
'A']) + self.assertEqual(ndmap.keys(), ['B', 'C', 'A']) def test_idxmapping_unsorted_clone(self): data = [('B', 1), ('C', 2), ('A', 3)] ndmap = MultiDimensionalMapping(data, sort=False).clone() - self.assertEquals(ndmap.keys(), ['B', 'C', 'A']) + self.assertEqual(ndmap.keys(), ['B', 'C', 'A']) def test_idxmapping_groupby_unsorted(self): data = [(('B', 2), 1), (('C', 2), 2), (('A', 1), 3)] grouped = NdMapping(data, sort=False, kdims=['X', 'Y']).groupby('Y') - self.assertEquals(grouped.keys(), [2, 1]) - self.assertEquals(grouped.values()[0].keys(), ['B', 'C']) - self.assertEquals(grouped.last.keys(), ['A']) + self.assertEqual(grouped.keys(), [2, 1]) + self.assertEqual(grouped.values()[0].keys(), ['B', 'C']) + self.assertEqual(grouped.last.keys(), ['A']) def test_idxmapping_reindex(self): data = [((0, 0.5), 'a'), ((1, 0.5), 'b')] diff --git a/holoviews/tests/core/testoperation.py b/holoviews/tests/core/test_operation.py similarity index 87% rename from holoviews/tests/core/testoperation.py rename to holoviews/tests/core/test_operation.py index d649b44553..c067272118 100644 --- a/holoviews/tests/core/testoperation.py +++ b/holoviews/tests/core/test_operation.py @@ -6,7 +6,7 @@ from holoviews.streams import Stream, Params -class TestOperation(Operation): +class ExampleOperation(Operation): label = param.String() @@ -40,7 +40,7 @@ def test_element_not_dynamic_despite_streams(self): def test_element_dynamic_with_instance_param(self): curve = Curve([1, 2, 3]) inst = ParamClass(label='Test') - applied = TestOperation(curve, label=inst.param.label) + applied = ExampleOperation(curve, label=inst.param.label) self.assertEqual(len(applied.streams), 1) self.assertIsInstance(applied.streams[0], Params) self.assertEqual(applied.streams[0].parameters, [inst.param.label]) @@ -49,7 +49,7 @@ def test_element_dynamic_with_instance_param(self): def test_element_dynamic_with_param_method(self): curve = Curve([1, 2, 3]) inst = ParamClass(label='Test') - applied = TestOperation(curve, 
label=inst.dynamic_label) + applied = ExampleOperation(curve, label=inst.dynamic_label) self.assertEqual(len(applied.streams), 1) self.assertIsInstance(applied.streams[0], Params) self.assertEqual(applied.streams[0].parameters, [inst.param.label]) @@ -60,11 +60,11 @@ def test_element_dynamic_with_param_method(self): def test_element_not_dynamic_with_instance_param(self): curve = Curve([1, 2, 3]) inst = ParamClass(label='Test') - applied = TestOperation(curve, dynamic=False, label=inst.param.label) + applied = ExampleOperation(curve, dynamic=False, label=inst.param.label) self.assertEqual(applied, curve.relabel('Test')) def test_element_not_dynamic_with_param_method(self): curve = Curve([1, 2, 3]) inst = ParamClass(label='Test') - applied = TestOperation(curve, dynamic=False, label=inst.dynamic_label) + applied = ExampleOperation(curve, dynamic=False, label=inst.dynamic_label) self.assertEqual(applied, curve.relabel('Test!')) diff --git a/holoviews/tests/core/testoptions.py b/holoviews/tests/core/test_options.py similarity index 92% rename from holoviews/tests/core/testoptions.py rename to holoviews/tests/core/test_options.py index c669d85735..e041a6809d 100644 --- a/holoviews/tests/core/testoptions.py +++ b/holoviews/tests/core/test_options.py @@ -1,9 +1,11 @@ import os -import sys import pickle + from unittest import SkipTest import numpy as np +import pytest + from holoviews import Store, Histogram, Image, Curve, Points, DynamicMap, opts from holoviews.core.options import ( OptionError, Cycle, Options, OptionTree, StoreOptions, options_policy @@ -36,42 +38,42 @@ class TestOptions(ComparisonTestCase): def setUp(self): self.original_option_groups = Options._option_groups Options._option_groups = ['test'] - super(TestOptions, self).setUp() + super().setUp() def tearDown(self): Options._option_groups = self.original_option_groups - super(TestOptions, self).tearDown() + super().tearDown() def test_options_init(self): Options('test') def 
test_options_valid_keywords1(self): opts = Options('test', allowed_keywords=['kw1'], kw1='value') - self.assertEquals(opts.kwargs, {'kw1':'value'}) + self.assertEqual(opts.kwargs, {'kw1':'value'}) def test_options_valid_keywords2(self): opts = Options('test', allowed_keywords=['kw1', 'kw2'], kw1='value') - self.assertEquals(opts.kwargs, {'kw1':'value'}) + self.assertEqual(opts.kwargs, {'kw1':'value'}) def test_options_valid_keywords3(self): opts = Options('test', allowed_keywords=['kw1', 'kw2'], kw1='value1', kw2='value2') - self.assertEquals(opts.kwargs, {'kw1':'value1', 'kw2':'value2'}) + self.assertEqual(opts.kwargs, {'kw1':'value1', 'kw2':'value2'}) def test_options_any_keywords3(self): opts = Options('test', kw1='value1', kw2='value3') - self.assertEquals(opts.kwargs, {'kw1':'value1', 'kw2':'value3'}) + self.assertEqual(opts.kwargs, {'kw1':'value1', 'kw2':'value3'}) def test_options_invalid_keywords1(self): try: Options('test', allowed_keywords=['kw1'], kw='value') except OptionError as e: - self.assertEqual(str(e), "Invalid option 'kw', valid options are: ['kw1']") + self.assertEqual(str(e), "Invalid option 'kw', valid options are: ['kw1'].") def test_options_invalid_keywords2(self): try: Options('test', allowed_keywords=['kw2'], kw2='value', kw3='value') except OptionError as e: - self.assertEqual(str(e), "Invalid option 'kw3', valid options are: ['kw2']") + self.assertEqual(str(e), "Invalid option 'kw3', valid options are: ['kw2'].") def test_options_invalid_keywords_skip1(self): with options_policy(skip_invalid=True, warn_on_skip=False): @@ -124,7 +126,7 @@ def test_options_inherit_invalid_keywords(self): try: opts(**new_kws) except OptionError as e: - self.assertEqual(str(e), "Invalid option 'kw4', valid options are: ['kw2', 'kw3']") + self.assertEqual(str(e), "Invalid option 'kw4', valid options are: ['kw2', 'kw3'].") @@ -133,11 +135,11 @@ class TestCycle(ComparisonTestCase): def setUp(self): self.original_option_groups = Options._option_groups 
Options._option_groups = ['test'] - super(TestCycle, self).setUp() + super().setUp() def tearDown(self): Options._option_groups = self.original_option_groups - super(TestCycle, self).tearDown() + super().tearDown() def test_cycle_init(self): Cycle(values=['a', 'b', 'c']) @@ -206,13 +208,13 @@ class TestOptionTree(ComparisonTestCase): def setUp(self): if 'matplotlib' not in Store.renderers: raise SkipTest('Matplotlib backend not available.') - super(TestOptionTree, self).setUp() + super().setUp() self.original_option_groups = Options._option_groups[:] Options._option_groups = ['group1', 'group2'] def tearDown(self): Options._option_groups = self.original_option_groups - super(TestOptionTree, self).tearDown() + super().tearDown() def test_optiontree_init_1(self): OptionTree(groups=['group1', 'group2']) @@ -292,16 +294,18 @@ class TestStoreInheritanceDynamic(ComparisonTestCase): def setUp(self): if 'matplotlib' not in Store.renderers: raise SkipTest('Matplotlib backend not available.') - self.store_copy = OptionTree(sorted(Store.options().items()), - groups=Options._option_groups) self.backend = 'matplotlib' - Store.current_backend = self.backend - super(TestStoreInheritanceDynamic, self).setUp() + Store.set_current_backend(self.backend) + options = Store.options() + self.store_copy = OptionTree(sorted(options.items()), + groups=Options._option_groups, + backend=options.backend) + super().setUp() def tearDown(self): Store.options(val=self.store_copy) Store._custom_options = {k:{} for k in Store._custom_options.keys()} - super(TestStoreInheritanceDynamic, self).tearDown() + super().tearDown() def initialize_option_tree(self): Store.options(val=OptionTree(groups=['plot', 'style'])) @@ -488,11 +492,13 @@ class TestStoreInheritance(ComparisonTestCase): def setUp(self): if 'matplotlib' not in Store.renderers: raise SkipTest('Matplotlib backend not available.') + self.backend = 'matplotlib' + Store.set_current_backend(self.backend) self.store_copy = 
OptionTree(sorted(Store.options().items()), groups=Options._option_groups) - self.backend = 'matplotlib' - Store.current_backend = self.backend - Store.options(val=OptionTree(groups=['plot', 'style'])) + Store.options(val=OptionTree( + groups=['plot', 'style'], backend=self.backend + )) options = Store.options() @@ -505,16 +511,15 @@ def setUp(self): data = [np.random.normal() for i in range(10000)] frequencies, edges = np.histogram(data, 20) self.hist = Histogram((edges, frequencies)) - super(TestStoreInheritance, self).setUp() - - - def lookup_options(self, obj, group): - return Store.lookup_options(self.backend, obj, group) + super().setUp() def tearDown(self): Store.options(val=self.store_copy) Store._custom_options = {k:{} for k in Store._custom_options.keys()} - super(TestStoreInheritance, self).tearDown() + super().tearDown() + + def lookup_options(self, obj, group): + return Store.lookup_options(self.backend, obj, group) def test_original_style_options(self): self.assertEqual(self.lookup_options(self.hist, 'style').options, @@ -575,19 +580,19 @@ class TestOptionsMethod(ComparisonTestCase): def setUp(self): if 'matplotlib' not in Store.renderers: raise SkipTest('Matplotlib backend not available.') - self.store_copy = OptionTree(sorted(Store.options().items()), - groups=Options._option_groups) self.backend = 'matplotlib' Store.set_current_backend(self.backend) - super(TestOptionsMethod, self).setUp() - - def lookup_options(self, obj, group): - return Store.lookup_options(self.backend, obj, group) + self.store_copy = OptionTree(sorted(Store.options().items()), + groups=Options._option_groups) + super().setUp() def tearDown(self): Store.options(val=self.store_copy) Store._custom_options = {k:{} for k in Store._custom_options.keys()} - super(TestOptionsMethod, self).tearDown() + super().tearDown() + + def lookup_options(self, obj, group): + return Store.lookup_options(self.backend, obj, group) def test_plot_options_keywords(self): im = 
Image(np.random.rand(10,10)) @@ -628,19 +633,19 @@ class TestOptsMethod(ComparisonTestCase): def setUp(self): if 'matplotlib' not in Store.renderers: raise SkipTest('Matplotlib backend not available.') - self.store_copy = OptionTree(sorted(Store.options().items()), - groups=Options._option_groups) self.backend = 'matplotlib' Store.set_current_backend(self.backend) - super(TestOptsMethod, self).setUp() - - def lookup_options(self, obj, group): - return Store.lookup_options(self.backend, obj, group) + self.store_copy = OptionTree(sorted(Store.options().items()), + groups=Options._option_groups) + super().setUp() def tearDown(self): Store.options(val=self.store_copy) Store._custom_options = {k:{} for k in Store._custom_options.keys()} - super(TestOptsMethod, self).tearDown() + super().tearDown() + + def lookup_options(self, obj, group): + return Store.lookup_options(self.backend, obj, group) def test_old_opts_clone_disabled(self): im = Image(np.random.rand(10,10)) @@ -760,9 +765,8 @@ def setUp(self): options.XType.Bar = self.opts6 self.options = options - self.original_options = Store.options() - Store.options(val = OptionTree(groups=['group'])) - + self.original_options=Store.options() + Store.options(val=OptionTree(groups=['group'])) def tearDown(self): Options._option_groups = self.original_option_groups @@ -822,18 +826,26 @@ def setUp(self): # Some tests require that plotly isn't loaded self.plotly_options = Store._options.pop('plotly', None) - self.store_mpl = OptionTree(sorted(Store.options(backend='matplotlib').items()), - groups=Options._option_groups) - self.store_bokeh = OptionTree(sorted(Store.options(backend='bokeh').items()), - groups=Options._option_groups) + self.store_mpl = OptionTree( + sorted(Store.options(backend='matplotlib').items()), + groups=Options._option_groups, + backend='matplotlib' + ) + self.store_bokeh = OptionTree( + sorted(Store.options(backend='bokeh').items()), + groups=Options._option_groups, + backend='bokeh' + ) 
self.clear_options() - super(TestCrossBackendOptions, self).setUp() + super().setUp() def clear_options(self): # Clear global options.. - Store.options(val=OptionTree(groups=['plot', 'style']), backend='matplotlib') - Store.options(val=OptionTree(groups=['plot', 'style']), backend='bokeh') + Store.options(val=OptionTree(groups=['plot', 'style'], backend='matplotlib'), + backend='matplotlib') + Store.options(val=OptionTree(groups=['plot', 'style'], backend='bokeh'), + backend='bokeh') # ... and custom options Store.custom_options({}, backend='matplotlib') Store.custom_options({}, backend='bokeh') @@ -901,27 +913,7 @@ def test_mpl_bokeh_offset_mpl(self): self.assertEqual(bokeh_opts, {'cmap':'Purple'}) return img - def test_builder_backend_switch(self): - if sys.version_info.major == 3: - raise SkipTest('Python 3 tab completes via __signature__ not __doc__') - Store.options(val=self.store_mpl, backend='matplotlib') - Store.options(val=self.store_bokeh, backend='bokeh') - Store.set_current_backend('bokeh') - self.assertEqual(opts.Curve.__doc__.startswith('Curve('), True) - docline = opts.Curve.__doc__.splitlines()[0] - dockeys = eval(docline.replace('Curve', 'dict')) - self.assertEqual('color' in dockeys, True) - self.assertEqual('line_width' in dockeys, True) - Store.set_current_backend('matplotlib') - self.assertEqual(opts.Curve.__doc__.startswith('Curve('), True) - docline = opts.Curve.__doc__.splitlines()[0] - dockeys = eval(docline.replace('Curve', 'dict')) - self.assertEqual('color' in dockeys, True) - self.assertEqual('linewidth' in dockeys, True) - def test_builder_backend_switch_signature(self): - if sys.version_info.major == 2: - raise SkipTest('Python 2 tab completes via __doc__ not __signature__') Store.options(val=self.store_mpl, backend='matplotlib') Store.options(val=self.store_bokeh, backend='bokeh') Store.set_current_backend('bokeh') @@ -935,33 +927,38 @@ def test_builder_backend_switch_signature(self): self.assertEqual('color' in sigkeys, True) 
self.assertEqual('linewidth' in sigkeys, True) - def test_builder_cross_backend_validation(self): Store.options(val=self.store_mpl, backend='matplotlib') Store.options(val=self.store_bokeh, backend='bokeh') Store.set_current_backend('bokeh') opts.Curve(line_dash='dotted') # Bokeh keyword opts.Curve(linewidth=10) # MPL keyword - err = ("In opts.Curve\(...\), keywords supplied are mixed across backends. " - "Keyword\(s\) 'linewidth' are invalid for bokeh, " - "'line_dash' are invalid for matplotlib") - with self.assertRaisesRegexp(ValueError, err): + + err = ( + "In opts.Curve(...), keywords supplied are mixed across backends. " + "Keyword(s) 'linewidth' are invalid for bokeh, " + "'line_dash' are invalid for matplotlib" + ) + with pytest.raises(ValueError) as excinfo: opts.Curve(linewidth=10, line_dash='dotted') # Bokeh and MPL + assert err in str(excinfo.value) # Non-existent keyword across backends (bokeh active) - err = ("In opts.Curve\(...\), unexpected option 'foobar' for Curve type " + err = ("In opts.Curve(...), unexpected option 'foobar' for Curve type " "across all extensions. Similar options for current " - "extension \('bokeh'\) are: \['toolbar'\].") - with self.assertRaisesRegexp(ValueError, err): + "extension ('bokeh') are: ['toolbar'].") + with pytest.raises(ValueError) as excinfo: opts.Curve(foobar=3) + assert err in str(excinfo.value) # Non-existent keyword across backends (matplotlib active) Store.set_current_backend('matplotlib') - err = ("In opts.Curve\(...\), unexpected option 'foobar' for Curve " + err = ("In opts.Curve(...), unexpected option 'foobar' for Curve " "type across all extensions. 
No similar options found.") - with self.assertRaisesRegexp(ValueError, err): + with pytest.raises(ValueError) as excinfo: opts.Curve(foobar=3) + assert err in str(excinfo.value) class TestLookupOptions(ComparisonTestCase): @@ -1033,11 +1030,15 @@ def setUp(self): # Some tests require that plotly isn't loaded self.plotly_options = Store._options.pop('plotly', None) - self.store_mpl = OptionTree(sorted(Store.options(backend='matplotlib').items()), - groups=Options._option_groups) - self.store_bokeh = OptionTree(sorted(Store.options(backend='bokeh').items()), - groups=Options._option_groups) - super(TestCrossBackendOptionSpecification, self).setUp() + self.store_mpl = OptionTree( + sorted(Store.options(backend='matplotlib').items()), + groups=Options._option_groups, backend='matplotlib' + ) + self.store_bokeh = OptionTree( + sorted(Store.options(backend='bokeh').items()), + groups=Options._option_groups, backend='bokeh' + ) + super().setUp() def tearDown(self): Store.options(val=self.store_mpl, backend='matplotlib') @@ -1048,7 +1049,7 @@ def tearDown(self): if self.plotly_options is not None: Store._options['plotly'] = self.plotly_options - super(TestCrossBackendOptionSpecification, self).tearDown() + super().tearDown() def assert_output_options_group_empty(self, obj): mpl_output_lookup = Store.lookup_options('matplotlib', obj, 'output').options @@ -1205,7 +1206,7 @@ class TestCrossBackendOptionPickling(TestCrossBackendOptions): cleanup = ['test_raw_pickle.pkl', 'test_pickle_mpl_bokeh.pkl'] def tearDown(self): - super(TestCrossBackendOptionPickling, self).tearDown() + super().tearDown() for f in self.cleanup: try: os.remove(f) @@ -1217,7 +1218,7 @@ def test_raw_pickle(self): Test usual pickle saving and loading (no style information preserved) """ fname= 'test_raw_pickle.pkl' - raw = super(TestCrossBackendOptionPickling, self).test_mpl_bokeh_mpl() + raw = super().test_mpl_bokeh_mpl() pickle.dump(raw, open(fname,'wb')) self.clear_options() img = 
pickle.load(open(fname,'rb')) @@ -1237,7 +1238,7 @@ def test_pickle_mpl_bokeh(self): Test pickle saving and loading with Store (style information preserved) """ fname = 'test_pickle_mpl_bokeh.pkl' - raw = super(TestCrossBackendOptionPickling, self).test_mpl_bokeh_mpl() + raw = super().test_mpl_bokeh_mpl() Store.dump(raw, open(fname,'wb')) self.clear_options() img = Store.load(open(fname,'rb')) diff --git a/holoviews/tests/core/testprettyprint.py b/holoviews/tests/core/test_prettyprint.py similarity index 55% rename from holoviews/tests/core/testprettyprint.py rename to holoviews/tests/core/test_prettyprint.py index 040503a8cb..1388c571ef 100644 --- a/holoviews/tests/core/testprettyprint.py +++ b/holoviews/tests/core/test_prettyprint.py @@ -7,7 +7,7 @@ from holoviews import Store, Element, Curve, Overlay, Layout from holoviews.core.pprint import PrettyPrinter -from .testdimensioned import CustomBackendTestCase, TestObj +from .test_dimensioned import CustomBackendTestCase, ExampleElement class PrettyPrintTest(ComparisonTestCase): @@ -36,41 +36,41 @@ def test_curve_pprint_repr(self): class PrettyPrintOptionsTest(CustomBackendTestCase): def setUp(self): - super(PrettyPrintOptionsTest, self).setUp() + super().setUp() self.current_backend = Store.current_backend self.pprinter = PrettyPrinter(show_options=True) - self.register_custom(TestObj, 'backend_1', ['plot_custom1'], ['style_custom1']) + self.register_custom(ExampleElement, 'backend_1', ['plot_custom1'], ['style_custom1']) self.register_custom(Overlay, 'backend_1', ['plot_custom1']) self.register_custom(Layout, 'backend_1', ['plot_custom1']) - self.register_custom(TestObj, 'backend_2', ['plot_custom2']) + self.register_custom(ExampleElement, 'backend_2', ['plot_custom2']) Store.current_backend = 'backend_1' def test_element_options(self): - element = TestObj(None).opts(style_opt1='A', backend='backend_1') + element = ExampleElement(None).opts(style_opt1='A', backend='backend_1') r = self.pprinter.pprint(element) - 
self.assertEqual(r, ":TestObj\n | Options(style_opt1='A')") + self.assertEqual(r, ":ExampleElement\n | Options(style_opt1='A')") def test_element_options_wrapping(self): - element = TestObj(None).opts(plot_opt1='A'*40, style_opt1='B'*40, backend='backend_1') + element = ExampleElement(None).opts(plot_opt1='A'*40, style_opt1='B'*40, backend='backend_1') r = self.pprinter.pprint(element) - self.assertEqual(r, ":TestObj\n | Options(plot_opt1='AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA',\n | style_opt1='BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB')") + self.assertEqual(r, ":ExampleElement\n | Options(plot_opt1='AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA',\n | style_opt1='BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB')") def test_overlay_options(self): - overlay = (TestObj(None) * TestObj(None)).opts(plot_opt1='A') + overlay = (ExampleElement(None) * ExampleElement(None)).opts(plot_opt1='A') r = self.pprinter.pprint(overlay) - self.assertEqual(r, ":Overlay\n | Options(plot_opt1='A')\n .Element.I :TestObj\n .Element.II :TestObj") + self.assertEqual(r, ":Overlay\n | Options(plot_opt1='A')\n .Element.I :ExampleElement\n .Element.II :ExampleElement") def test_overlay_nested_options(self): - overlay = (TestObj(None) * TestObj(None)).opts('TestObj', plot_opt1='A', style_opt1='A') + overlay = (ExampleElement(None) * ExampleElement(None)).opts('ExampleElement', plot_opt1='A', style_opt1='A') r = self.pprinter.pprint(overlay) - self.assertEqual(r, ":Overlay\n .Element.I :TestObj\n | Options(plot_opt1='A', style_opt1='A')\n .Element.II :TestObj\n | Options(plot_opt1='A', style_opt1='A')") + self.assertEqual(r, ":Overlay\n .Element.I :ExampleElement\n | Options(plot_opt1='A', style_opt1='A')\n .Element.II :ExampleElement\n | Options(plot_opt1='A', style_opt1='A')") def test_layout_options(self): - overlay = (TestObj(None) + TestObj(None)).opts(plot_opt1='A') + overlay = (ExampleElement(None) + ExampleElement(None)).opts(plot_opt1='A') r = self.pprinter.pprint(overlay) - self.assertEqual(r, 
":Layout\n | Options(plot_opt1='A')\n .Element.I :TestObj\n .Element.II :TestObj") + self.assertEqual(r, ":Layout\n | Options(plot_opt1='A')\n .Element.I :ExampleElement\n .Element.II :ExampleElement") def test_layout_nested_options(self): - overlay = (TestObj(None) + TestObj(None)).opts('TestObj', plot_opt1='A', style_opt1='A') + overlay = (ExampleElement(None) + ExampleElement(None)).opts('ExampleElement', plot_opt1='A', style_opt1='A') r = self.pprinter.pprint(overlay) - self.assertEqual(r, ":Layout\n .Element.I :TestObj\n | Options(plot_opt1='A', style_opt1='A')\n .Element.II :TestObj\n | Options(plot_opt1='A', style_opt1='A')") + self.assertEqual(r, ":Layout\n .Element.I :ExampleElement\n | Options(plot_opt1='A', style_opt1='A')\n .Element.II :ExampleElement\n | Options(plot_opt1='A', style_opt1='A')") diff --git a/holoviews/tests/core/teststoreoptions.py b/holoviews/tests/core/test_storeoptions.py similarity index 100% rename from holoviews/tests/core/teststoreoptions.py rename to holoviews/tests/core/test_storeoptions.py diff --git a/holoviews/tests/core/testtraversal.py b/holoviews/tests/core/test_traversal.py similarity index 97% rename from holoviews/tests/core/testtraversal.py rename to holoviews/tests/core/test_traversal.py index cc37ec7b03..c767415bbe 100644 --- a/holoviews/tests/core/testtraversal.py +++ b/holoviews/tests/core/test_traversal.py @@ -24,7 +24,7 @@ def test_unique_keys_no_overlap_exception(self): hmap2 = HoloMap({i: Curve(range(10)) for i in range(3, 10)}) exception = ('When combining HoloMaps into a composite plot ' 'their dimensions must be subsets of each other.') - with self.assertRaisesRegexp(Exception, exception): + with self.assertRaisesRegex(Exception, exception): dims, keys = unique_dimkeys(hmap1+hmap2) def test_unique_keys_no_overlap_dynamicmap_uninitialized(self): diff --git a/holoviews/tests/core/testtree.py b/holoviews/tests/core/test_tree.py similarity index 100% rename from holoviews/tests/core/testtree.py rename to 
holoviews/tests/core/test_tree.py diff --git a/holoviews/tests/core/testutils.py b/holoviews/tests/core/test_utils.py similarity index 78% rename from holoviews/tests/core/testutils.py rename to holoviews/tests/core/test_utils.py index cc1c21971e..745e05c7f6 100644 --- a/holoviews/tests/core/testutils.py +++ b/holoviews/tests/core/test_utils.py @@ -2,10 +2,11 @@ """ Unit tests of the helper functions in core.utils """ -import sys, math -import unittest import datetime -from unittest import SkipTest, skipIf +import math +import unittest + +from unittest import skipIf from itertools import product from collections import OrderedDict @@ -25,8 +26,6 @@ from holoviews.streams import PointerXY from holoviews.element.comparison import ComparisonTestCase -py_version = sys.version_info.major - sanitize_identifier = sanitize_identifier_fn.instance() pd_skip = skipIf(pd is None, "pandas is not available") @@ -162,34 +161,16 @@ def test_allowable_false_underscore(self): def test_allowable_true(self): self.assertEqual(sanitize_identifier.allowable('some_string'), True) - def test_prefix_test1_py2(self): - if py_version != 2: raise SkipTest - prefixed = sanitize_identifier.prefixed('_some_string', version=2) - self.assertEqual(prefixed, True) - - def test_prefix_test2_py2(self): - if py_version != 2: raise SkipTest - prefixed = sanitize_identifier.prefixed('some_string', version=2) - self.assertEqual(prefixed, False) - - def test_prefix_test3_py2(self): - if py_version != 2: raise SkipTest - prefixed = sanitize_identifier.prefixed('0some_string', version=2) - self.assertEqual(prefixed, True) - - def test_prefix_test1_py3(self): - if py_version != 3: raise SkipTest - prefixed = sanitize_identifier.prefixed('_some_string', version=3) + def test_prefix_test1(self): + prefixed = sanitize_identifier.prefixed('_some_string') self.assertEqual(prefixed, True) - def test_prefix_test2_py3(self): - if py_version != 3: raise SkipTest - prefixed = sanitize_identifier.prefixed('some_string', 
version=3) + def test_prefix_test2(self): + prefixed = sanitize_identifier.prefixed('some_string') self.assertEqual(prefixed, False) - def test_prefix_test3_py3(self): - if py_version != 3: raise SkipTest - prefixed = sanitize_identifier.prefixed('۵some_string', version=3) + def test_prefix_test3(self): + prefixed = sanitize_identifier.prefixed('۵some_string') self.assertEqual(prefixed, True) @@ -201,198 +182,98 @@ def test_simple_lowercase_string(self): def test_simple_uppercase_string(self): self.assertEqual(tree_attribute('UPPERCASE'), True) - def test_unicode_string(self): - if py_version != 2: raise SkipTest - self.assertEqual(tree_attribute('𝜗unicode'), True) - def test_underscore_string(self): self.assertEqual(tree_attribute('_underscore'), False) -class TestSanitizationPy2(ComparisonTestCase): - """ - Tests of sanitize_identifier (Python 2) - """ - def setUp(self): - if py_version != 2: raise SkipTest - - def test_simple_pound_sanitized_py2(self): - sanitized = sanitize_identifier('£', version=2) - self.assertEqual(sanitized, 'pound') - - def test_simple_digit_sanitized_py2(self): - sanitized = sanitize_identifier('0', version=2) - self.assertEqual(sanitized, 'A_0') - - def test_simple_underscore_sanitized_py2(self): - sanitized = sanitize_identifier('_test', version=2) - self.assertEqual(sanitized, 'A__test') - - def test_simple_alpha_sanitized_py2(self): - sanitized = sanitize_identifier('α', version=2) - self.assertEqual(sanitized, 'alpha') - - def test_simple_a_pound_sanitized_py2(self): - sanitized = sanitize_identifier('a £', version=2) - self.assertEqual(sanitized, 'A_pound') - - def test_capital_delta_sanitized_py2(self): - sanitized = sanitize_identifier('Δ', version=2) - self.assertEqual(sanitized, 'Delta') - - def test_lowercase_delta_sanitized_py2(self): - sanitized = sanitize_identifier('δ', version=2) - self.assertEqual(sanitized, 'delta') - - def test_simple_alpha_beta_sanitized_py2(self): - sanitized = sanitize_identifier('α β', version=2) - 
self.assertEqual(sanitized, 'alpha_beta') - - def test_simple_alpha_beta_underscore_sanitized_py2(self): - sanitized = sanitize_identifier('α_β', version=2) - self.assertEqual(sanitized, 'alpha_beta') - def test_simple_alpha_beta_double_underscore_sanitized_py2(self): - sanitized = sanitize_identifier('α__β', version=2) - self.assertEqual(sanitized, 'alpha__beta') - - def test_simple_alpha_beta_mixed_underscore_space_sanitized_py2(self): - sanitized = sanitize_identifier('α__ β', version=2) - self.assertEqual(sanitized, 'alpha__beta') - - def test_alpha_times_two_py2(self): - sanitized = sanitize_identifier('α*2', version=2) - self.assertEqual(sanitized, 'alpha_times_2') - - def test_arabic_five_sanitized_py2(self): - """ - Note: There would be a clash if you mixed the languages of - your digits! E.g arabic ٥ five and urdu ۵ five - """ - sanitized = sanitize_identifier('٥', version=2) - self.assertEqual(sanitized, 'five') - - def test_urdu_five_sanitized_py2(self): - """ - Note: There would be a clash if you mixed the languages of - your digits! E.g arabic ٥ five and urdu ۵ five - """ - sanitized = sanitize_identifier('۵', version=2) - self.assertEqual(sanitized, 'five') - - def test_urdu_a_five_sanitized_py2(self): - """ - Note: There would be a clash if you mixed the languages of - your digits! 
E.g arabic ٥ five and urdu ۵ five - """ - sanitized = sanitize_identifier('a ۵', version=2) - self.assertEqual(sanitized, 'A_five') - - def test_umlaut_sanitized_py2(self): - sanitized = sanitize_identifier('Festkörperphysik', version=2) - self.assertEqual(sanitized, 'Festkorperphysik') - - def test_power_umlaut_sanitized_py2(self): - sanitized = sanitize_identifier('^Festkörperphysik', version=2) - self.assertEqual(sanitized, 'power_Festkorperphysik') - - def test_custom_dollar_removal_py2(self): - sanitize_identifier.eliminations.extend(['dollar']) - sanitized = sanitize_identifier('$E$', version=2) - self.assertEqual(sanitized, 'E') - sanitize_identifier.eliminations.remove('dollar') - - -class TestSanitizationPy3(ComparisonTestCase): +class TestSanitization(ComparisonTestCase): """ - Tests of sanitize_identifier (Python 3) + Tests of sanitize_identifier """ - def setUp(self): - if py_version != 3: raise SkipTest - - def test_simple_pound_sanitized_py3(self): - sanitized = sanitize_identifier('£', version=3) + def test_simple_pound_sanitized(self): + sanitized = sanitize_identifier('£') self.assertEqual(sanitized, 'pound') - def test_simple_digit_sanitized_py3(self): - sanitized = sanitize_identifier('0', version=3) + def test_simple_digit_sanitized(self): + sanitized = sanitize_identifier('0') self.assertEqual(sanitized, 'A_0') - def test_simple_underscore_sanitized_py3(self): - sanitized = sanitize_identifier('_test', version=3) + def test_simple_underscore_sanitized(self): + sanitized = sanitize_identifier('_test') self.assertEqual(sanitized, 'A__test') - def test_simple_alpha_sanitized_py3(self): - sanitized = sanitize_identifier('α', version=3) + def test_simple_alpha_sanitized(self): + sanitized = sanitize_identifier('α') self.assertEqual(sanitized, 'α') - def test_simple_a_pound_sanitized_py3(self): - sanitized = sanitize_identifier('a £', version=3) + def test_simple_a_pound_sanitized(self): + sanitized = sanitize_identifier('a £') 
self.assertEqual(sanitized, 'A_pound') - def test_capital_delta_sanitized_py3(self): - sanitized = sanitize_identifier('Δ', version=3) + def test_capital_delta_sanitized(self): + sanitized = sanitize_identifier('Δ') self.assertEqual(sanitized, 'Δ') - def test_lowercase_delta_sanitized_py3(self): - sanitized = sanitize_identifier('δ', version=3) + def test_lowercase_delta_sanitized(self): + sanitized = sanitize_identifier('δ') self.assertEqual(sanitized, 'δ') - def test_simple_alpha_beta_sanitized_py3(self): - sanitized = sanitize_identifier('α β', version=3) + def test_simple_alpha_beta_sanitized(self): + sanitized = sanitize_identifier('α β') self.assertEqual(sanitized, 'α_β') - def test_simple_alpha_beta_underscore_sanitized_py3(self): - sanitized = sanitize_identifier('α_β', version=3) + def test_simple_alpha_beta_underscore_sanitized(self): + sanitized = sanitize_identifier('α_β') self.assertEqual(sanitized, 'α_β') - def test_simple_alpha_beta_double_underscore_sanitized_py3(self): - sanitized = sanitize_identifier('α__β', version=3) + def test_simple_alpha_beta_double_underscore_sanitized(self): + sanitized = sanitize_identifier('α__β') self.assertEqual(sanitized, 'α__β') - def test_simple_alpha_beta_mixed_underscore_space_sanitized_py3(self): - sanitized = sanitize_identifier('α__ β', version=3) + def test_simple_alpha_beta_mixed_underscore_space_sanitized(self): + sanitized = sanitize_identifier('α__ β') self.assertEqual(sanitized, 'α__β') - def test_alpha_times_two_py3(self): - sanitized = sanitize_identifier('α*2', version=3) + def test_alpha_times_two(self): + sanitized = sanitize_identifier('α*2') self.assertEqual(sanitized, 'α_times_2') - def test_arabic_five_sanitized_py3(self): + def test_arabic_five_sanitized(self): """ Note: There would be a clash if you mixed the languages of - your digits! E.g arabic ٥ five and urdu ۵ five + your digits! E.g. 
arabic ٥ five and urdu ۵ five """ try: - sanitize_identifier('٥', version=3) + sanitize_identifier('٥') except SyntaxError as e: assert str(e).startswith("String '٥' cannot be sanitized") - def test_urdu_five_sanitized_py3(self): + def test_urdu_five_sanitized(self): try: - sanitize_identifier('۵', version=3) + sanitize_identifier('۵') except SyntaxError as e: assert str(e).startswith("String '۵' cannot be sanitized") - def test_urdu_a_five_sanitized_py3(self): + def test_urdu_a_five_sanitized(self): """ Note: There would be a clash if you mixed the languages of - your digits! E.g arabic ٥ five and urdu ۵ five + your digits! E.g. arabic ٥ five and urdu ۵ five """ - sanitized = sanitize_identifier('a ۵', version=3) + sanitized = sanitize_identifier('a ۵') self.assertEqual(sanitized, 'A_۵') - def test_umlaut_sanitized_py3(self): - sanitized = sanitize_identifier('Festkörperphysik', version=3) + def test_umlaut_sanitized(self): + sanitized = sanitize_identifier('Festkörperphysik') self.assertEqual(sanitized, 'Festkörperphysik') - def test_power_umlaut_sanitized_py3(self): - sanitized = sanitize_identifier('^Festkörperphysik', version=3) + def test_power_umlaut_sanitized(self): + sanitized = sanitize_identifier('^Festkörperphysik') self.assertEqual(sanitized, 'power_Festkörperphysik') def test_custom_dollar_removal_py2(self): sanitize_identifier.eliminations.extend(['dollar']) - sanitized = sanitize_identifier('$E$', version=3) + sanitized = sanitize_identifier('$E$') self.assertEqual(sanitized, 'E') sanitize_identifier.eliminations.remove('dollar') @@ -660,6 +541,23 @@ def test_date_range_1_sec(self): self.assertEqual(drange[0], start+np.timedelta64(50, 'ms')) self.assertEqual(drange[-1], end-np.timedelta64(50, 'ms')) + @pd_skip + def test_timezone_to_int(self): + import pytz + timezone = pytz.timezone("Europe/Copenhagen") + + values = [ + datetime.datetime(2021, 4, 8, 12, 0, 0, 0), + datetime.datetime(2021, 4, 8, 12, 0, 0, 0, datetime.timezone.utc), + 
datetime.datetime(2021, 4, 8, 12, 0, 0, 0, timezone), + datetime.date(2021, 4, 8), + np.datetime64(datetime.datetime(2021, 4, 8, 12, 0, 0, 0)), + ] + + for value in values: + x1 = dt_to_int(value) + x2 = dt_to_int(pd.to_datetime(value)) + self.assertEqual(x1, x2) class TestNumericUtilities(ComparisonTestCase): diff --git a/holoviews/tests/element/testannotations.py b/holoviews/tests/element/test_annotations.py similarity index 68% rename from holoviews/tests/element/testannotations.py rename to holoviews/tests/element/test_annotations.py index 6b7566f1ca..153feb4643 100644 --- a/holoviews/tests/element/testannotations.py +++ b/holoviews/tests/element/test_annotations.py @@ -1,9 +1,11 @@ import numpy as np +import pytest from holoviews import HLine, VLine, Text, Arrow, Annotation, Spline from holoviews.element.comparison import ComparisonTestCase from holoviews.element import Points + class AnnotationTests(ComparisonTestCase): """ Tests allowable data formats when constructing @@ -11,16 +13,10 @@ class AnnotationTests(ComparisonTestCase): """ def test_hline_invalid_constructor(self): - with self.assertRaises(Exception): + err = "ClassSelector parameter 'y' value must be an instance of" + with pytest.raises(ValueError) as excinfo: HLine(None) - - # NOTE: This is the correct version of the test above but it will - # not work until the fix in param PR #149 is available. 
- - # def test_hline_invalid_constructor(self): - # regexp = "Parameter 'y' only takes numeric values" - # with self.assertRaisesRegexp(ValueError, regexp): - # hline = HLine(None) + assert err in str(excinfo.value) def test_text_string_position(self): text = Text('A', 1, 'A') @@ -32,10 +28,28 @@ def test_hline_dimension_values(self): self.assertTrue(all(not np.isfinite(v) for v in hline.range(0))) self.assertEqual(hline.range(1), (0, 0)) + # Testing numpy inputs + hline = HLine(np.array([0])) + self.assertTrue(all(not np.isfinite(v) for v in hline.range(0))) + self.assertEqual(hline.range(1), (0, 0)) + + hline = HLine(np.array(0)) + self.assertTrue(all(not np.isfinite(v) for v in hline.range(0))) + self.assertEqual(hline.range(1), (0, 0)) + def test_vline_dimension_values(self): - hline = VLine(0) - self.assertEqual(hline.range(0), (0, 0)) - self.assertTrue(all(not np.isfinite(v) for v in hline.range(1))) + vline = VLine(0) + self.assertEqual(vline.range(0), (0, 0)) + self.assertTrue(all(not np.isfinite(v) for v in vline.range(1))) + + # Testing numpy inputs + vline = VLine(np.array([0])) + self.assertEqual(vline.range(0), (0, 0)) + self.assertTrue(all(not np.isfinite(v) for v in vline.range(1))) + + vline = VLine(np.array(0)) + self.assertEqual(vline.range(0), (0, 0)) + self.assertTrue(all(not np.isfinite(v) for v in vline.range(1))) def test_arrow_redim_range_aux(self): annotations = Arrow(0, 0) diff --git a/holoviews/tests/element/testapiconsistency.py b/holoviews/tests/element/test_apiconsistency.py similarity index 100% rename from holoviews/tests/element/testapiconsistency.py rename to holoviews/tests/element/test_apiconsistency.py diff --git a/holoviews/tests/element/testcomparisonchart.py b/holoviews/tests/element/test_comparisonchart.py similarity index 100% rename from holoviews/tests/element/testcomparisonchart.py rename to holoviews/tests/element/test_comparisonchart.py diff --git a/holoviews/tests/element/testcomparisoncomposite.py 
b/holoviews/tests/element/test_comparisoncomposite.py similarity index 100% rename from holoviews/tests/element/testcomparisoncomposite.py rename to holoviews/tests/element/test_comparisoncomposite.py diff --git a/holoviews/tests/element/testcomparisondimension.py b/holoviews/tests/element/test_comparisondimension.py similarity index 94% rename from holoviews/tests/element/testcomparisondimension.py rename to holoviews/tests/element/test_comparisondimension.py index efe64c59fc..1234ce27c8 100644 --- a/holoviews/tests/element/testcomparisondimension.py +++ b/holoviews/tests/element/test_comparisondimension.py @@ -1,16 +1,14 @@ """ Test cases for Dimension and Dimensioned object comparison. """ -import sys from holoviews.core import Dimension, Dimensioned from holoviews.element.comparison import ComparisonTestCase -py3 = (sys.version_info.major == 3) class DimensionsComparisonTestCase(ComparisonTestCase): def setUp(self): - super(DimensionsComparisonTestCase, self).setUp() + super().setUp() self.dimension1 = Dimension('dim1', range=(0,1)) self.dimension2 = Dimension('dim2', range=(0,1)) self.dimension3 = Dimension('dim1', range=(0,2)) @@ -82,11 +80,7 @@ def test_dimension_comparison_types_unequal(self): try: self.assertEqual(self.dimension9, self.dimension10) except AssertionError as e: - if py3: - self.assertEqual(str(e), "Dimension parameter 'type' mismatched: != ") - else: - self.assertEqual(str(e), "Dimension parameter 'type' mismatched: != ") - + self.assertEqual(str(e), "Dimension parameter 'type' mismatched: != ") def test_dimension_comparison_value_format_unequal(self): # Comparing callables is skipped @@ -98,7 +92,7 @@ def test_dimension_comparison_value_format_unequal(self): class DimensionedComparisonTestCase(ComparisonTestCase): def setUp(self): - super(DimensionedComparisonTestCase, self).setUp() + super().setUp() # Value dimension lists self.value_list1 = [Dimension('val1')] self.value_list2 = [Dimension('val2')] diff --git 
a/holoviews/tests/element/testcomparisonpath.py b/holoviews/tests/element/test_comparisonpath.py similarity index 100% rename from holoviews/tests/element/testcomparisonpath.py rename to holoviews/tests/element/test_comparisonpath.py diff --git a/holoviews/tests/element/testcomparisonraster.py b/holoviews/tests/element/test_comparisonraster.py similarity index 98% rename from holoviews/tests/element/testcomparisonraster.py rename to holoviews/tests/element/test_comparisonraster.py index c2930dd313..0b317cd856 100644 --- a/holoviews/tests/element/testcomparisonraster.py +++ b/holoviews/tests/element/test_comparisonraster.py @@ -28,7 +28,7 @@ def setUp(self): class RasterOverlayTestCase(RasterTestCase): def setUp(self): - super(RasterOverlayTestCase, self).setUp() + super().setUp() # Two overlays of depth two with different layers self.overlay1_depth2 = (self.mat1 * self.mat2) self.overlay2_depth2 = (self.mat1 * self.mat3) @@ -41,7 +41,7 @@ def setUp(self): class RasterMapTestCase(RasterOverlayTestCase): def setUp(self): - super(RasterMapTestCase, self).setUp() + super().setUp() # Example 1D map self.map1_1D = HoloMap(kdims=['int']) self.map1_1D[0] = self.mat1 diff --git a/holoviews/tests/element/testcomparisonsimple.py b/holoviews/tests/element/test_comparisonsimple.py similarity index 100% rename from holoviews/tests/element/testcomparisonsimple.py rename to holoviews/tests/element/test_comparisonsimple.py diff --git a/holoviews/tests/element/testelementconstructors.py b/holoviews/tests/element/test_elementconstructors.py similarity index 95% rename from holoviews/tests/element/testelementconstructors.py rename to holoviews/tests/element/test_elementconstructors.py index 8c5b743598..e85ef08fc7 100644 --- a/holoviews/tests/element/testelementconstructors.py +++ b/holoviews/tests/element/test_elementconstructors.py @@ -1,12 +1,12 @@ import param import numpy as np -from holoviews import (Dimension, Dataset, Element, Annotation, Curve, - Path, Histogram, HeatMap, 
Contours, Scatter, - Points, Polygons, VectorField, Spikes, Area, - Bars, ErrorBars, BoxWhisker, Raster, Image, - QuadMesh, RGB, Graph, TriMesh, Div, Tiles, - Trisurface) +from holoviews import ( + Dimension, Dataset, Element, Annotation, Curve, Path, Histogram, + HeatMap, Contours, Scatter, Points, Polygons, VectorField, Spikes, + Area, Bars, ErrorBars, BoxWhisker, Raster, Image, QuadMesh, RGB, + Graph, TriMesh, Div, Tiles +) from holoviews.element.path import BaseShape from holoviews.element.comparison import ComparisonTestCase @@ -26,12 +26,12 @@ def setUp(self): self.curve = Curve(sine_data) self.path = Path([sine_data, cos_data]) self.histogram = Histogram((self.hxs, self.sin)) - super(ElementConstructorTest, self).setUp() + super().setUp() def test_empty_element_constructor(self): failed_elements = [] for name, el in param.concrete_descendents(Element).items(): - if issubclass(el, (Annotation, BaseShape, Div, Tiles, Trisurface)): + if issubclass(el, (Annotation, BaseShape, Div, Tiles)): continue try: el([]) diff --git a/holoviews/tests/element/testelementranges.py b/holoviews/tests/element/test_elementranges.py similarity index 100% rename from holoviews/tests/element/testelementranges.py rename to holoviews/tests/element/test_elementranges.py diff --git a/holoviews/tests/element/testelementselect.py b/holoviews/tests/element/test_elementselect.py similarity index 99% rename from holoviews/tests/element/testelementselect.py rename to holoviews/tests/element/test_elementselect.py index 1530d746b2..5ed3473dba 100644 --- a/holoviews/tests/element/testelementselect.py +++ b/holoviews/tests/element/test_elementselect.py @@ -117,7 +117,7 @@ def test_datetime_select(self): def test_selection_spec_positional_error_message(self): s, e = '1999-12-31', '2000-1-2' curve = self.datetime_fn() - with self.assertRaisesRegexp( + with self.assertRaisesRegex( ValueError, "Use the selection_specs keyword" ): curve.select((Curve,), time=(s, e)) diff --git 
a/holoviews/tests/element/testellipsis.py b/holoviews/tests/element/test_ellipsis.py similarity index 99% rename from holoviews/tests/element/testellipsis.py rename to holoviews/tests/element/test_ellipsis.py index 9de4c446f9..0346591455 100644 --- a/holoviews/tests/element/testellipsis.py +++ b/holoviews/tests/element/test_ellipsis.py @@ -60,7 +60,7 @@ def setUp(self): self.table =hv.Table(zip(keys,values), kdims = ['Gender', 'Age'], vdims=['Weight', 'Height']) - super(TestEllipsisTable, self).setUp() + super().setUp() def test_table_ellipsis_slice_value_weight(self): sliced = self.table[..., 'Weight'] diff --git a/holoviews/tests/element/testgraphelement.py b/holoviews/tests/element/test_graphelement.py similarity index 99% rename from holoviews/tests/element/testgraphelement.py rename to holoviews/tests/element/test_graphelement.py index 90309cb40b..cfa9dcf415 100644 --- a/holoviews/tests/element/testgraphelement.py +++ b/holoviews/tests/element/test_graphelement.py @@ -202,7 +202,7 @@ def test_from_networkx_dictionary_positions(self): graph = Graph.from_networkx(G, positions) self.assertEqual(graph.nodes.dimension_values(2), np.array([1, 2, 3])) - + class ChordTests(ComparisonTestCase): diff --git a/holoviews/tests/element/testimage.py b/holoviews/tests/element/test_image.py similarity index 98% rename from holoviews/tests/element/testimage.py rename to holoviews/tests/element/test_image.py index a9c0f5da90..a84bf0d214 100644 --- a/holoviews/tests/element/testimage.py +++ b/holoviews/tests/element/test_image.py @@ -11,7 +11,7 @@ class TestImage(LoggingComparisonTestCase): def setUp(self): - super(TestImage, self).setUp() + super().setUp() self.array1 = np.array([(0, 1, 2), (3, 4, 5)]) def test_image_init(self): diff --git a/holoviews/tests/element/testpaths.py b/holoviews/tests/element/test_paths.py similarity index 99% rename from holoviews/tests/element/testpaths.py rename to holoviews/tests/element/test_paths.py index 48340ccfd3..c9e5e843c9 100644 --- 
a/holoviews/tests/element/testpaths.py +++ b/holoviews/tests/element/test_paths.py @@ -83,7 +83,7 @@ def test_dataset_groupby_path(self): self.assertEqual(subpaths[0], Path([(0, 1), (1, 2)])) self.assertEqual(subpaths[1], Path([(2, 3), (3, 4)])) - + class PolygonsTests(ComparisonTestCase): def setUp(self): diff --git a/holoviews/tests/element/testraster.py b/holoviews/tests/element/test_raster.py similarity index 99% rename from holoviews/tests/element/testraster.py rename to holoviews/tests/element/test_raster.py index c25052f9b4..aa47f66329 100644 --- a/holoviews/tests/element/testraster.py +++ b/holoviews/tests/element/test_raster.py @@ -43,11 +43,11 @@ def test_construct_from_array_with_alpha(self): def test_construct_from_tuple_with_alpha(self): rgb = RGB(([0, 1, 2], [0, 1, 2], self.rgb_array)) self.assertEqual(len(rgb.vdims), 4) - + def test_construct_from_dict_with_alpha(self): rgb = RGB({'x': [1, 2, 3], 'y': [1, 2, 3], ('R', 'G', 'B', 'A'): self.rgb_array}) self.assertEqual(len(rgb.vdims), 4) - + class TestQuadMesh(ComparisonTestCase): diff --git a/holoviews/tests/element/test_selection.py b/holoviews/tests/element/test_selection.py index cf7d2095e6..538683157e 100644 --- a/holoviews/tests/element/test_selection.py +++ b/holoviews/tests/element/test_selection.py @@ -44,7 +44,7 @@ def setUp(self): import holoviews.plotting.bokeh # noqa except: raise SkipTest("Bokeh selection tests require bokeh.") - super(TestSelection1DExpr, self).setUp() + super().setUp() self._backend = Store.current_backend Store.set_current_backend('bokeh') @@ -226,7 +226,7 @@ def setUp(self): import holoviews.plotting.bokeh # noqa except: raise SkipTest("Bokeh selection tests require bokeh.") - super(TestSelection2DExpr, self).setUp() + super().setUp() self._backend = Store.current_backend Store.set_current_backend('bokeh') @@ -350,10 +350,10 @@ def test_img_selection_geom(self): self.assertEqual(bbox, {'x': np.array([-0.4, 0.6, 0.4, -0.1]), 'y': np.array([-0.1, -0.1, 1.7, 1.7])}) 
self.assertEqual(expr.apply(img, expanded=True, flat=False), np.array([ - [ True, False, False], - [ True, False, False], - [ False, False, False], - [False, False, False] + [ 1., np.nan, np.nan], + [ 1., np.nan, np.nan], + [np.nan, np.nan, np.nan], + [np.nan, np.nan, np.nan] ])) self.assertEqual(region, Rectangles([]) * Path([list(geom)+[(-0.4, -0.1)]])) @@ -441,7 +441,7 @@ def setUp(self): import holoviews.plotting.bokeh # noqa except: raise SkipTest("Bokeh selection tests require bokeh.") - super(TestSelectionGeomExpr, self).setUp() + super().setUp() self._backend = Store.current_backend Store.set_current_backend('bokeh') @@ -458,7 +458,7 @@ def test_rect_selection_numeric(self): self.assertEqual(bbox, {'x0': (0, 3.5), 'y0': (0.9, 4.9), 'x1': (0, 3.5), 'y1': (0.9, 4.9)}) self.assertEqual(expr.apply(rect), np.array([True, True, True])) self.assertEqual(region, Rectangles([(0, 0.9, 3.5, 4.9)]) * Path([])) - + def test_rect_selection_numeric_inverted(self): rect = Rectangles([(0, 1, 2, 3), (1, 3, 1.5, 4), (2.5, 4.2, 3.5, 4.8)]).opts(invert_axes=True) expr, bbox, region = rect._get_selection_expr_for_stream_value(bounds=(0.9, 0.5, 4.9, 3.4)) @@ -548,7 +548,7 @@ def setUp(self): import holoviews.plotting.bokeh # noqa except: raise SkipTest("Bokeh selection tests require bokeh.") - super(TestSelectionPolyExpr, self).setUp() + super().setUp() self._backend = Store.current_backend Store.set_current_backend('bokeh') diff --git a/holoviews/tests/element/teststatselements.py b/holoviews/tests/element/test_statselements.py similarity index 99% rename from holoviews/tests/element/teststatselements.py rename to holoviews/tests/element/test_statselements.py index 8c54de468f..e65b38023a 100644 --- a/holoviews/tests/element/teststatselements.py +++ b/holoviews/tests/element/test_statselements.py @@ -125,7 +125,7 @@ def setUp(self): raise SkipTest('SciPy not available') self.renderer = hv.renderer('matplotlib') np.random.seed(42) - super(StatisticalCompositorTest, self).setUp() + 
super().setUp() def test_distribution_composite(self): dist = Distribution(np.array([0, 1, 2])) diff --git a/holoviews/tests/element/testtiles.py b/holoviews/tests/element/test_tiles.py similarity index 100% rename from holoviews/tests/element/testtiles.py rename to holoviews/tests/element/test_tiles.py diff --git a/holoviews/tests/ipython/testdisplayhooks.py b/holoviews/tests/ipython/test_displayhooks.py similarity index 86% rename from holoviews/tests/ipython/testdisplayhooks.py rename to holoviews/tests/ipython/test_displayhooks.py index 2e66a7ab12..ba7316e8bb 100644 --- a/holoviews/tests/ipython/testdisplayhooks.py +++ b/holoviews/tests/ipython/test_displayhooks.py @@ -5,7 +5,7 @@ class TestDisplayHooks(IPTestCase): def setUp(self): - super(TestDisplayHooks, self).setUp() + super().setUp() if not notebook_extension._loaded: notebook_extension('matplotlib', ip=self.ip) self.backup = Store.display_formats @@ -17,14 +17,14 @@ def tearDown(self): del self.ip Store.display_hooks = self.backup notebook_extension._loaded = False - super(TestDisplayHooks, self).tearDown() + super().tearDown() class TestHTMLDisplay(TestDisplayHooks): def setUp(self): self.format = ['html'] - super(TestHTMLDisplay, self).setUp() + super().setUp() def test_store_render_html(self): curve = Curve([1, 2, 3]) @@ -37,7 +37,7 @@ class TestPNGDisplay(TestDisplayHooks): def setUp(self): self.format = ['png'] - super(TestPNGDisplay, self).setUp() + super().setUp() def test_store_render_png(self): curve = Curve([1, 2, 3]) @@ -50,7 +50,7 @@ class TestSVGDisplay(TestDisplayHooks): def setUp(self): self.format = ['svg'] - super(TestSVGDisplay, self).setUp() + super().setUp() def test_store_render_svg(self): curve = Curve([1, 2, 3]) @@ -63,7 +63,7 @@ class TestCombinedDisplay(TestDisplayHooks): def setUp(self): self.format = ['html', 'svg', 'png'] - super(TestCombinedDisplay, self).setUp() + super().setUp() def test_store_render_combined(self): curve = Curve([1, 2, 3]) diff --git 
a/holoviews/tests/ipython/testmagics.py b/holoviews/tests/ipython/test_magics.py similarity index 95% rename from holoviews/tests/ipython/testmagics.py rename to holoviews/tests/ipython/test_magics.py index 3ae66a30b1..a89d739333 100644 --- a/holoviews/tests/ipython/testmagics.py +++ b/holoviews/tests/ipython/test_magics.py @@ -14,7 +14,7 @@ class ExtensionTestCase(IPTestCase): def setUp(self): - super(ExtensionTestCase, self).setUp() + super().setUp() self.ip.run_line_magic("load_ext", "holoviews.ipython") for renderer in Store.renderers.values(): renderer.comm_manager = CommManager @@ -23,21 +23,19 @@ def tearDown(self): Store._custom_options = {k:{} for k in Store._custom_options.keys()} self.ip.run_line_magic("unload_ext", "holoviews.ipython") del self.ip - super(ExtensionTestCase, self).tearDown() - + super().tearDown() class TestOptsMagic(ExtensionTestCase): def setUp(self): - super(TestOptsMagic, self).setUp() + super().setUp() self.cell("import numpy as np") self.cell("from holoviews import DynamicMap, Curve, Image") def tearDown(self): Store.custom_options(val = {}) - super(TestOptsMagic, self).tearDown() - + super().tearDown() def test_cell_opts_style(self): @@ -126,7 +124,7 @@ def test_cell_opts_norm(self): class TestOutputMagic(ExtensionTestCase): def tearDown(self): - super(TestOutputMagic, self).tearDown() + super().tearDown() def test_output_svg(self): self.line_magic('output', "fig='svg'") @@ -161,7 +159,7 @@ def test_output_invalid_size(self): class TestCompositorMagic(ExtensionTestCase): def setUp(self): - super(TestCompositorMagic, self).setUp() + super().setUp() self.cell("import numpy as np") self.cell("from holoviews.element import Image") self.definitions = list(Compositor.definitions) @@ -169,7 +167,7 @@ def setUp(self): def tearDown(self): Compositor.definitions[:] = self.definitions - super(TestCompositorMagic, self).tearDown() + super().tearDown() def test_display_compositor_definition(self): definition = " display factory(Image * Image * 
Image) RGBTEST" diff --git a/holoviews/tests/ipython/testnotebooks.py b/holoviews/tests/ipython/test_notebooks.py similarity index 54% rename from holoviews/tests/ipython/testnotebooks.py rename to holoviews/tests/ipython/test_notebooks.py index 39f254e998..bdb0691e77 100644 --- a/holoviews/tests/ipython/testnotebooks.py +++ b/holoviews/tests/ipython/test_notebooks.py @@ -2,9 +2,11 @@ """ Unit tests relating to notebook processing """ -import nbformat, nbconvert +import os + +import nbconvert +import nbformat -import os, sys from holoviews.element.comparison import ComparisonTestCase from holoviews.ipython.preprocessors import OptsMagicProcessor, OutputMagicProcessor @@ -23,43 +25,28 @@ class TestOptsPreprocessor(ComparisonTestCase): def test_opts_image_line_magic(self): nbname = 'test_opts_image_line_magic.ipynb' - if sys.version_info.major == 2: - expected = """hv.util.opts(u" Image [xaxis=None] (cmap='viridis')")""" - else: - expected = """hv.util.opts(" Image [xaxis=None] (cmap='viridis')")""" + expected = """hv.util.opts(" Image [xaxis=None] (cmap='viridis')")""" source = apply_preprocessors([OptsMagicProcessor()], nbname) self.assertEqual(source.strip().endswith(expected), True) def test_opts_image_cell_magic(self): nbname = 'test_opts_image_cell_magic.ipynb' - if sys.version_info.major == 2: - expected = ("""hv.util.opts(u" Image [xaxis=None] (cmap='viridis')", """ - + """hv.Image(np.random.rand(20,20)))""") - else: - expected = ("""hv.util.opts(" Image [xaxis=None] (cmap='viridis')", """ - + """hv.Image(np.random.rand(20,20)))""") + expected = ("""hv.util.opts(" Image [xaxis=None] (cmap='viridis')", """ + + """hv.Image(np.random.rand(20,20)))""") source = apply_preprocessors([OptsMagicProcessor()], nbname) self.assertEqual(source.strip().endswith(expected), True) def test_opts_image_cell_magic_offset(self): nbname = 'test_opts_image_cell_magic_offset.ipynb' - if sys.version_info.major == 2: - expected = (" 'An expression (literal) on the same line';\n" + - 
"""hv.util.opts(u" Image [xaxis=None] (cmap='viridis')",""" - + """hv.Image(np.random.rand(20,20)))""") - else: - # FIXME: Not quite right yet, shouldn't have a leading space or a newline - expected = (" 'An expression (literal) on the same line';\n" - + """hv.util.opts(" Image [xaxis=None] (cmap='viridis')", """ - + """hv.Image(np.random.rand(20,20)))""") + # FIXME: Not quite right yet, shouldn't have a leading space or a newline + expected = (" 'An expression (literal) on the same line';\n" + + """hv.util.opts(" Image [xaxis=None] (cmap='viridis')", """ + + """hv.Image(np.random.rand(20,20)))""") source = apply_preprocessors([OptsMagicProcessor()], nbname) self.assertEqual(source.strip().endswith(expected), False) def test_opts_image_line_magic_svg(self): nbname = 'test_output_svg_line_magic.ipynb' - if sys.version_info.major == 2: - expected = """hv.util.output(u" fig='svg'")""" - else: - expected = """hv.util.output(" fig='svg'")""" + expected = """hv.util.output(" fig='svg'")""" source = apply_preprocessors([OutputMagicProcessor()], nbname) self.assertEqual(source.strip().endswith(expected), True) diff --git a/holoviews/tests/ipython/testoptscompleter.py b/holoviews/tests/ipython/test_optscompleter.py similarity index 99% rename from holoviews/tests/ipython/testoptscompleter.py rename to holoviews/tests/ipython/test_optscompleter.py index dd2a20a68a..e9a0409323 100644 --- a/holoviews/tests/ipython/testoptscompleter.py +++ b/holoviews/tests/ipython/test_optscompleter.py @@ -27,7 +27,7 @@ def setUp(self): self.compositor_defs = {} self.all_keys = sorted(self.completions.keys()) + ['style(', 'plot[', 'norm{'] - super(TestOptsCompleter, self).setUp() + super().setUp() def test_completer_setup(self): "Test setup_completions for the real completion set" diff --git a/holoviews/tests/ipython/testparsers.py b/holoviews/tests/ipython/test_parsers.py similarity index 100% rename from holoviews/tests/ipython/testparsers.py rename to holoviews/tests/ipython/test_parsers.py 
diff --git a/holoviews/tests/operation/testdatashader.py b/holoviews/tests/operation/test_datashader.py similarity index 71% rename from holoviews/tests/operation/testdatashader.py rename to holoviews/tests/operation/test_datashader.py index ab32a1c1ea..67366025d9 100644 --- a/holoviews/tests/operation/testdatashader.py +++ b/holoviews/tests/operation/test_datashader.py @@ -4,9 +4,12 @@ import numpy as np -from holoviews import (Dimension, Curve, Points, Image, Dataset, RGB, Path, - Graph, TriMesh, QuadMesh, NdOverlay, Contours, Spikes, - Spread, Area, Rectangles, Segments, Polygons) +from holoviews import ( + Dimension, Curve, Points, Image, Dataset, RGB, Path, Graph, TriMesh, + QuadMesh, NdOverlay, Contours, Spikes, Spread, Area, Rectangles, + Segments, Polygons, Nodes +) +from holoviews.streams import Tap from holoviews.element.comparison import ComparisonTestCase from numpy import nan @@ -17,7 +20,8 @@ from holoviews.core.util import pd from holoviews.operation.datashader import ( aggregate, regrid, ds_version, stack, directly_connect_edges, - shade, spread, rasterize + shade, spread, rasterize, datashade, AggregationOperation, + inspect, inspect_points, inspect_polygons ) except: raise SkipTest('Datashader not available') @@ -37,6 +41,12 @@ cudf_skip = skipIf(cudf is None, "cuDF not available") +import logging + +numba_logger = logging.getLogger('numba') +numba_logger.setLevel(logging.WARNING) + +AggregationOperation.vdim_prefix = '' class DatashaderAggregateTests(ComparisonTestCase): """ @@ -48,7 +58,15 @@ def test_aggregate_points(self): img = aggregate(points, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) + self.assertEqual(img, expected) + + def test_aggregate_points_count_column(self): + points = Points([(0.2, 0.3, np.NaN), (0.4, 0.7, 22), (0, 0.99,np.NaN)], vdims='z') + img = aggregate(points, dynamic=False, 
x_range=(0, 1), y_range=(0, 1), + width=2, height=2, aggregator=ds.count('z')) + expected = Image(([0.25, 0.75], [0.25, 0.75], [[0, 0], [1, 0]]), + vdims=[Dimension('z Count', nodata=0)]) self.assertEqual(img, expected) @cudf_skip @@ -58,7 +76,7 @@ def test_aggregate_points_cudf(self): img = aggregate(points, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) self.assertIsInstance(img.data.Count.data, cupy.ndarray) self.assertEqual(img, expected) @@ -67,20 +85,20 @@ def test_aggregate_zero_range_points(self): agg = rasterize(p, x_range=(0, 0), y_range=(0, 1), expand=False, dynamic=False, width=2, height=2) img = Image(([], [0.25, 0.75], np.zeros((2, 0))), bounds=(0, 0, 0, 1), - xdensity=1, vdims=['Count']) + xdensity=1, vdims=[Dimension('Count', nodata=0)]) self.assertEqual(agg, img) def test_aggregate_points_target(self): points = Points([(0.2, 0.3), (0.4, 0.7), (0, 0.99)]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = aggregate(points, dynamic=False, target=expected) self.assertEqual(img, expected) def test_aggregate_points_sampling(self): points = Points([(0.2, 0.3), (0.4, 0.7), (0, 0.99)]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = aggregate(points, dynamic=False, x_range=(0, 1), y_range=(0, 1), x_sampling=0.5, y_sampling=0.5) self.assertEqual(img, expected) @@ -90,9 +108,9 @@ def test_aggregate_points_categorical(self): img = aggregate(points, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2, aggregator=ds.count_cat('z')) xs, ys = [0.25, 0.75], [0.25, 0.75] - expected = NdOverlay({'A': Image((xs, ys, [[1, 0], [0, 0]]), vdims='z Count'), - 'B': Image((xs, ys, [[0, 0], [1, 0]]), vdims='z Count'), - 'C': Image((xs, ys, [[0, 0], [1, 0]]), 
vdims='z Count')}, + expected = NdOverlay({'A': Image((xs, ys, [[1, 0], [0, 0]]), vdims=Dimension('z Count', nodata=0)), + 'B': Image((xs, ys, [[0, 0], [1, 0]]), vdims=Dimension('z Count', nodata=0)), + 'C': Image((xs, ys, [[0, 0], [1, 0]]), vdims=Dimension('z Count', nodata=0))}, kdims=['z']) self.assertEqual(img, expected) @@ -102,16 +120,16 @@ def test_aggregate_points_categorical_zero_range(self): aggregator=ds.count_cat('z'), height=2) xs, ys = [], [0.25, 0.75] params = dict(bounds=(0, 0, 0, 1), xdensity=1) - expected = NdOverlay({'A': Image((xs, ys, np.zeros((2, 0))), vdims='z Count', **params), - 'B': Image((xs, ys, np.zeros((2, 0))), vdims='z Count', **params), - 'C': Image((xs, ys, np.zeros((2, 0))), vdims='z Count', **params)}, + expected = NdOverlay({'A': Image((xs, ys, np.zeros((2, 0))), vdims=Dimension('z Count', nodata=0), **params), + 'B': Image((xs, ys, np.zeros((2, 0))), vdims=Dimension('z Count', nodata=0), **params), + 'C': Image((xs, ys, np.zeros((2, 0))), vdims=Dimension('z Count', nodata=0), **params)}, kdims=['z']) self.assertEqual(img, expected) def test_aggregate_curve(self): curve = Curve([(0.2, 0.3), (0.4, 0.7), (0.8, 0.99)]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [1, 1]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = aggregate(curve, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -125,7 +143,7 @@ def test_aggregate_curve_datetimes(self): dates = [np.datetime64('2016-01-01T12:00:00.000000000'), np.datetime64('2016-01-02T12:00:00.000000000')] expected = Image((dates, [1.5, 2.5], [[1, 0], [0, 2]]), - datatype=['xarray'], bounds=bounds, vdims='Count') + datatype=['xarray'], bounds=bounds, vdims=Dimension('Count', nodata=0)) self.assertEqual(img, expected) def test_aggregate_curve_datetimes_dask(self): @@ -141,7 +159,8 @@ def test_aggregate_curve_datetimes_dask(self): dates = [np.datetime64('2019-01-01T04:09:45.000000000'), 
np.datetime64('2019-01-01T12:29:15.000000000')] expected = Image((dates, [166.5, 499.5, 832.5], [[332, 0], [167, 166], [0, 334]]), - ['index', 'a'], 'Count', datatype=['xarray'], bounds=bounds) + kdims=['index', 'a'], vdims=Dimension('Count', nodata=0), + datatype=['xarray'], bounds=bounds) self.assertEqual(img, expected) def test_aggregate_curve_datetimes_microsecond_timebase(self): @@ -155,7 +174,7 @@ def test_aggregate_curve_datetimes_microsecond_timebase(self): dates = [np.datetime64('2016-01-01T11:59:59.861759000',), np.datetime64('2016-01-02T12:00:00.138241000')] expected = Image((dates, [1.5, 2.5], [[1, 0], [0, 2]]), - datatype=['xarray'], bounds=bounds, vdims='Count') + datatype=['xarray'], bounds=bounds, vdims=Dimension('Count', nodata=0)) self.assertEqual(img, expected) def test_aggregate_ndoverlay_count_cat_datetimes_microsecond_timebase(self): @@ -172,9 +191,9 @@ def test_aggregate_ndoverlay_count_cat_datetimes_microsecond_timebase(self): dates = [np.datetime64('2016-01-01T11:59:59.861759000',), np.datetime64('2016-01-02T12:00:00.138241000')] expected = Image((dates, [1.5, 2.5], [[1, 0], [0, 2]]), - datatype=['xarray'], bounds=bounds, vdims='Count') + datatype=['xarray'], bounds=bounds, vdims=Dimension('Count', nodata=0)) expected2 = Image((dates, [1.5, 2.5], [[0, 1], [1, 1]]), - datatype=['xarray'], bounds=bounds, vdims='Count') + datatype=['xarray'], bounds=bounds, vdims=Dimension('Count', nodata=0)) self.assertEqual(imgs[0], expected) self.assertEqual(imgs[1], expected2) @@ -186,15 +205,16 @@ def test_aggregate_dt_xaxis_constant_yaxis(self): ys = np.array([]) bounds = (np.datetime64('1980-01-01T00:00:00.000000'), 1.0, np.datetime64('1980-01-01T01:39:00.000000'), 1.0) - expected = Image((xs, ys, np.empty((0, 3))), ['index', 'y'], 'Count', - xdensity=1, ydensity=1, bounds=bounds) + expected = Image((xs, ys, np.empty((0, 3))), ['index', 'y'], + vdims=Dimension('Count', nodata=0), xdensity=1, + ydensity=1, bounds=bounds) self.assertEqual(img, expected) 
def test_aggregate_ndoverlay(self): ds = Dataset([(0.2, 0.3, 0), (0.4, 0.7, 1), (0, 0.99, 2)], kdims=['x', 'y', 'z']) ndoverlay = ds.to(Points, ['x', 'y'], [], 'z').overlay() expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = aggregate(ndoverlay, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -202,7 +222,7 @@ def test_aggregate_ndoverlay(self): def test_aggregate_path(self): path = Path([[(0.2, 0.3), (0.4, 0.7)], [(0.4, 0.7), (0.8, 0.99)]]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 1]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = aggregate(path, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -215,12 +235,12 @@ def test_aggregate_contours_with_vdim(self): def test_aggregate_contours_without_vdim(self): contours = Contours([[(0.2, 0.3), (0.4, 0.7)], [(0.4, 0.7), (0.8, 0.99)]]) img = rasterize(contours, dynamic=False) - self.assertEqual(img.vdims, ['Count']) + self.assertEqual(img.vdims, [Dimension('Any', nodata=0)]) def test_aggregate_dframe_nan_path(self): path = Path([Path([[(0.2, 0.3), (0.4, 0.7)], [(0.4, 0.7), (0.8, 0.99)]]).dframe()]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 1]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = aggregate(path, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -228,14 +248,14 @@ def test_aggregate_dframe_nan_path(self): def test_spikes_aggregate_count(self): spikes = Spikes([1, 2, 3]) agg = rasterize(spikes, width=5, dynamic=False, expand=False) - expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims='count', + expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims=Dimension('Count', nodata=0), xdensity=2.5, ydensity=1, bounds=(1, 0, 3, 0.5)) self.assertEqual(agg, expected) def test_spikes_aggregate_count_dask(self): spikes = 
Spikes([1, 2, 3], datatype=['dask']) agg = rasterize(spikes, width=5, dynamic=False, expand=False) - expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims='count', + expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims=Dimension('Count', nodata=0), xdensity=2.5, ydensity=1, bounds=(1, 0, 3, 0.5)) self.assertEqual(agg, expected) @@ -244,7 +264,7 @@ def test_spikes_aggregate_dt_count(self): agg = rasterize(spikes, width=5, dynamic=False, expand=False) bounds = (np.datetime64('2016-01-01T00:00:00.000000'), 0, np.datetime64('2016-01-03T00:00:00.000000'), 0.5) - expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims='count', bounds=bounds) + expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims=Dimension('Count', nodata=0), bounds=bounds) self.assertEqual(agg, expected) def test_spikes_aggregate_dt_count_dask(self): @@ -253,13 +273,13 @@ def test_spikes_aggregate_dt_count_dask(self): agg = rasterize(spikes, width=5, dynamic=False, expand=False) bounds = (np.datetime64('2016-01-01T00:00:00.000000'), 0, np.datetime64('2016-01-03T00:00:00.000000'), 0.5) - expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims='count', bounds=bounds) + expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims=Dimension('Count', nodata=0), bounds=bounds) self.assertEqual(agg, expected) def test_spikes_aggregate_spike_length(self): spikes = Spikes([1, 2, 3]) agg = rasterize(spikes, width=5, dynamic=False, expand=False, spike_length=7) - expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims='count', + expected = Image(np.array([[1, 0, 1, 0, 1]]), vdims=Dimension('Count', nodata=0), xdensity=2.5, ydensity=1, bounds=(1, 0, 3, 7.0)) self.assertEqual(agg, expected) @@ -275,7 +295,7 @@ def test_spikes_aggregate_with_height_count(self): [0, 0, 1, 0, 0], [0, 0, 1, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_spikes_aggregate_with_height_count_override(self): @@ -289,7 +309,7 @@ def 
test_spikes_aggregate_with_height_count_override(self): [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_rasterize_regrid_and_spikes_overlay(self): @@ -307,7 +327,7 @@ def test_rasterize_regrid_and_spikes_overlay(self): [0, 0, 0, 0], [0, 0, 0, 0]]) expected_spikes = Image(([0.25, 0.75, 1.25, 1.75], - [0.25, 0.75, 1.25, 1.75], spikes_arr), vdims='count') + [0.25, 0.75, 1.25, 1.75], spikes_arr), vdims=Dimension('Count', nodata=0)) overlay = img * spikes agg = rasterize(overlay, width=4, height=4, x_range=(0, 2), y_range=(0, 2), spike_length=0.5, upsample=True, dynamic=False) @@ -327,7 +347,7 @@ def test_spikes_aggregate_with_height_count_dask(self): [0, 0, 1, 0, 0], [0, 0, 1, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_spikes_aggregate_with_negative_height_count(self): @@ -342,7 +362,7 @@ def test_spikes_aggregate_with_negative_height_count(self): [0, 0, 1, 0, 1], [1, 0, 1, 0, 1] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_spikes_aggregate_with_positive_and_negative_height_count(self): @@ -357,7 +377,7 @@ def test_spikes_aggregate_with_positive_and_negative_height_count(self): [0, 0, 1, 0, 0], [0, 0, 1, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_rectangles_aggregate_count(self): @@ -371,7 +391,7 @@ def test_rectangles_aggregate_count(self): [1, 2, 1, 1], [0, 0, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def 
test_rectangles_aggregate_count_cat(self): @@ -392,8 +412,8 @@ def test_rectangles_aggregate_count_cat(self): [0, 1, 1, 1], [0, 0, 0, 0] ]) - expected1 = Image((xs, ys, arr1), vdims='cat Count') - expected2 = Image((xs, ys, arr2), vdims='cat Count') + expected1 = Image((xs, ys, arr1), vdims=Dimension('cat Count', nodata=0)) + expected2 = Image((xs, ys, arr2), vdims=Dimension('cat Count', nodata=0)) expected = NdOverlay({'A': expected1, 'B': expected2}, kdims=['cat']) self.assertEqual(agg, expected) @@ -431,7 +451,7 @@ def test_rectangles_aggregate_dt_count(self): ]) bounds = (0.0, np.datetime64('2016-01-01T00:00:00'), 4.0, np.datetime64('2016-01-05T00:00:00')) - expected = Image((xs, ys, arr), bounds=bounds, vdims='count') + expected = Image((xs, ys, arr), bounds=bounds, vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_segments_aggregate_count(self): @@ -445,7 +465,7 @@ def test_segments_aggregate_count(self): [0, 1, 0, 0], [0, 1, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_segments_aggregate_sum(self, instance=False): @@ -492,7 +512,7 @@ def test_segments_aggregate_dt_count(self): ]) bounds = (0.0, np.datetime64('2016-01-01T00:00:00'), 4.0, np.datetime64('2016-01-05T00:00:00')) - expected = Image((xs, ys, arr), bounds=bounds, vdims='count') + expected = Image((xs, ys, arr), bounds=bounds, vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_area_aggregate_simple_count(self): @@ -506,7 +526,7 @@ def test_area_aggregate_simple_count(self): [0, 1, 1, 0], [0, 0, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_area_aggregate_negative_count(self): @@ -520,7 +540,7 @@ def test_area_aggregate_negative_count(self): [1, 1, 1, 1], [1, 1, 1, 1] ]) - expected = Image((xs, ys, arr), 
vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_area_aggregate_crossover_count(self): @@ -534,7 +554,7 @@ def test_area_aggregate_crossover_count(self): [1, 1, 1, 1], [0, 0, 1, 1] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_spread_aggregate_symmetric_count(self): @@ -548,7 +568,7 @@ def test_spread_aggregate_symmetric_count(self): [0, 1, 1, 0], [0, 0, 0, 1] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_spread_aggregate_assymmetric_count(self): @@ -563,7 +583,7 @@ def test_spread_aggregate_assymmetric_count(self): [0, 1, 1, 0], [0, 0, 1, 1] ]) - expected = Image((xs, ys, arr), vdims='count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) def test_rgb_regrid_packed(self): @@ -607,7 +627,7 @@ def test_line_rasterize(self): [1, 1, 1, 0], [1, 0, 1, 0] ]) - expected = Image((xs, ys, arr), vdims='Count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) @spatialpandas_skip @@ -623,7 +643,7 @@ def test_multi_line_rasterize(self): [1, 1, 1, 0], [1, 0, 1, 0] ]) - expected = Image((xs, ys, arr), vdims='Count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) @spatialpandas_skip @@ -638,7 +658,7 @@ def test_ring_rasterize(self): [0, 1, 1, 0], [0, 0, 1, 0] ]) - expected = Image((xs, ys, arr), vdims='Count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) @spatialpandas_skip @@ -658,7 +678,7 @@ def test_polygon_rasterize(self): [0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='Count') + expected = Image((xs, ys, arr), 
vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) @spatialpandas_skip @@ -692,7 +712,7 @@ def test_multi_poly_rasterize(self): [1, 1, 1, 0], [1, 1, 0, 0] ]) - expected = Image((xs, ys, arr), vdims='Count') + expected = Image((xs, ys, arr), vdims=Dimension('Count', nodata=0)) self.assertEqual(agg, expected) @@ -708,9 +728,9 @@ def test_aggregate_points_categorical(self): img = aggregate(points, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2, aggregator=ds.by('z', ds.count())) xs, ys = [0.25, 0.75], [0.25, 0.75] - expected = NdOverlay({'A': Image((xs, ys, [[1, 0], [0, 0]]), vdims='z Count'), - 'B': Image((xs, ys, [[0, 0], [1, 0]]), vdims='z Count'), - 'C': Image((xs, ys, [[0, 0], [1, 0]]), vdims='z Count')}, + expected = NdOverlay({'A': Image((xs, ys, [[1, 0], [0, 0]]), vdims=Dimension('z Count', nodata=0)), + 'B': Image((xs, ys, [[0, 0], [1, 0]]), vdims=Dimension('z Count', nodata=0)), + 'C': Image((xs, ys, [[0, 0], [1, 0]]), vdims=Dimension('z Count', nodata=0))}, kdims=['z']) self.assertEqual(img, expected) @@ -725,7 +745,7 @@ def test_aggregate_points_categorical_mean(self): 'C': Image((xs, ys, [[nan, nan], [0.3, nan]]), vdims='z')}, kdims=['cat']) self.assertEqual(img, expected) - + class DatashaderShadeTests(ComparisonTestCase): @@ -733,11 +753,11 @@ class DatashaderShadeTests(ComparisonTestCase): def test_shade_categorical_images_xarray(self): xs, ys = [0.25, 0.75], [0.25, 0.75] data = NdOverlay({'A': Image((xs, ys, np.array([[1, 0], [0, 0]], dtype='u4')), - datatype=['xarray'], vdims='z Count'), + datatype=['xarray'], vdims=Dimension('z Count', nodata=0)), 'B': Image((xs, ys, np.array([[0, 0], [1, 0]], dtype='u4')), - datatype=['xarray'], vdims='z Count'), + datatype=['xarray'], vdims=Dimension('z Count', nodata=0)), 'C': Image((xs, ys, np.array([[0, 0], [1, 0]], dtype='u4')), - datatype=['xarray'], vdims='z Count')}, + datatype=['xarray'], vdims=Dimension('z Count', nodata=0))}, kdims=['z']) shaded = shade(data) r = [[228, 
120], [66, 120]] @@ -751,11 +771,11 @@ def test_shade_categorical_images_xarray(self): def test_shade_categorical_images_grid(self): xs, ys = [0.25, 0.75], [0.25, 0.75] data = NdOverlay({'A': Image((xs, ys, np.array([[1, 0], [0, 0]], dtype='u4')), - datatype=['grid'], vdims='z Count'), + datatype=['grid'], vdims=Dimension('z Count', nodata=0)), 'B': Image((xs, ys, np.array([[0, 0], [1, 0]], dtype='u4')), - datatype=['grid'], vdims='z Count'), + datatype=['grid'], vdims=Dimension('z Count', nodata=0)), 'C': Image((xs, ys, np.array([[0, 0], [1, 0]], dtype='u4')), - datatype=['grid'], vdims='z Count')}, + datatype=['grid'], vdims=Dimension('z Count', nodata=0))}, kdims=['z']) shaded = shade(data) r = [[228, 120], [66, 120]] @@ -869,64 +889,167 @@ def setUp(self): if ds_version <= '0.6.4': raise SkipTest('Regridding operations require datashader>=0.7.0') + self.simplexes = [(0, 1, 2), (3, 2, 1)] + self.vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] + self.simplexes_vdim = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] + self.vertices_vdim = [(0., 0., 1), (0., 1., 2), (1., 0, 3), (1, 1, 4)] + def test_rasterize_trimesh_no_vdims(self): - simplices = [(0, 1, 2), (3, 2, 1)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices)) + trimesh = TriMesh((self.simplexes, self.vertices)) img = rasterize(trimesh, width=3, height=3, dynamic=False) image = Image(np.array([[True, True, True], [True, True, True], [True, True, True]]), - bounds=(0, 0, 1, 1), vdims='Any') + bounds=(0, 0, 1, 1), vdims=Dimension('Any', nodata=0)) self.assertEqual(img, image) def test_rasterize_trimesh_no_vdims_zero_range(self): - simplices = [(0, 1, 2), (3, 2, 1)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices)) + trimesh = TriMesh((self.simplexes, self.vertices)) img = rasterize(trimesh, height=2, x_range=(0, 0), dynamic=False) image = Image(([], [0.25, 0.75], np.zeros((2, 0))), - bounds=(0, 0, 0, 1), xdensity=1, vdims='Any') + 
bounds=(0, 0, 0, 1), xdensity=1, vdims=Dimension('Any', nodata=0)) self.assertEqual(img, image) def test_rasterize_trimesh_with_vdims_as_wireframe(self): - simplices = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices), vdims=['z']) + trimesh = TriMesh((self.simplexes_vdim, self.vertices), vdims=['z']) img = rasterize(trimesh, width=3, height=3, aggregator='any', interpolation=None, dynamic=False) - image = Image(np.array([[True, True, True], [True, True, True], [True, True, True]]), - bounds=(0, 0, 1, 1), vdims='Any') + array = np.array([ + [True, True, True], + [True, True, True], + [True, True, True] + ]) + image = Image(array, bounds=(0, 0, 1, 1), vdims=Dimension('Any', nodata=0)) self.assertEqual(img, image) def test_rasterize_trimesh(self): - simplices = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices), vdims=['z']) + trimesh = TriMesh((self.simplexes_vdim, self.vertices), vdims=['z']) + img = rasterize(trimesh, width=3, height=3, dynamic=False) + array = np.array([ + [ 1.5, 1.5, np.NaN], + [ 0.5, 1.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) + self.assertEqual(img, image) + + def test_rasterize_pandas_trimesh_implicit_nodes(self): + simplex_df = pd.DataFrame(self.simplexes, columns=['v0', 'v1', 'v2']) + vertex_df = pd.DataFrame(self.vertices_vdim, columns=['x', 'y', 'z']) + + trimesh = TriMesh((simplex_df, vertex_df)) img = rasterize(trimesh, width=3, height=3, dynamic=False) - image = Image(np.array([[1.5, 1.5, np.NaN], [0.5, 1.5, np.NaN], [np.NaN, np.NaN, np.NaN]]), - bounds=(0, 0, 1, 1)) + + array = np.array([ + [ 2., 3., np.NaN], + [ 1.5, 2.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) + self.assertEqual(img, image) + + def test_rasterize_dask_trimesh_implicit_nodes(self): + simplex_df = pd.DataFrame(self.simplexes, 
columns=['v0', 'v1', 'v2']) + vertex_df = pd.DataFrame(self.vertices_vdim, columns=['x', 'y', 'z']) + + simplex_ddf = dd.from_pandas(simplex_df, npartitions=2) + vertex_ddf = dd.from_pandas(vertex_df, npartitions=2) + + trimesh = TriMesh((simplex_ddf, vertex_ddf)) + + ri = rasterize.instance() + img = ri(trimesh, width=3, height=3, dynamic=False, precompute=True) + + cache = ri._precomputed + self.assertEqual(len(cache), 1) + self.assertIn(trimesh._plot_id, cache) + self.assertIsInstance(cache[trimesh._plot_id]['mesh'], dd.DataFrame) + + array = np.array([ + [ 2., 3., np.NaN], + [ 1.5, 2.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) + self.assertEqual(img, image) + + def test_rasterize_dask_trimesh(self): + simplex_df = pd.DataFrame(self.simplexes_vdim, columns=['v0', 'v1', 'v2', 'z']) + vertex_df = pd.DataFrame(self.vertices, columns=['x', 'y']) + + simplex_ddf = dd.from_pandas(simplex_df, npartitions=2) + vertex_ddf = dd.from_pandas(vertex_df, npartitions=2) + + tri_nodes = Nodes(vertex_ddf, ['x', 'y', 'index']) + trimesh = TriMesh((simplex_ddf, tri_nodes), vdims=['z']) + + ri = rasterize.instance() + img = ri(trimesh, width=3, height=3, dynamic=False, precompute=True) + + cache = ri._precomputed + self.assertEqual(len(cache), 1) + self.assertIn(trimesh._plot_id, cache) + self.assertIsInstance(cache[trimesh._plot_id]['mesh'], dd.DataFrame) + + array = np.array([ + [ 1.5, 1.5, np.NaN], + [ 0.5, 1.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) + self.assertEqual(img, image) + + def test_rasterize_dask_trimesh_with_node_vdims(self): + simplex_df = pd.DataFrame(self.simplexes, columns=['v0', 'v1', 'v2']) + vertex_df = pd.DataFrame(self.vertices_vdim, columns=['x', 'y', 'z']) + + simplex_ddf = dd.from_pandas(simplex_df, npartitions=2) + vertex_ddf = dd.from_pandas(vertex_df, npartitions=2) + + tri_nodes = Nodes(vertex_ddf, ['x', 'y', 'index'], ['z']) + trimesh = TriMesh((simplex_ddf, 
tri_nodes)) + + ri = rasterize.instance() + img = ri(trimesh, width=3, height=3, dynamic=False, precompute=True) + + cache = ri._precomputed + self.assertEqual(len(cache), 1) + self.assertIn(trimesh._plot_id, cache) + self.assertIsInstance(cache[trimesh._plot_id]['mesh'], dd.DataFrame) + + array = np.array([ + [ 2., 3., np.NaN], + [ 1.5, 2.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) self.assertEqual(img, image) def test_rasterize_trimesh_node_vdim_precedence(self): - simplices = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0., 1), (0., 1., 2), (1., 0, 3), (1, 1, 4)] - trimesh = TriMesh((simplices, Points(vertices, vdims=['node_z'])), vdims=['z']) + nodes = Points(self.vertices_vdim, vdims=['node_z']) + trimesh = TriMesh((self.simplexes_vdim, nodes), vdims=['z']) img = rasterize(trimesh, width=3, height=3, dynamic=False) - image = Image(np.array([[2., 3., np.NaN], [1.5, 2.5, np.NaN], [np.NaN, np.NaN, np.NaN]]), - bounds=(0, 0, 1, 1), vdims='node_z') + + array = np.array([ + [ 2., 3., np.NaN], + [ 1.5, 2.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1), vdims='node_z') self.assertEqual(img, image) def test_rasterize_trimesh_node_explicit_vdim(self): - simplices = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0., 1), (0., 1., 2), (1., 0, 3), (1, 1, 4)] - trimesh = TriMesh((simplices, Points(vertices, vdims=['node_z'])), vdims=['z']) + nodes = Points(self.vertices_vdim, vdims=['node_z']) + trimesh = TriMesh((self.simplexes_vdim, nodes), vdims=['z']) img = rasterize(trimesh, width=3, height=3, dynamic=False, aggregator=ds.mean('z')) - image = Image(np.array([[1.5, 1.5, np.NaN], [0.5, 1.5, np.NaN], [np.NaN, np.NaN, np.NaN]]), - bounds=(0, 0, 1, 1)) + + array = np.array([ + [ 1.5, 1.5, np.NaN], + [ 0.5, 1.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) self.assertEqual(img, image) def test_rasterize_trimesh_zero_range(self): - simplices = 
[(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices), vdims=['z']) + trimesh = TriMesh((self.simplexes_vdim, self.vertices), vdims=['z']) img = rasterize(trimesh, x_range=(0, 0), height=2, dynamic=False) image = Image(([], [0.25, 0.75], np.zeros((2, 0))), bounds=(0, 0, 0, 1), xdensity=1) @@ -942,21 +1065,25 @@ def test_rasterize_trimesh_vertex_vdims(self): self.assertEqual(img, image) def test_rasterize_trimesh_ds_aggregator(self): - simplices = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices), vdims=['z']) + trimesh = TriMesh((self.simplexes_vdim, self.vertices), vdims=['z']) img = rasterize(trimesh, width=3, height=3, dynamic=False, aggregator=ds.mean('z')) - image = Image(np.array([[1.5, 1.5, np.NaN], [0.5, 1.5, np.NaN], [np.NaN, np.NaN, np.NaN]]), - bounds=(0, 0, 1, 1)) + array = np.array([ + [ 1.5, 1.5, np.NaN], + [ 0.5, 1.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) self.assertEqual(img, image) def test_rasterize_trimesh_string_aggregator(self): - simplices = [(0, 1, 2, 0.5), (3, 2, 1, 1.5)] - vertices = [(0., 0.), (0., 1.), (1., 0), (1, 1)] - trimesh = TriMesh((simplices, vertices), vdims=['z']) + trimesh = TriMesh((self.simplexes_vdim, self.vertices), vdims=['z']) img = rasterize(trimesh, width=3, height=3, dynamic=False, aggregator='mean') - image = Image(np.array([[1.5, 1.5, np.NaN], [0.5, 1.5, np.NaN], [np.NaN, np.NaN, np.NaN]]), - bounds=(0, 0, 1, 1)) + array = np.array([ + [ 1.5, 1.5, np.NaN], + [ 0.5, 1.5, np.NaN], + [np.NaN, np.NaN, np.NaN] + ]) + image = Image(array, bounds=(0, 0, 1, 1)) self.assertEqual(img, image) def test_rasterize_quadmesh(self): @@ -978,13 +1105,13 @@ def test_rasterize_points(self): img = rasterize(points, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - 
vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) self.assertEqual(img, expected) def test_rasterize_curve(self): curve = Curve([(0.2, 0.3), (0.4, 0.7), (0.8, 0.99)]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [1, 1]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = rasterize(curve, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -993,7 +1120,7 @@ def test_rasterize_ndoverlay(self): ds = Dataset([(0.2, 0.3, 0), (0.4, 0.7, 1), (0, 0.99, 2)], kdims=['x', 'y', 'z']) ndoverlay = ds.to(Points, ['x', 'y'], [], 'z').overlay() expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = rasterize(ndoverlay, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -1001,7 +1128,7 @@ def test_rasterize_ndoverlay(self): def test_rasterize_path(self): path = Path([[(0.2, 0.3), (0.4, 0.7)], [(0.4, 0.7), (0.8, 0.99)]]) expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 1]]), - vdims=['Count']) + vdims=[Dimension('Count', nodata=0)]) img = rasterize(path, dynamic=False, x_range=(0, 1), y_range=(0, 1), width=2, height=2) self.assertEqual(img, expected) @@ -1037,9 +1164,9 @@ def test_spread_img_1px(self): raise SkipTest('Datashader does not support DataArray yet') arr = np.array([[0, 0, 0], [0, 0, 0], [1, 1, 1]]).T spreaded = spread(Image(arr)) - arr = np.array([[0, 0, 0], [1, 1, 1], [1, 1, 1]]).T + arr = np.array([[0, 0, 0], [2, 3, 2], [2, 3, 2]]).T self.assertEqual(spreaded, Image(arr)) - + class DatashaderStackTests(ComparisonTestCase): @@ -1089,3 +1216,109 @@ def setUp(self): def test_directly_connect_paths(self): direct = directly_connect_edges(self.graph)._split_edgepaths self.assertEqual(direct, self.graph.edgepaths) + +class InspectorTests(ComparisonTestCase): + """ + Tests for inspector operations + """ + def setUp(self): + points = Points([(0.2, 0.3), (0.4, 0.7), 
(0, 0.99)]) + self.pntsimg = rasterize(points, dynamic=False, + x_range=(0, 1), y_range=(0, 1), width=4, height=4) + if spatialpandas is None: + return + + xs1 = [1, 2, 3]; xs2 = [6, 7, 3];ys1 = [2, 0, 7]; ys2 = [7, 5, 2] + holes = [ [[(1.5, 2), (2, 3), (1.6, 1.6)], [(2.1, 4.5), (2.5, 5), (2.3, 3.5)]],] + polydata = [{'x': xs1, 'y': ys1, 'holes': holes, 'z': 1}, + {'x': xs2, 'y': ys2, 'holes': [[]], 'z': 2}] + self.polysrgb = datashade(Polygons(polydata, vdims=['z'], + datatype=['spatialpandas']), + x_range=(0, 7), y_range=(0, 7), dynamic=False) + + def tearDown(self): + Tap.x, Tap.y = None, None + + + def test_inspect_points_or_polygons(self): + if spatialpandas is None: + raise SkipTest('Polygon inspect tests require spatialpandas') + polys = inspect(self.polysrgb, + max_indicators=3, dynamic=False, pixels=1, x=6, y=5) + self.assertEqual(polys, Polygons([{'x': [6, 3, 7], 'y': [7, 2, 5], 'z': 2}], vdims='z')) + points = inspect(self.pntsimg, max_indicators=3, dynamic=False, pixels=1, x=-0.1, y=-0.1) + self.assertEqual(points.dimension_values('x'), np.array([])) + self.assertEqual(points.dimension_values('y'), np.array([])) + + def test_points_inspection_1px_mask(self): + points = inspect_points(self.pntsimg, max_indicators=3, dynamic=False, pixels=1, x=-0.1, y=-0.1) + self.assertEqual(points.dimension_values('x'), np.array([])) + self.assertEqual(points.dimension_values('y'), np.array([])) + + def test_points_inspection_2px_mask(self): + points = inspect_points(self.pntsimg, max_indicators=3, dynamic=False, pixels=2, x=-0.1, y=-0.1) + self.assertEqual(points.dimension_values('x'), np.array([0.2])) + self.assertEqual(points.dimension_values('y'), np.array([0.3])) + + def test_points_inspection_4px_mask(self): + points = inspect_points(self.pntsimg, max_indicators=3, dynamic=False, pixels=4, x=-0.1, y=-0.1) + self.assertEqual(points.dimension_values('x'), np.array([0.2, 0.4])) + self.assertEqual(points.dimension_values('y'), np.array([0.3, 0.7])) + + def 
test_points_inspection_5px_mask(self): + points = inspect_points(self.pntsimg, max_indicators=3, dynamic=False, pixels=5, x=-0.1, y=-0.1) + self.assertEqual(points.dimension_values('x'), np.array([0.2, 0.4, 0])) + self.assertEqual(points.dimension_values('y'), np.array([0.3, 0.7, 0.99])) + + def test_inspection_5px_mask_points_df(self): + inspector = inspect.instance(max_indicators=3, dynamic=False, pixels=5, + x=-0.1, y=-0.1) + inspector(self.pntsimg) + self.assertEqual(list(inspector.hits['x']),[0.2,0.4,0.0]) + self.assertEqual(list(inspector.hits['y']),[0.3,0.7,0.99]) + + def test_points_inspection_dict_streams(self): + Tap.x, Tap.y = 0.4, 0.7 + points = inspect_points(self.pntsimg, max_indicators=3, dynamic=True, + pixels=1, streams=dict(x=Tap.param.x, y=Tap.param.y)) + self.assertEqual(len(points.streams), 1) + self.assertEqual(isinstance(points.streams[0], Tap), True) + self.assertEqual(points.streams[0].x, 0.4) + self.assertEqual(points.streams[0].y, 0.7) + + def test_points_inspection_dict_streams_instance(self): + Tap.x, Tap.y = 0.2, 0.3 + inspector = inspect_points.instance(max_indicators=3, dynamic=True, pixels=1, + streams=dict(x=Tap.param.x, y=Tap.param.y)) + points = inspector(self.pntsimg) + self.assertEqual(len(points.streams), 1) + self.assertEqual(isinstance(points.streams[0], Tap), True) + self.assertEqual(points.streams[0].x, 0.2) + self.assertEqual(points.streams[0].y, 0.3) + + def test_polys_inspection_1px_mask_hit(self): + if spatialpandas is None: + raise SkipTest('Polygon inspect tests require spatialpandas') + polys = inspect_polygons(self.polysrgb, + max_indicators=3, dynamic=False, pixels=1, x=6, y=5) + self.assertEqual(polys, Polygons([{'x': [6, 3, 7], 'y': [7, 2, 5], 'z': 2}], + vdims='z')) + + + def test_inspection_1px_mask_poly_df(self): + if spatialpandas is None: + raise SkipTest('Polygon inspect tests require spatialpandas') + inspector = inspect.instance(max_indicators=3, dynamic=False, pixels=1, x=6, y=5) + 
inspector(self.polysrgb) + self.assertEqual(len(inspector.hits), 1) + data = [[6.0, 7.0, 3.0, 2.0, 7.0, 5.0, 6.0, 7.0]] + self.assertEqual(inspector.hits.iloc[0].geometry, + spatialpandas.geometry.polygon.Polygon(data)) + + + def test_polys_inspection_1px_mask_miss(self): + if spatialpandas is None: + raise SkipTest('Polygon inspect tests require spatialpandas') + polys = inspect_polygons(self.polysrgb, + max_indicators=3, dynamic=False, pixels=1, x=0, y=0) + self.assertEqual(polys, Polygons([], vdims='z')) diff --git a/holoviews/tests/operation/testoperation.py b/holoviews/tests/operation/test_operation.py similarity index 96% rename from holoviews/tests/operation/testoperation.py rename to holoviews/tests/operation/test_operation.py index 09cdfcdf2a..0c344a2888 100644 --- a/holoviews/tests/operation/testoperation.py +++ b/holoviews/tests/operation/test_operation.py @@ -141,7 +141,7 @@ def test_image_contours_filled(self): def test_points_histogram(self): points = Points([float(i) for i in range(10)]) - op_hist = histogram(points, num_bins=3) + op_hist = histogram(points, num_bins=3, normed=True) hist = Histogram(([0, 3, 6, 9], [0.1, 0.1, 0.133333]), vdims=('x_frequency', 'Frequency')) @@ -151,7 +151,7 @@ def test_dataset_histogram_empty_explicit_bins(self): ds = Dataset([np.nan, np.nan], ['x']) op_hist = histogram(ds, bins=[0, 1, 2]) - hist = Histogram(([0, 1, 2], [0, 0]), vdims=('x_frequency', 'Frequency')) + hist = Histogram(([0, 1, 2], [0, 0]), vdims=('x_count', 'Count')) self.assertEqual(op_hist, hist) @da_skip @@ -159,7 +159,7 @@ def test_dataset_histogram_dask(self): import dask.array as da ds = Dataset((da.from_array(np.array(range(10), dtype='f'), chunks=(3)),), ['x'], datatype=['dask']) - op_hist = histogram(ds, num_bins=3) + op_hist = histogram(ds, num_bins=3, normed=True) hist = Histogram(([0, 3, 6, 9], [0.1, 0.1, 0.133333]), vdims=('x_frequency', 'Frequency')) @@ -171,7 +171,7 @@ def test_dataset_cumulative_histogram_dask(self): import dask.array as 
da ds = Dataset((da.from_array(np.array(range(10), dtype='f'), chunks=(3)),), ['x'], datatype=['dask']) - op_hist = histogram(ds, num_bins=3, cumulative=True) + op_hist = histogram(ds, num_bins=3, cumulative=True, normed=True) hist = Histogram(([0, 3, 6, 9], [0.3, 0.6, 1]), vdims=('x_frequency', 'Frequency')) @@ -184,7 +184,7 @@ def test_dataset_weighted_histogram_dask(self): ds = Dataset((da.from_array(np.array(range(10), dtype='f'), chunks=3), da.from_array([i/10. for i in range(10)], chunks=3)), ['x', 'y'], datatype=['dask']) - op_hist = histogram(ds, weight_dimension='y', num_bins=3) + op_hist = histogram(ds, weight_dimension='y', num_bins=3, normed=True) hist = Histogram(([0, 3, 6, 9], [0.022222, 0.088889, 0.222222]), vdims='y') @@ -193,7 +193,7 @@ def test_dataset_weighted_histogram_dask(self): def test_points_histogram_bin_range(self): points = Points([float(i) for i in range(10)]) - op_hist = histogram(points, num_bins=3, bin_range=(0, 3)) + op_hist = histogram(points, num_bins=3, bin_range=(0, 3), normed=True) hist = Histogram(([0.25, 0.25, 0.5], [0., 1., 2., 3.]), vdims=('x_frequency', 'Frequency')) @@ -226,7 +226,7 @@ def test_points_histogram_not_normed(self): def test_histogram_operation_datetime(self): dates = np.array([dt.datetime(2017, 1, i) for i in range(1, 5)]) - op_hist = histogram(Dataset(dates, 'Date'), num_bins=4) + op_hist = histogram(Dataset(dates, 'Date'), num_bins=4, normed=True) hist_data = { 'Date': np.array([ '2017-01-01T00:00:00.000000', '2017-01-01T18:00:00.000000', @@ -241,7 +241,7 @@ def test_histogram_operation_datetime(self): def test_histogram_operation_datetime64(self): dates = np.array([dt.datetime(2017, 1, i) for i in range(1, 5)]).astype('M') - op_hist = histogram(Dataset(dates, 'Date'), num_bins=4) + op_hist = histogram(Dataset(dates, 'Date'), num_bins=4, normed=True) hist_data = { 'Date': np.array([ '2017-01-01T00:00:00.000000', '2017-01-01T18:00:00.000000', @@ -257,7 +257,7 @@ def 
test_histogram_operation_datetime64(self): @pd_skip def test_histogram_operation_pd_period(self): dates = pd.date_range('2017-01-01', '2017-01-04', freq='D').to_period('D') - op_hist = histogram(Dataset(dates, 'Date'), num_bins=4) + op_hist = histogram(Dataset(dates, 'Date'), num_bins=4, normed=True) hist_data = { 'Date': np.array([ '2017-01-01T00:00:00.000000', '2017-01-01T18:00:00.000000', @@ -272,13 +272,14 @@ def test_histogram_operation_pd_period(self): def test_points_histogram_weighted(self): points = Points([float(i) for i in range(10)]) - op_hist = histogram(points, num_bins=3, weight_dimension='y') + op_hist = histogram(points, num_bins=3, weight_dimension='y', normed=True) hist = Histogram(([0.022222, 0.088889, 0.222222], [0, 3, 6, 9]), vdims=['y']) self.assertEqual(op_hist, hist) def test_points_histogram_mean_weighted(self): points = Points([float(i) for i in range(10)]) - op_hist = histogram(points, num_bins=3, weight_dimension='y', mean_weighted=True) + op_hist = histogram(points, num_bins=3, weight_dimension='y', + mean_weighted=True, normed=True) hist = Histogram(([1., 4., 7.5], [0, 3, 6, 9]), vdims=['y']) self.assertEqual(op_hist, hist) diff --git a/holoviews/tests/operation/teststatsoperations.py b/holoviews/tests/operation/test_statsoperations.py similarity index 100% rename from holoviews/tests/operation/teststatsoperations.py rename to holoviews/tests/operation/test_statsoperations.py diff --git a/holoviews/tests/operation/testtimeseriesoperations.py b/holoviews/tests/operation/test_timeseriesoperations.py similarity index 100% rename from holoviews/tests/operation/testtimeseriesoperations.py rename to holoviews/tests/operation/test_timeseriesoperations.py diff --git a/holoviews/tests/plotting/bokeh/testannotationplot.py b/holoviews/tests/plotting/bokeh/test_annotationplot.py similarity index 98% rename from holoviews/tests/plotting/bokeh/testannotationplot.py rename to holoviews/tests/plotting/bokeh/test_annotationplot.py index 
7654036e0c..615bd42f78 100644 --- a/holoviews/tests/plotting/bokeh/testannotationplot.py +++ b/holoviews/tests/plotting/bokeh/test_annotationplot.py @@ -4,7 +4,7 @@ HLine, VLine, Text, Labels, Arrow, HSpan, VSpan, Slope ) -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestHVLinePlot(TestBokehPlot): @@ -39,7 +39,7 @@ def test_vline_plot(self): class TestHVSpanPlot(TestBokehPlot): - + def test_hspan_invert_axes(self): hspan = HSpan(1.1, 1.5).opts(invert_axes=True) plot = bokeh_renderer.get_plot(hspan) @@ -94,7 +94,7 @@ def test_slope_invert_axes(self): slope = plot.handles['glyph'] self.assertEqual(slope.gradient, 0.5) self.assertEqual(slope.y_intercept, -5) - + class TestTextPlot(TestBokehPlot): @@ -131,7 +131,7 @@ def _compare_arrow_plot(self, plot, start, end): arrow_glyph = plot.handles['arrow_1_glyph'] arrow_cds = plot.handles['arrow_1_source'] label_glyph = plot.handles['text_1_glyph'] - + label_cds = plot.handles['text_1_source'] x0, y0 = start x1, y1 = end diff --git a/holoviews/tests/plotting/bokeh/testareaplot.py b/holoviews/tests/plotting/bokeh/test_areaplot.py similarity index 89% rename from holoviews/tests/plotting/bokeh/testareaplot.py rename to holoviews/tests/plotting/bokeh/test_areaplot.py index 02fc69c0b9..2cb7ffb7d2 100644 --- a/holoviews/tests/plotting/bokeh/testareaplot.py +++ b/holoviews/tests/plotting/bokeh/test_areaplot.py @@ -5,7 +5,7 @@ from holoviews.element import Area from ...utils import LoggingComparisonTestCase -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestAreaPlot(LoggingComparisonTestCase, TestBokehPlot): @@ -92,7 +92,7 @@ def test_area_padding_mixed(self): self.assertEqual(x_range.end, 3.2) self.assertEqual(y_range.start, -2.5) self.assertEqual(y_range.end, 3.5) - + def test_area_padding_hard_range(self): area = Area([(1, 1), (2, 2), (3, 3)]).redim.range(y=(0, 4)).options(padding=0.1) plot = 
bokeh_renderer.get_plot(area) @@ -128,7 +128,7 @@ def test_area_padding_logx(self): self.assertEqual(x_range.end, 3.3483695221017129) self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 3.2) - + def test_area_padding_logy(self): area = Area([(1, 1), (2, 2), (3, 3)]).options(padding=0.1, logy=True) plot = bokeh_renderer.get_plot(area) @@ -138,3 +138,17 @@ def test_area_padding_logy(self): self.assertEqual(y_range.start, 0.033483695221017122) self.assertEqual(y_range.end, 3.3483695221017129) self.log_handler.assertContains('WARNING', 'Logarithmic axis range encountered value less than') + + def test_area_legend(self): + python = np.array([2, 3, 7, 5, 26, 221, 44, 233, 254, 265, 266, 267, 120, 111]) + pypy = np.array([12, 33, 47, 15, 126, 121, 144, 233, 254, 225, 226, 267, 110, 130]) + jython = np.array([22, 43, 10, 25, 26, 101, 114, 203, 194, 215, 201, 227, 139, 160]) + + dims = dict(kdims="time", vdims="memory") + python = Area(python, label="python", **dims) + pypy = Area(pypy, label="pypy", **dims) + jython = Area(jython, label="jython", **dims) + + overlay = Area.stack(python * pypy * jython) + labels = [n[1] for n in overlay.data] + self.assertEqual(labels, ['Python', 'Pypy', 'Jython']) diff --git a/holoviews/tests/plotting/bokeh/testbarplot.py b/holoviews/tests/plotting/bokeh/test_barplot.py similarity index 94% rename from holoviews/tests/plotting/bokeh/testbarplot.py rename to holoviews/tests/plotting/bokeh/test_barplot.py index bc3466946e..2174c17df0 100644 --- a/holoviews/tests/plotting/bokeh/testbarplot.py +++ b/holoviews/tests/plotting/bokeh/test_barplot.py @@ -5,8 +5,7 @@ from bokeh.models import CategoricalColorMapper, LinearColorMapper -from ..utils import ParamLogStream -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestBarPlot(TestBokehPlot): @@ -29,7 +28,7 @@ def test_bars_suppress_legend(self): self.assertEqual(len(fig.legend), 0) def test_empty_bars(self): - bars = 
Bars([], kdims=['x', 'y'], vdims=['z']).opts(plot=dict(group_index=1)) + bars = Bars([], kdims=['x', 'y'], vdims=['z']) plot = bokeh_renderer.get_plot(bars) plot.initialize_plot() source = plot.handles['source'] @@ -43,10 +42,10 @@ def test_bars_grouped_categories(self): source = plot.handles['source'] self.assertEqual([tuple(x) for x in source.data['xoffsets']], [('A', '0'), ('B', '0'), ('A', '1')]) - self.assertEqual(list(source.data['Category']), ['0', '0', '1']) - self.assertEqual(source.data['Value'], np.array([1, 2, -1])) x_range = plot.handles['x_range'] self.assertEqual(x_range.factors, [('A', '0'), ('A', '1'), ('B', '0'), ('B', '1')]) + self.assertEqual(list(source.data['Category']), ['0', '0', '1']) + self.assertEqual(source.data['Value'], np.array([1, 2, -1])) def test_bars_multi_level_sorted(self): box= Bars((['A', 'B']*15, [3, 10, 1]*10, np.random.randn(30)), @@ -106,7 +105,7 @@ def test_bars_ylim(self): y_range = plot.handles['y_range'] self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 200) - + def test_bars_padding_square(self): points = Bars([(1, 2), (2, -1), (3, 3)]).options(padding=0.1) plot = bokeh_renderer.get_plot(points) @@ -254,13 +253,3 @@ def test_op_ndoverlay_value(self): plot = bokeh_renderer.get_plot(overlay) for subplot, color in zip(plot.subplots.values(), colors): self.assertEqual(subplot.handles['glyph'].fill_color, color) - - def test_bars_color_index_color_clash(self): - bars = Bars([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims=['y', 'color']).options(color='color', color_index='color') - with ParamLogStream() as log: - bokeh_renderer.get_plot(bars) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/bokeh/testboxwhiskerplot.py b/holoviews/tests/plotting/bokeh/test_boxwhiskerplot.py similarity index 98% rename from 
holoviews/tests/plotting/bokeh/testboxwhiskerplot.py rename to holoviews/tests/plotting/bokeh/test_boxwhiskerplot.py index 2cb8fb7712..464810df9a 100644 --- a/holoviews/tests/plotting/bokeh/testboxwhiskerplot.py +++ b/holoviews/tests/plotting/bokeh/test_boxwhiskerplot.py @@ -4,7 +4,7 @@ from holoviews.element import BoxWhisker -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import ColumnDataSource, CategoricalColorMapper, LinearColorMapper diff --git a/holoviews/tests/plotting/bokeh/testcallbacks.py b/holoviews/tests/plotting/bokeh/test_callbacks.py similarity index 79% rename from holoviews/tests/plotting/bokeh/testcallbacks.py rename to holoviews/tests/plotting/bokeh/test_callbacks.py index b94064d439..45aeac349e 100644 --- a/holoviews/tests/plotting/bokeh/testcallbacks.py +++ b/holoviews/tests/plotting/bokeh/test_callbacks.py @@ -1,17 +1,20 @@ import datetime as dt + from collections import deque, namedtuple +from unittest import SkipTest import numpy as np +import pyviz_comms as comms -from holoviews.core import DynamicMap, NdOverlay +from holoviews.core import DynamicMap from holoviews.core.options import Store from holoviews.core.util import pd from holoviews.element import Points, Polygons, Box, Curve, Table, Rectangles from holoviews.element.comparison import ComparisonTestCase -from holoviews.streams import (PointDraw, PolyDraw, PolyEdit, BoxEdit, - PointerXY, PointerX, PlotReset, Selection1D, - RangeXY, PlotSize, CDSStream, SingleTap) -import pyviz_comms as comms +from holoviews.streams import ( + PointDraw, PolyDraw, PolyEdit, BoxEdit, PointerXY, PointerX, + PlotReset, Selection1D, RangeXY, PlotSize, CDSStream, SingleTap +) try: from bokeh.events import Tap @@ -19,7 +22,7 @@ from bokeh.models import Range1d, Plot, ColumnDataSource, Selection, PolyEditTool from holoviews.plotting.bokeh.callbacks import ( Callback, PointDrawCallback, PolyDrawCallback, PolyEditCallback, - 
BoxEditCallback, Selection1DCallback, PointerXCallback, TapCallback + BoxEditCallback, PointerXCallback, TapCallback ) from holoviews.plotting.bokeh.renderer import BokehRenderer bokeh_server_renderer = BokehRenderer.instance(mode='server') @@ -191,20 +194,12 @@ def test_point_draw_callback_initialized_server(self): self.assertEqual(plot.handles['source']._callbacks, {'data': [plot.callbacks[0].on_change]}) - def test_point_draw_callback_initialized_js(self): - points = Points([(0, 1)]) - PointDraw(source=points) - plot = bokeh_renderer.get_plot(points) - cb = plot.callbacks[0].callbacks[0] - self.assertEqual(plot.handles['source'].js_property_callbacks, - {'change:data': [cb], 'patching': [cb]}) - def test_point_draw_callback_with_vdims_initialization(self): points = Points([(0, 1, 'A')], vdims=['A']) stream = PointDraw(source=points) bokeh_server_renderer.get_plot(points) self.assertEqual(stream.element.dimension_values('A'), np.array(['A'])) - + def test_point_draw_callback_with_vdims(self): points = Points([(0, 1, 'A')], vdims=['A']) point_draw = PointDraw(source=points) @@ -234,14 +229,6 @@ def test_poly_draw_callback_initialized_server(self): self.assertEqual(plot.handles['source']._callbacks, {'data': [plot.callbacks[0].on_change]}) - def test_poly_draw_callback_initialized_js(self): - polys = Polygons([[(0, 0), (2, 2), (4, 0)]]) - PolyDraw(source=polys) - plot = bokeh_renderer.get_plot(polys) - cb = plot.callbacks[0].callbacks[0] - self.assertEqual(plot.handles['source'].js_property_callbacks, - {'change:data': [cb], 'patching': [cb]}) - def test_poly_draw_callback_with_vdims(self): polys = Polygons([{'x': [0, 2, 4], 'y': [0, 2, 0], 'A': 1}], vdims=['A']) poly_draw = PolyDraw(source=polys) @@ -254,18 +241,6 @@ def test_poly_draw_callback_with_vdims(self): {'x': [3, 4, 5], 'y': [3, 4, 5], 'A': 2}], vdims=['A']) self.assertEqual(poly_draw.element, element) - def test_poly_draw_callback_with_vdims_no_color_index(self): - polys = Polygons([{'x': [0, 2, 4], 'y': 
[0, 2, 0], 'A': 1}], vdims=['A']).options(color_index=None) - poly_draw = PolyDraw(source=polys) - plot = bokeh_server_renderer.get_plot(polys) - self.assertIsInstance(plot.callbacks[0], PolyDrawCallback) - callback = plot.callbacks[0] - data = {'x': [[1, 2, 3], [3, 4, 5]], 'y': [[1, 2, 3], [3, 4, 5]], 'A': [1, 2]} - callback.on_msg({'data': data}) - element = Polygons([{'x': [1, 2, 3], 'y': [1, 2, 3], 'A': 1}, - {'x': [3, 4, 5], 'y': [3, 4, 5], 'A': 2}], vdims=['A']) - self.assertEqual(poly_draw.element, element) - def test_box_edit_callback(self): boxes = Rectangles([(-0.5, -0.5, 0.5, 0.5)]) box_edit = BoxEdit(source=boxes) @@ -305,14 +280,6 @@ def test_box_edit_callback_initialized_server(self): self.assertEqual(plot.handles['cds']._callbacks, {'data': [plot.callbacks[0].on_change]}) - def test_box_edit_callback_initialized_js(self): - boxes = Polygons([Box(0, 0, 1)]) - BoxEdit(source=boxes) - plot = bokeh_renderer.get_plot(boxes) - cb = plot.callbacks[0].callbacks[0] - self.assertEqual(plot.handles['cds'].js_property_callbacks, - {'change:data': [cb], 'patching': [cb]}) - def test_poly_edit_callback(self): polys = Polygons([[(0, 0), (2, 2), (4, 0)]]) poly_edit = PolyEdit(source=polys) @@ -331,14 +298,6 @@ def test_poly_edit_callback_initialized_server(self): self.assertEqual(plot.handles['source']._callbacks, {'data': [plot.callbacks[0].on_change]}) - def test_poly_edit_callback_initialized_js(self): - polys = Polygons([[(0, 0), (2, 2), (4, 0)]]) - PolyEdit(source=polys) - plot = bokeh_renderer.get_plot(polys) - cb = plot.callbacks[0].callbacks[0] - self.assertEqual(plot.handles['source'].js_property_callbacks, - {'change:data': [cb], 'patching': [cb]}) - def test_poly_edit_shared_callback(self): polys = Polygons([[(0, 0), (2, 2), (4, 0)]]) polys2 = Polygons([[(0, 0), (2, 2), (4, 0)]]) @@ -370,9 +329,6 @@ def test_point_draw_shared_datasource_callback(self): point_plot = plot.subplots[(0, 0)].subplots['main'] table_plot = plot.subplots[(0, 1)].subplots['main'] 
self.assertIs(point_plot.handles['source'], table_plot.handles['source']) - self.assertIn(plot.id, point_plot.callbacks[0].callbacks[0].code) - self.assertNotIn('PLACEHOLDER_PLOT_ID', point_plot.callbacks[0].callbacks[0].code) - class TestServerCallbacks(CallbackTestCase): @@ -475,6 +431,7 @@ def test_rangexy_datetime(self): self.assertEqual(stream.y_range, (0.2, 0.8)) def test_rangexy_framewise_reset(self): + raise SkipTest('The fix for this was reverted, see #4396') stream = RangeXY(x_range=(0, 2), y_range=(0, 1)) curve = DynamicMap(lambda z, x_range, y_range: Curve([1, 2, z]), kdims=['z'], streams=[stream]).redim.range(z=(0, 3)) @@ -490,55 +447,3 @@ def test_rangexy_framewise_not_reset_if_triggering(self): )) stream.event(x_range=(0, 3)) self.assertEqual(stream.x_range, (0, 3)) - - - -class TestBokehCustomJSCallbacks(CallbackTestCase): - - def test_customjs_callback_attributes_js_for_model(self): - js_code = Callback.attributes_js({'x0': 'x_range.attributes.start', - 'x1': 'x_range.attributes.end'}) - - code = ( - 'if ((x_range != undefined)) { data["x0"] = {id: x_range["id"], value: ' - 'x_range["attributes"]["start"]};\n }' - 'if ((x_range != undefined)) { data["x1"] = {id: x_range["id"], value: ' - 'x_range["attributes"]["end"]};\n }' - ) - self.assertEqual(js_code, code) - - def test_customjs_callback_attributes_js_for_cb_obj(self): - js_code = Callback.attributes_js({'x': 'cb_obj.x', - 'y': 'cb_obj.y'}) - code = 'data["x"] = cb_obj["x"];\ndata["y"] = cb_obj["y"];\n' - self.assertEqual(js_code, code) - - def test_customjs_callback_attributes_js_for_cb_data(self): - js_code = Callback.attributes_js({'x0': 'cb_data.geometry.x0', - 'x1': 'cb_data.geometry.x1', - 'y0': 'cb_data.geometry.y0', - 'y1': 'cb_data.geometry.y1'}) - code = ('data["x0"] = cb_data["geometry"]["x0"];\n' - 'data["x1"] = cb_data["geometry"]["x1"];\n' - 'data["y0"] = cb_data["geometry"]["y0"];\n' - 'data["y1"] = cb_data["geometry"]["y1"];\n') - self.assertEqual(js_code, code) - - def 
test_callback_on_ndoverlay_is_attached(self): - ndoverlay = NdOverlay({i: Curve([i]) for i in range(5)}) - selection = Selection1D(source=ndoverlay) - plot = bokeh_renderer.get_plot(ndoverlay) - self.assertEqual(len(plot.callbacks), 1) - self.assertIsInstance(plot.callbacks[0], Selection1DCallback) - self.assertIn(selection, plot.callbacks[0].streams) - - def test_callback_on_table_is_attached(self): - table = Table([1, 2, 3], 'x') - selection = Selection1D(source=table) - plot = bokeh_renderer.get_plot(table) - self.assertEqual(len(plot.callbacks), 1) - self.assertIsInstance(plot.callbacks[0], Selection1DCallback) - self.assertIn(selection, plot.callbacks[0].streams) - callbacks = plot.handles['selected'].js_property_callbacks - self.assertIn('change:indices', callbacks) - self.assertIn(plot.id, callbacks['change:indices'][0].code) diff --git a/holoviews/tests/plotting/bokeh/testcurveplot.py b/holoviews/tests/plotting/bokeh/test_curveplot.py similarity index 99% rename from holoviews/tests/plotting/bokeh/testcurveplot.py rename to holoviews/tests/plotting/bokeh/test_curveplot.py index 1e18ad0193..3fdb019281 100644 --- a/holoviews/tests/plotting/bokeh/testcurveplot.py +++ b/holoviews/tests/plotting/bokeh/test_curveplot.py @@ -5,13 +5,13 @@ from holoviews.core import NdOverlay, HoloMap, DynamicMap from holoviews.core.options import Cycle, Palette -from holoviews.core.util import pd, basestring +from holoviews.core.util import pd from holoviews.element import Curve from holoviews.plotting.util import rgb2hex from holoviews.streams import PointerX from holoviews.util.transform import dim -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import FactorRange, FixedTicker @@ -58,7 +58,7 @@ def test_cyclic_palette_curves(self): colors = palette[3].values plot = bokeh_renderer.get_plot(hmap) for subp, color in zip(plot.subplots.values(), colors): - color = color if isinstance(color, basestring) 
else rgb2hex(color) + color = color if isinstance(color, str) else rgb2hex(color) self.assertEqual(subp.handles['glyph'].line_color, color) def test_batched_curve_line_color_and_color(self): diff --git a/holoviews/tests/plotting/bokeh/testdivplot.py b/holoviews/tests/plotting/bokeh/test_divplot.py similarity index 91% rename from holoviews/tests/plotting/bokeh/testdivplot.py rename to holoviews/tests/plotting/bokeh/test_divplot.py index 9ad38f98b0..512b24861e 100644 --- a/holoviews/tests/plotting/bokeh/testdivplot.py +++ b/holoviews/tests/plotting/bokeh/test_divplot.py @@ -1,6 +1,6 @@ from holoviews.element import Div -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestDivPlot(TestBokehPlot): diff --git a/holoviews/tests/plotting/bokeh/testelementplot.py b/holoviews/tests/plotting/bokeh/test_elementplot.py similarity index 94% rename from holoviews/tests/plotting/bokeh/testelementplot.py rename to holoviews/tests/plotting/bokeh/test_elementplot.py index b273f9976a..b6a7aba225 100644 --- a/holoviews/tests/plotting/bokeh/testelementplot.py +++ b/holoviews/tests/plotting/bokeh/test_elementplot.py @@ -1,22 +1,26 @@ +import datetime as dt + from unittest import SkipTest from collections import OrderedDict import numpy as np from holoviews.core import Dimension, DynamicMap, NdOverlay, HoloMap +from holoviews.core.util import dt_to_int from holoviews.element import Curve, Image, Scatter, Labels from holoviews.streams import Stream, PointDraw from holoviews.plotting.util import process_cmap from holoviews.util import render -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer from ...utils import LoggingComparisonTestCase try: from bokeh.document import Document from bokeh.models import tools from bokeh.models import (FuncTickFormatter, PrintfTickFormatter, - NumeralTickFormatter, LogTicker) + NumeralTickFormatter, LogTicker, + LinearColorMapper, 
LogColorMapper) from holoviews.plotting.bokeh.util import bokeh_version except: pass @@ -36,7 +40,10 @@ def test_element_font_scaling(self): fig = plot.state xaxis = plot.handles['xaxis'] yaxis = plot.handles['yaxis'] - self.assertEqual(fig.title.text_font_size, {'value': '24pt'}) + if bokeh_version > '2.2.3': + self.assertEqual(fig.title.text_font_size, '24pt') + else: + self.assertEqual(fig.title.text_font_size, {'value': '24pt'}) if bokeh_version < '2.0.2': self.assertEqual(xaxis.axis_label_text_font_size, '20pt') self.assertEqual(yaxis.axis_label_text_font_size, '20pt') @@ -54,7 +61,10 @@ def test_element_font_scaling_fontsize_override_common(self): fig = plot.state xaxis = plot.handles['xaxis'] yaxis = plot.handles['yaxis'] - self.assertEqual(fig.title.text_font_size, {'value': '28pt'}) + if bokeh_version > '2.2.3': + self.assertEqual(fig.title.text_font_size, '28pt') + else: + self.assertEqual(fig.title.text_font_size, {'value': '28pt'}) self.assertEqual(xaxis.axis_label_text_font_size, '28pt') self.assertEqual(yaxis.axis_label_text_font_size, '28pt') if bokeh_version < '2.0.2': @@ -72,7 +82,10 @@ def test_element_font_scaling_fontsize_override_specific(self): fig = plot.state xaxis = plot.handles['xaxis'] yaxis = plot.handles['yaxis'] - self.assertEqual(fig.title.text_font_size, {'value': '200%'}) + if bokeh_version > '2.2.3': + self.assertEqual(fig.title.text_font_size, '200%') + else: + self.assertEqual(fig.title.text_font_size, {'value': '200%'}) self.assertEqual(xaxis.axis_label_text_font_size, '24pt') self.assertEqual(xaxis.major_label_text_font_size, '2.4em') if bokeh_version < '2.0.2': @@ -325,6 +338,21 @@ def formatter(x): plot = bokeh_renderer.get_plot(curve).state self.assertIsInstance(plot.yaxis[0].formatter, FuncTickFormatter) + def test_element_xticks_datetime(self): + dates = [(dt.datetime(2016, 1, i), i) for i in range(1, 4)] + tick = dt.datetime(2016, 1, 1, 12) + curve = Curve(dates).opts(xticks=[tick]) + plot = bokeh_renderer.get_plot(curve) 
+ self.assertEqual(plot.state.xaxis.ticker.ticks, [dt_to_int(tick, 'ms')]) + + def test_element_xticks_datetime_label_override(self): + dates = [(dt.datetime(2016, 1, i), i) for i in range(1, 4)] + tick = dt.datetime(2016, 1, 1, 12) + curve = Curve(dates).opts(xticks=[(tick, 'A')]) + plot = bokeh_renderer.get_plot(curve) + self.assertEqual(plot.state.xaxis.ticker.ticks, [dt_to_int(tick, 'ms')]) + self.assertEqual(plot.state.xaxis.major_label_overrides, {dt_to_int(tick, 'ms'): 'A'}) + def test_element_grid_custom_xticker(self): curve = Curve([1, 2, 3]).opts(xticks=[0.5, 1.5], show_grid=True) plot = bokeh_renderer.get_plot(curve) @@ -810,6 +838,29 @@ def test_colormapper_transparent_nan(self): cmapper = plot.handles['color_mapper'] self.assertEqual(cmapper.nan_color, 'rgba(0, 0, 0, 0)') + def test_colormapper_cnorm_linear(self): + img = Image(np.array([[0, 1], [2, 3]])).options(cnorm='linear') + plot = bokeh_renderer.get_plot(img) + cmapper = plot.handles['color_mapper'] + self.assertTrue(cmapper, LinearColorMapper) + + def test_colormapper_cnorm_log(self): + img = Image(np.array([[0, 1], [2, 3]])).options(cnorm='log') + plot = bokeh_renderer.get_plot(img) + cmapper = plot.handles['color_mapper'] + self.assertTrue(cmapper, LogColorMapper) + + def test_colormapper_cnorm_eqhist(self): + try: + from bokeh.models import EqHistColorMapper + except: + raise SkipTest("Option cnorm='eq_hist' requires EqHistColorMapper") + img = Image(np.array([[0, 1], [2, 3]])).options(cnorm='eq_hist') + plot = bokeh_renderer.get_plot(img) + cmapper = plot.handles['color_mapper'] + self.assertTrue(cmapper, EqHistColorMapper) + + def test_colormapper_min_max_colors(self): img = Image(np.array([[0, 1], [2, 3]])).options(clipping_colors={'min': 'red', 'max': 'blue'}) plot = bokeh_renderer.get_plot(img) @@ -838,12 +889,12 @@ def test_colorbar_fontsize_scaling(self): def test_explicit_categorical_cmap_on_integer_data(self): explicit_mapping = OrderedDict([(0, 'blue'), (1, 'red'), (2, 'green'), 
(3, 'purple')]) points = Scatter(([0, 1, 2, 3], [0, 1, 2, 3], [0, 1, 2, 3]), vdims=['y', 'Category']).options( - color_index='Category', cmap=explicit_mapping + color='Category', cmap=explicit_mapping ) plot = bokeh_renderer.get_plot(points) - cmapper = plot.handles['color_mapper'] + cmapper = plot.handles['color_color_mapper'] cds = plot.handles['cds'] - self.assertEqual(cds.data['Category_str__'], ['0', '1', '2', '3']) + self.assertEqual(cds.data['color_str__'], ['0', '1', '2', '3']) self.assertEqual(cmapper.factors, ['0', '1', '2', '3']) self.assertEqual(cmapper.palette, ['blue', 'red', 'green', 'purple']) diff --git a/holoviews/tests/plotting/bokeh/testerrorbarplot.py b/holoviews/tests/plotting/bokeh/test_errorbarplot.py similarity index 99% rename from holoviews/tests/plotting/bokeh/testerrorbarplot.py rename to holoviews/tests/plotting/bokeh/test_errorbarplot.py index 6d4249d2dd..6dc577e5f3 100644 --- a/holoviews/tests/plotting/bokeh/testerrorbarplot.py +++ b/holoviews/tests/plotting/bokeh/test_errorbarplot.py @@ -4,7 +4,7 @@ from holoviews.element import ErrorBars -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestErrorBarsPlot(TestBokehPlot): @@ -53,7 +53,7 @@ def test_errorbars_padding_logx(self): self.assertEqual(x_range.end, 3.3483695221017129) self.assertEqual(y_range.start, 0.19999999999999996) self.assertEqual(y_range.end, 3.8) - + def test_errorbars_padding_logy(self): errorbars = ErrorBars([(1, 1, 0.5), (2, 2, 0.5), (3, 3, 0.5)]).options(padding=0.1, logy=True) plot = bokeh_renderer.get_plot(errorbars) diff --git a/holoviews/tests/plotting/bokeh/testgeomplot.py b/holoviews/tests/plotting/bokeh/test_geomplot.py similarity index 98% rename from holoviews/tests/plotting/bokeh/testgeomplot.py rename to holoviews/tests/plotting/bokeh/test_geomplot.py index 533ae0a411..d4e09907dd 100644 --- a/holoviews/tests/plotting/bokeh/testgeomplot.py +++ 
b/holoviews/tests/plotting/bokeh/test_geomplot.py @@ -4,7 +4,7 @@ from holoviews.core.util import pd from holoviews.element import Segments -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import FactorRange diff --git a/holoviews/tests/plotting/bokeh/testgraphplot.py b/holoviews/tests/plotting/bokeh/test_graphplot.py similarity index 78% rename from holoviews/tests/plotting/bokeh/testgraphplot.py rename to holoviews/tests/plotting/bokeh/test_graphplot.py index 63bad0ec50..e35280e5ff 100644 --- a/holoviews/tests/plotting/bokeh/testgraphplot.py +++ b/holoviews/tests/plotting/bokeh/test_graphplot.py @@ -1,23 +1,22 @@ -from __future__ import absolute_import - import numpy as np + from holoviews.core.data import Dataset -from holoviews.element import Graph, Nodes, TriMesh, Chord, circular_layout +from holoviews.element import Graph, Nodes, TriMesh, Chord, VLine, circular_layout from holoviews.util.transform import dim try: - from bokeh.models import (NodesAndLinkedEdges, EdgesAndLinkedNodes, Patches) - from bokeh.models.mappers import CategoricalColorMapper, LinearColorMapper + from bokeh.models import (NodesAndLinkedEdges, EdgesAndLinkedNodes, NodesOnly, Patches) + from bokeh.models.mappers import CategoricalColorMapper except: pass -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestBokehGraphPlot(TestBokehPlot): def setUp(self): - super(TestBokehGraphPlot, self).setUp() + super().setUp() N = 8 self.nodes = circular_layout(np.arange(N, dtype=np.int32)) @@ -42,6 +41,15 @@ def test_plot_simple_graph(self): layout = {str(int(z)): (x, y) for x, y, z in self.graph.nodes.array()} self.assertEqual(layout_source.graph_layout, layout) + def test_plot_graph_annotation_overlay(self): + plot = bokeh_renderer.get_plot(VLine(0) * self.graph) + x_range = plot.handles['x_range'] + y_range = plot.handles['x_range'] + 
self.assertEqual(x_range.start, -1) + self.assertEqual(x_range.end, 1) + self.assertEqual(y_range.start, -1) + self.assertEqual(y_range.end, 1) + def test_plot_graph_with_paths(self): graph = self.graph.clone((self.graph.data, self.graph.nodes, self.graph.edgepaths)) plot = bokeh_renderer.get_plot(graph) @@ -85,7 +93,7 @@ def test_graph_inspection_policy_edges_non_default_names(self): def test_graph_inspection_policy_none(self): plot = bokeh_renderer.get_plot(self.graph.opts(plot=dict(inspection_policy=None))) renderer = plot.handles['glyph_renderer'] - self.assertIs(renderer.inspection_policy, None) + self.assertIsInstance(renderer.inspection_policy, NodesOnly) def test_graph_selection_policy_nodes(self): plot = bokeh_renderer.get_plot(self.graph) @@ -104,56 +112,7 @@ def test_graph_selection_policy_edges(self): def test_graph_selection_policy_none(self): plot = bokeh_renderer.get_plot(self.graph.opts(plot=dict(selection_policy=None))) renderer = plot.handles['glyph_renderer'] - self.assertIs(renderer.selection_policy, None) - - def test_graph_nodes_categorical_colormapped(self): - g = self.graph2.opts(plot=dict(color_index='Label'), style=dict(cmap='Set1')) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['color_mapper'] - node_source = plot.handles['scatter_1_source'] - glyph = plot.handles['scatter_1_glyph'] - self.assertIsInstance(cmapper, CategoricalColorMapper) - self.assertEqual(cmapper.factors, ['Output', 'Input']) - self.assertEqual(node_source.data['Label'], self.node_info['Label']) - self.assertEqual(glyph.fill_color, {'field': 'Label', 'transform': cmapper}) - - def test_graph_nodes_numerically_colormapped(self): - g = self.graph3.opts(plot=dict(color_index='Weight'), style=dict(cmap='viridis')) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['color_mapper'] - node_source = plot.handles['scatter_1_source'] - glyph = plot.handles['scatter_1_glyph'] - self.assertIsInstance(cmapper, LinearColorMapper) - 
self.assertEqual(cmapper.low, self.weights.min()) - self.assertEqual(cmapper.high, self.weights.max()) - self.assertEqual(node_source.data['Weight'], self.node_info2['Weight']) - self.assertEqual(glyph.fill_color, {'field': 'Weight', 'transform': cmapper}) - - def test_graph_edges_categorical_colormapped(self): - g = self.graph3.opts(plot=dict(edge_color_index='start'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['edge_colormapper'] - edge_source = plot.handles['multi_line_1_source'] - glyph = plot.handles['multi_line_1_glyph'] - self.assertIsInstance(cmapper, CategoricalColorMapper) - factors = ['0', '1', '2', '3', '4', '5', '6', '7'] - self.assertEqual(cmapper.factors, factors) - self.assertEqual(edge_source.data['start_str__'], factors) - self.assertEqual(glyph.line_color, {'field': 'start_str__', 'transform': cmapper}) - - def test_graph_edges_numerically_colormapped(self): - g = self.graph4.opts(plot=dict(edge_color_index='Weight'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['edge_colormapper'] - edge_source = plot.handles['multi_line_1_source'] - glyph = plot.handles['multi_line_1_glyph'] - self.assertIsInstance(cmapper, LinearColorMapper) - self.assertEqual(cmapper.low, self.weights.min()) - self.assertEqual(cmapper.high, self.weights.max()) - self.assertEqual(edge_source.data['Weight'], self.node_info2['Weight']) - self.assertEqual(glyph.line_color, {'field': 'Weight', 'transform': cmapper}) + self.assertIsInstance(renderer.selection_policy, NodesOnly) ########################### # Styling mapping # @@ -280,7 +239,7 @@ def test_graph_op_edge_line_width(self): class TestBokehTriMeshPlot(TestBokehPlot): def setUp(self): - super(TestBokehTriMeshPlot, self).setUp() + super().setUp() self.nodes = [(0, 0, 0), (0.5, 1, 1), (1., 0, 2), (1.5, 1, 3)] self.simplices = [(0, 1, 2, 0), (1, 2, 3, 1)] @@ -310,33 +269,6 @@ def 
test_plot_simple_trimesh_filled(self): layout = {str(int(z)): (x, y) for x, y, z in self.trimesh.nodes.array()} self.assertEqual(layout_source.graph_layout, layout) - def test_trimesh_edges_categorical_colormapped(self): - g = self.trimesh.opts(plot=dict(edge_color_index='node1'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = bokeh_renderer.get_plot(g) - print(plot.handles) - cmapper = plot.handles['edge_colormapper'] - edge_source = plot.handles['multi_line_1_source'] - glyph = plot.handles['multi_line_1_glyph'] - self.assertIsInstance(cmapper, CategoricalColorMapper) - factors = ['0', '1', '2', '3'] - self.assertEqual(cmapper.factors, factors) - self.assertEqual(edge_source.data['node1_str__'], ['0', '1']) - self.assertEqual(glyph.line_color, {'field': 'node1_str__', 'transform': cmapper}) - - def test_trimesh_nodes_numerically_colormapped(self): - g = self.trimesh_weighted.opts(plot=dict(edge_color_index='weight'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['edge_colormapper'] - edge_source = plot.handles['multi_line_1_source'] - glyph = plot.handles['multi_line_1_glyph'] - self.assertIsInstance(cmapper, LinearColorMapper) - self.assertEqual(cmapper.low, 0) - self.assertEqual(cmapper.high, 1) - self.assertEqual(edge_source.data['weight'], np.array([0, 1])) - self.assertEqual(glyph.line_color, {'field': 'weight', 'transform': cmapper}) - ########################### # Styling mapping # ########################### @@ -351,7 +283,7 @@ def test_trimesh_op_node_color(self): self.assertEqual(glyph.fill_color, {'field': 'node_color'}) self.assertEqual(glyph.line_color, 'black') self.assertEqual(cds.data['node_color'], np.array(['red', 'green', 'blue', 'black'])) - + def test_trimesh_op_node_color_linear(self): edges = [(0, 1, 2), (1, 2, 3)] nodes = [(-1, -1, 0, 2), (0, 0, 1, 1), (0, 1, 2, 3), (1, 0, 3, 4)] @@ -363,7 +295,7 @@ def test_trimesh_op_node_color_linear(self): 
self.assertEqual(glyph.fill_color, {'field': 'node_color', 'transform': cmapper}) self.assertEqual(glyph.line_color, 'black') self.assertEqual(cds.data['node_color'], np.array([2, 1, 3, 4])) - self.assertEqual(cmapper.low, 1) + self.assertEqual(cmapper.low, 1) self.assertEqual(cmapper.high, 4) def test_trimesh_op_node_color_categorical(self): @@ -495,7 +427,7 @@ def test_trimesh_op_edge_line_width(self): class TestBokehChordPlot(TestBokehPlot): def setUp(self): - super(TestBokehChordPlot, self).setUp() + super().setUp() self.edges = [(0, 1, 1), (0, 2, 2), (1, 2, 3)] self.nodes = Dataset([(0, 'A'), (1, 'B'), (2, 'C')], 'index', 'Label') self.chord = Chord((self.edges, self.nodes)) @@ -527,24 +459,8 @@ def test_chord_nodes_labels_mapping(self): source = plot.handles['text_1_source'] self.assertEqual(source.data['text'], ['A', 'B', 'C']) - def test_chord_nodes_categorically_colormapped(self): - g = self.chord.opts(plot=dict(color_index='Label', label_index='Label'), - style=dict(cmap=['#FFFFFF', '#888888', '#000000'])) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['color_mapper'] - source = plot.handles['scatter_1_source'] - arc_source = plot.handles['multi_line_2_source'] - glyph = plot.handles['scatter_1_glyph'] - self.assertIsInstance(cmapper, CategoricalColorMapper) - self.assertEqual(cmapper.factors, ['A', 'B', 'C']) - self.assertEqual(cmapper.palette, ['#FFFFFF', '#888888', '#000000']) - self.assertEqual(source.data['Label'], np.array(['A', 'B', 'C'])) - self.assertEqual(arc_source.data['Label'], np.array(['A', 'B', 'C'])) - self.assertEqual(glyph.fill_color, {'field': 'Label', 'transform': cmapper}) - def test_chord_nodes_style_map_node_color_colormapped(self): - g = self.chord.opts(plot=dict(labels='Label'), - style=dict(node_color='Label', cmap=['#FFFFFF', '#888888', '#000000'])) + g = self.chord.opts(labels='Label', node_color='Label', cmap=['#FFFFFF', '#888888', '#000000']) plot = bokeh_renderer.get_plot(g) cmapper = 
plot.handles['node_color_color_mapper'] source = plot.handles['scatter_1_source'] @@ -559,21 +475,8 @@ def test_chord_nodes_style_map_node_color_colormapped(self): self.assertEqual(glyph.fill_color, {'field': 'node_color', 'transform': cmapper}) self.assertEqual(arc_glyph.line_color, {'field': 'node_color', 'transform': cmapper}) - def test_chord_edges_categorically_colormapped(self): - g = self.chord.opts(plot=dict(edge_color_index='start'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = bokeh_renderer.get_plot(g) - cmapper = plot.handles['edge_colormapper'] - edge_source = plot.handles['multi_line_1_source'] - glyph = plot.handles['multi_line_1_glyph'] - self.assertIsInstance(cmapper, CategoricalColorMapper) - self.assertEqual(cmapper.palette, ['#FFFFFF', '#000000', '#FFFFFF']) - self.assertEqual(cmapper.factors, ['0', '1', '2']) - self.assertEqual(edge_source.data['start_str__'], ['0', '0', '1']) - self.assertEqual(glyph.line_color, {'field': 'start_str__', 'transform': cmapper}) - def test_chord_edge_color_style_mapping(self): - g = self.chord.opts(style=dict(edge_color=dim('start').astype(str), edge_cmap=['#FFFFFF', '#000000'])) + g = self.chord.opts(edge_color=dim('start').astype(str), edge_cmap=['#FFFFFF', '#000000']) plot = bokeh_renderer.get_plot(g) cmapper = plot.handles['edge_color_color_mapper'] edge_source = plot.handles['multi_line_1_source'] diff --git a/holoviews/tests/plotting/bokeh/testgridplot.py b/holoviews/tests/plotting/bokeh/test_gridplot.py similarity index 99% rename from holoviews/tests/plotting/bokeh/testgridplot.py rename to holoviews/tests/plotting/bokeh/test_gridplot.py index 9d4bf76c13..687fff24f5 100644 --- a/holoviews/tests/plotting/bokeh/testgridplot.py +++ b/holoviews/tests/plotting/bokeh/test_gridplot.py @@ -6,7 +6,7 @@ from holoviews.operation import gridmatrix from holoviews.streams import Stream -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from 
bokeh.layouts import Column diff --git a/holoviews/tests/plotting/bokeh/testheatmapplot.py b/holoviews/tests/plotting/bokeh/test_heatmapplot.py similarity index 88% rename from holoviews/tests/plotting/bokeh/testheatmapplot.py rename to holoviews/tests/plotting/bokeh/test_heatmapplot.py index 919e76ebf9..e109aa84c8 100644 --- a/holoviews/tests/plotting/bokeh/testheatmapplot.py +++ b/holoviews/tests/plotting/bokeh/test_heatmapplot.py @@ -7,7 +7,7 @@ except: pass -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestHeatMapPlot(TestBokehPlot): @@ -110,7 +110,7 @@ def test_heatmap_dilate(self): hmap = HeatMap([('A',1, 1), ('B', 2, 2)]).options(dilate=True) plot = bokeh_renderer.get_plot(hmap) glyph = plot.handles['glyph'] - self.assertTrue(glyph.dilate) + self.assertTrue(glyph.dilate) def test_heatmap_single_x_value(self): hmap = HeatMap(([1], ['A', 'B'], np.array([[1], [2]]))) @@ -129,3 +129,18 @@ def test_heatmap_single_y_value(self): self.assertEqual(cds.data['x'], np.array(['A', 'B'])) self.assertEqual(cds.data['height'], [2.0, 2.0]) self.assertEqual(plot.handles['glyph'].width, 1) + + def test_heatmap_alpha_dim(self): + data = { + "row": [1, 2, 1, 2], + "col": [1, 2, 2, 1], + "alpha": [0, 0, 0, 1], + "val": [.5, .6, .2, .1] + } + hm = HeatMap(data, kdims=["col", "row"], vdims=["val", "alpha"]).opts(alpha="alpha") + plot = bokeh_renderer.get_plot(hm) + cds = plot.handles['cds'] + self.assertEqual(cds.data['row'], np.array([1, 2, 1, 2])) + self.assertEqual(cds.data['col'], np.array([1, 1, 2, 2])) + self.assertEqual(cds.data['alpha'], np.array([0, 1, 0, 0])) + self.assertEqual(cds.data['zvalues'], np.array([0.5, 0.1, 0.2, 0.6])) diff --git a/holoviews/tests/plotting/bokeh/testhextilesplot.py b/holoviews/tests/plotting/bokeh/test_hextilesplot.py similarity index 77% rename from holoviews/tests/plotting/bokeh/testhextilesplot.py rename to holoviews/tests/plotting/bokeh/test_hextilesplot.py index 
a569148e77..779ca633ba 100644 --- a/holoviews/tests/plotting/bokeh/testhextilesplot.py +++ b/holoviews/tests/plotting/bokeh/test_hextilesplot.py @@ -7,13 +7,13 @@ from holoviews.plotting.bokeh.hex_tiles import hex_binning from holoviews.plotting.bokeh.util import bokeh_version -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestHexTilesOperation(TestBokehPlot): def setUp(self): - super(TestHexTilesOperation, self).setUp() + super().setUp() if bokeh_version < '0.12.15': raise SkipTest("Bokeh >= 0.12.15 required to test HexTiles operation.") @@ -41,63 +41,51 @@ def test_hex_tiles_sum_value_aggregation(self): class TestHexTilesPlot(TestBokehPlot): def setUp(self): - super(TestHexTilesPlot, self).setUp() + super().setUp() if bokeh_version < '0.12.15': raise SkipTest("Bokeh >= 0.12.15 required to test HexTilesPlot.") def test_hex_tiles_empty(self): tiles = HexTiles([]) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) self.assertEqual(plot.handles['source'].data, {'q': [], 'r': []}) def test_hex_tiles_only_nans(self): tiles = HexTiles([(np.NaN, 0), (1, np.NaN)]) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) self.assertEqual(plot.handles['source'].data, {'q': [], 'r': []}) def test_hex_tiles_zero_min_count(self): tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(min_count=0) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) cmapper = plot.handles['color_mapper'] self.assertEqual(cmapper.low, 0) self.assertEqual(plot.state.background_fill_color, cmapper.palette[0]) def test_hex_tiles_gridsize_tuple(self): tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(gridsize=(5, 10)) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) glyph = 
plot.handles['glyph'] self.assertEqual(glyph.size, 0.066666666666666666) self.assertEqual(glyph.aspect_scale, 0.5) def test_hex_tiles_gridsize_tuple_flat_orientation(self): tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(gridsize=(5, 10), orientation='flat') - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) glyph = plot.handles['glyph'] self.assertEqual(glyph.size, 0.13333333333333333) self.assertEqual(glyph.aspect_scale, 0.5) - def test_hex_tiles_scale(self): - tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(size_index=2, gridsize=3) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] - source = plot.handles['source'] - self.assertEqual(source.data['scale'], np.array([0.45, 0.45, 0.9])) - - def test_hex_tiles_scale_all_equal(self): - tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(size_index=2) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] - source = plot.handles['source'] - self.assertEqual(source.data['scale'], np.array([0.9, 0.9, 0.9, 0.9])) - def test_hex_tiles_hover_count(self): tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(tools=['hover']) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) dims, opts = plot._hover_opts(tiles) self.assertEqual(dims, [Dimension('Count')]) self.assertEqual(opts, {}) def test_hex_tiles_hover_weighted(self): tiles = HexTiles([(0, 0, 0.1), (0.5, 0.5, 0.2), (-0.5, -0.5, 0.3)], vdims='z').options(aggregator=np.mean) - plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(tiles) dims, opts = plot._hover_opts(tiles) self.assertEqual(dims, [Dimension('z')]) self.assertEqual(opts, {}) @@ -108,18 +96,24 @@ def test_hex_tiles_hover_weighted(self): def test_hex_tile_line_width_op(self): hextiles = HexTiles(np.random.randn(1000, 
2)).options(line_width='Count') - plot = list(bokeh_renderer.get_plot(hextiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(hextiles) glyph = plot.handles['glyph'] self.assertEqual(glyph.line_width, {'field': 'line_width'}) def test_hex_tile_alpha_op(self): hextiles = HexTiles(np.random.randn(1000, 2)).options(alpha='Count') - plot = list(bokeh_renderer.get_plot(hextiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(hextiles) glyph = plot.handles['glyph'] self.assertEqual(glyph.fill_alpha, {'field': 'alpha'}) def test_hex_tile_scale_op(self): hextiles = HexTiles(np.random.randn(1000, 2)).options(scale='Count') - plot = list(bokeh_renderer.get_plot(hextiles).subplots.values())[0] + plot = bokeh_renderer.get_plot(hextiles) glyph = plot.handles['glyph'] self.assertEqual(glyph.scale, {'field': 'scale'}) + + def test_hex_tiles_scale_op_all_equal(self): + tiles = HexTiles([(0, 0), (0.5, 0.5), (-0.5, -0.5), (-0.4, -0.4)]).options(scale='Count') + plot = list(bokeh_renderer.get_plot(tiles).subplots.values())[0] + source = plot.handles['source'] + self.assertEqual(source.data['scale'], np.array([0.9, 0.9, 0.9, 0.9])) diff --git a/holoviews/tests/plotting/bokeh/testhistogramplot.py b/holoviews/tests/plotting/bokeh/test_histogramplot.py similarity index 98% rename from holoviews/tests/plotting/bokeh/testhistogramplot.py rename to holoviews/tests/plotting/bokeh/test_histogramplot.py index c0d1163983..15d15086ac 100644 --- a/holoviews/tests/plotting/bokeh/testhistogramplot.py +++ b/holoviews/tests/plotting/bokeh/test_histogramplot.py @@ -9,7 +9,7 @@ from bokeh.models import DatetimeAxis, CategoricalColorMapper, LinearColorMapper from ...utils import LoggingComparisonTestCase -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestSideHistogramPlot(LoggingComparisonTestCase, TestBokehPlot): @@ -60,10 +60,8 @@ def test_histogram_datetime64_plot(self): hist = histogram(Dataset(dates, 'Date'), 
num_bins=4) plot = bokeh_renderer.get_plot(hist) source = plot.handles['source'] - print(source.data) data = { - 'top': np.array([ - 3.85802469e-18, 3.85802469e-18, 3.85802469e-18, 3.85802469e-18]), + 'top': np.array([1, 1, 1, 1]), 'left': np.array([ '2017-01-01T00:00:00.000000', '2017-01-01T18:00:00.000000', '2017-01-02T12:00:00.000000', '2017-01-03T06:00:00.000000'], diff --git a/holoviews/tests/plotting/bokeh/testlabels.py b/holoviews/tests/plotting/bokeh/test_labels.py similarity index 71% rename from holoviews/tests/plotting/bokeh/testlabels.py rename to holoviews/tests/plotting/bokeh/test_labels.py index 99fc3a89eb..f98549fc21 100644 --- a/holoviews/tests/plotting/bokeh/testlabels.py +++ b/holoviews/tests/plotting/bokeh/test_labels.py @@ -8,8 +8,7 @@ except: pass -from ..utils import ParamLogStream -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestLabelsPlot(TestBokehPlot): @@ -65,42 +64,6 @@ def test_labels_inverted(self): self.assertEqual(glyph.y, 'x') self.assertEqual(glyph.text, 'Label') - def test_labels_color_mapped_text_vals(self): - labels = Labels([(0, 1, 0.33333), (1, 0, 0.66666)]).options(color_index=2) - plot = bokeh_renderer.get_plot(labels) - source = plot.handles['source'] - glyph = plot.handles['glyph'] - cmapper = plot.handles['color_mapper'] - expected = {'x': np.array([0, 1]), 'y': np.array([1, 0]), - 'Label': ['0.33333', '0.66666'], - 'text_color': np.array([0.33333, 0.66666])} - for k, vals in expected.items(): - self.assertEqual(source.data[k], vals) - self.assertEqual(glyph.x, 'x') - self.assertEqual(glyph.y, 'y') - self.assertEqual(glyph.text, 'Label') - self.assertEqual(glyph.text_color, {'field': 'text_color', 'transform': cmapper}) - self.assertEqual(cmapper.low, 0.33333) - self.assertEqual(cmapper.high, 0.66666) - - def test_labels_color_mapped(self): - labels = Labels([(0, 1, 0.33333, 2), (1, 0, 0.66666, 1)], vdims=['text', 'color']).options(color_index=3) - plot = 
bokeh_renderer.get_plot(labels) - source = plot.handles['source'] - glyph = plot.handles['glyph'] - cmapper = plot.handles['color_mapper'] - expected = {'x': np.array([0, 1]), 'y': np.array([1, 0]), - 'text': ['0.33333', '0.66666'], - 'color': np.array([2, 1])} - for k, vals in expected.items(): - self.assertEqual(source.data[k], vals) - self.assertEqual(glyph.x, 'x') - self.assertEqual(glyph.y, 'y') - self.assertEqual(glyph.text, 'text') - self.assertEqual(glyph.text_color, {'field': 'color', 'transform': cmapper}) - self.assertEqual(cmapper.low, 1) - self.assertEqual(cmapper.high, 2) - ########################### # Styling mapping # ########################### @@ -174,13 +137,3 @@ def test_label_font_size_op_ints(self): glyph = plot.handles['glyph'] self.assertEqual(cds.data['text_font_size'], ['10pt', '4pt', '8pt']) self.assertEqual(glyph.text_font_size, {'field': 'text_font_size'}) - - def test_labels_color_index_color_clash(self): - labels = Labels([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='color').options(text_color='color', color_index='color') - with ParamLogStream() as log: - bokeh_renderer.get_plot(labels) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'text_color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/bokeh/testlayoutplot.py b/holoviews/tests/plotting/bokeh/test_layoutplot.py similarity index 88% rename from holoviews/tests/plotting/bokeh/testlayoutplot.py rename to holoviews/tests/plotting/bokeh/test_layoutplot.py index 6ed186a611..5fca5d8967 100644 --- a/holoviews/tests/plotting/bokeh/testlayoutplot.py +++ b/holoviews/tests/plotting/bokeh/test_layoutplot.py @@ -8,6 +8,7 @@ from holoviews.element import Curve, Image, Points, Histogram, Scatter from holoviews.streams import Stream from holoviews.util import render, opts +from holoviews.util.transform import dim try: from bokeh.layouts import Column, Row @@ -17,7 +18,7 
@@ pass from ...utils import LoggingComparisonTestCase -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer @@ -48,6 +49,50 @@ def test_layout_framewise_norm(self): self.assertEqual(img1_cmapper.high, 40) self.assertEqual(img2_cmapper.high, 40) + def test_layout_framewise_matching_norm_update(self): + img1 = Image(np.mgrid[0:5, 0:5][0], vdims='z').opts(framewise=True, axiswise=True) + stream = Stream.define('zscale', value=1)() + transform = dim('z')*stream.param.value + img2 = Image(np.mgrid[0:5, 0:5][0], vdims='z').apply.transform( + z=transform).opts(framewise=True, axiswise=True) + plot = bokeh_renderer.get_plot(img1+img2) + img1_plot = plot.subplots[(0, 0)].subplots['main'] + img2_plot = plot.subplots[(0, 1)].subplots['main'] + img1_cmapper = img1_plot.handles['color_mapper'] + img2_cmapper = img2_plot.handles['color_mapper'] + self.assertEqual(img1_cmapper.low, 0) + self.assertEqual(img2_cmapper.low, 0) + self.assertEqual(img1_cmapper.high, 4) + self.assertEqual(img2_cmapper.high, 4) + stream.update(value=10) + self.assertEqual(img1_cmapper.high, 4) + self.assertEqual(img2_cmapper.high, 40) + stream.update(value=2) + self.assertEqual(img1_cmapper.high, 4) + self.assertEqual(img2_cmapper.high, 8) + + def test_layout_framewise_nonmatching_norm_update(self): + img1 = Image(np.mgrid[0:5, 0:5][0], vdims='z').opts(framewise=True) + stream = Stream.define('zscale', value=1)() + transform = dim('z2')*stream.param.value + img2 = Image(np.mgrid[0:5, 0:5][0], vdims='z2').apply.transform( + z2=transform).opts(framewise=True) + plot = bokeh_renderer.get_plot(img1+img2) + img1_plot = plot.subplots[(0, 0)].subplots['main'] + img2_plot = plot.subplots[(0, 1)].subplots['main'] + img1_cmapper = img1_plot.handles['color_mapper'] + img2_cmapper = img2_plot.handles['color_mapper'] + self.assertEqual(img1_cmapper.low, 0) + self.assertEqual(img2_cmapper.low, 0) + self.assertEqual(img1_cmapper.high, 4) + 
self.assertEqual(img2_cmapper.high, 4) + stream.update(value=10) + self.assertEqual(img1_cmapper.high, 4) + self.assertEqual(img2_cmapper.high, 40) + stream.update(value=2) + self.assertEqual(img1_cmapper.high, 4) + self.assertEqual(img2_cmapper.high, 8) + def test_layout_title(self): hmap1 = HoloMap({a: Image(np.random.rand(10,10)) for a in range(3)}) hmap2 = HoloMap({a: Image(np.random.rand(10,10)) for a in range(3)}) diff --git a/holoviews/tests/plotting/bokeh/testlinks.py b/holoviews/tests/plotting/bokeh/test_links.py similarity index 98% rename from holoviews/tests/plotting/bokeh/testlinks.py rename to holoviews/tests/plotting/bokeh/test_links.py index 52645814a8..2fe56c071a 100644 --- a/holoviews/tests/plotting/bokeh/testlinks.py +++ b/holoviews/tests/plotting/bokeh/test_links.py @@ -12,7 +12,7 @@ except: pass -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestLinkCallbacks(TestBokehPlot): @@ -20,7 +20,7 @@ class TestLinkCallbacks(TestBokehPlot): def setUp(self): if not bokeh_renderer or bokeh_version < '0.13': raise SkipTest('RangeTool requires bokeh version >= 0.13') - super(TestLinkCallbacks, self).setUp() + super().setUp() def test_range_tool_link_callback_single_axis(self): from bokeh.models import RangeTool diff --git a/holoviews/tests/plotting/bokeh/testoverlayplot.py b/holoviews/tests/plotting/bokeh/test_overlayplot.py similarity index 90% rename from holoviews/tests/plotting/bokeh/testoverlayplot.py rename to holoviews/tests/plotting/bokeh/test_overlayplot.py index 47d0330466..5de01e6d0d 100644 --- a/holoviews/tests/plotting/bokeh/testoverlayplot.py +++ b/holoviews/tests/plotting/bokeh/test_overlayplot.py @@ -1,13 +1,14 @@ import numpy as np +import panel as pn from holoviews.core import NdOverlay, HoloMap, DynamicMap, Overlay from holoviews.core.options import Cycle from holoviews.element import Curve, Points, ErrorBars, Scatter, Text, VLine -from holoviews.streams import Stream +from 
holoviews.streams import Stream, Tap from holoviews.util import Dynamic from ...utils import LoggingComparisonTestCase -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import FixedTicker, HoverTool, FactorRange, Range1d @@ -76,6 +77,12 @@ def test_hover_tool_instance_renderer_association(self): self.assertIn(curve_plot.handles['glyph_renderer'], curve_plot.handles['hover'].renderers) self.assertEqual(plot.handles['hover'].tooltips, tooltips) + def test_hover_tool_overlay_renderers(self): + overlay = Curve(range(2)).opts(tools=['hover']) * ErrorBars([]).opts(tools=['hover']) + plot = bokeh_renderer.get_plot(overlay) + self.assertEqual(len(plot.handles['hover'].renderers), 1) + self.assertEqual(plot.handles['hover'].tooltips, [('x', '@{x}'), ('y', '@{y}')]) + def test_hover_tool_nested_overlay_renderers(self): overlay1 = NdOverlay({0: Curve(range(2)), 1: Curve(range(3))}, kdims=['Test']) overlay2 = NdOverlay({0: Curve(range(4)), 1: Curve(range(5))}, kdims=['Test']) @@ -227,6 +234,36 @@ def test_complex_range_example(self): self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 19.655978889110628) + def test_overlay_muted_renderer(self): + overlay = Curve((np.arange(5)), label='increase') * Curve((np.arange(5)*-1+5), label='decrease').opts(muted=True) + plot = bokeh_renderer.get_plot(overlay) + unmuted, muted = plot.subplots.values() + self.assertFalse(unmuted.handles['glyph_renderer'].muted) + self.assertTrue(muted.handles['glyph_renderer'].muted) + + def test_overlay_params_bind_linked_stream(self): + tap = Tap() + def test(x): + return Curve([1, 2, 3]) * VLine(x or 0) + dmap = DynamicMap(pn.bind(test, x=tap.param.x)) + plot = bokeh_renderer.get_plot(dmap) + + tap.event(x=1) + _, vline_plot = plot.subplots.values() + assert vline_plot.handles['glyph'].location == 1 + + def test_overlay_params_dict_linked_stream(self): + tap = Tap() + def test(x): + return Curve([1, 2, 3]) * 
VLine(x or 0) + dmap = DynamicMap(test, streams={'x': tap.param.x}) + plot = bokeh_renderer.get_plot(dmap) + + tap.event(x=1) + _, vline_plot = plot.subplots.values() + assert vline_plot.handles['glyph'].location == 1 + + class TestLegends(TestBokehPlot): @@ -302,7 +339,6 @@ def test_dynamicmap_legend_updates_add_dynamic_plots(self): plot.update((3,)) legend_labels = [item.label for item in plot.state.legend[0].items] self.assertEqual(legend_labels, [{'value': 'A'}, {'value': 'B'}, {'value': 'C'}]) - def test_dynamicmap_ndoverlay_shrink_number_of_items(self): selected = Stream.define('selected', items=3)() def callback(items): diff --git a/holoviews/tests/plotting/bokeh/testpathplot.py b/holoviews/tests/plotting/bokeh/test_pathplot.py similarity index 96% rename from holoviews/tests/plotting/bokeh/testpathplot.py rename to holoviews/tests/plotting/bokeh/test_pathplot.py index 0531db532e..197d155bca 100644 --- a/holoviews/tests/plotting/bokeh/testpathplot.py +++ b/holoviews/tests/plotting/bokeh/test_pathplot.py @@ -8,7 +8,7 @@ from holoviews.streams import PolyDraw from holoviews.util.transform import dim -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import LinearColorMapper, CategoricalColorMapper @@ -64,7 +64,7 @@ def test_path_colored_and_split_with_extra_vdims(self): color = [0, 0.25, 0.5, 0.75] other = ['A', 'B', 'C', 'D'] data = {'x': xs, 'y': ys, 'color': color, 'other': other} - path = Path([data], vdims=['color','other']).options(color_index='color', tools=['hover']) + path = Path([data], vdims=['color','other']).options(color='color', tools=['hover']) plot = bokeh_renderer.get_plot(path) source = plot.handles['source'] @@ -97,10 +97,10 @@ def test_path_colored_by_levels_single_value(self): levels = [0, 38, 73, 95, 110, 130, 156, 999] colors = ['#5ebaff', '#00faf4', '#ffffcc', '#ffe775', '#ffc140', '#ff8f20', '#ff6060'] path = Path([data], vdims=['color', 'date']).options( - 
color_index='color', color_levels=levels, cmap=colors, tools=['hover']) + color='color', color_levels=levels, cmap=colors, tools=['hover']) plot = bokeh_renderer.get_plot(path) source = plot.handles['source'] - cmapper = plot.handles['color_mapper'] + cmapper = plot.handles['color_color_mapper'] self.assertEqual(source.data['xs'], [np.array([1, 2]), np.array([2, 3]), np.array([3, 4])]) self.assertEqual(source.data['ys'], [np.array([4, 3]), np.array([3, 2]), np.array([2, 1])]) @@ -131,7 +131,7 @@ def test_path_continuously_varying_color_op(self): self.assertEqual(cmapper.low, 994) self.assertEqual(cmapper.high, 999) self.assertEqual(cmapper.palette, colors[-1:]) - + def test_path_continuously_varying_alpha_op(self): xs = [1, 2, 3, 4] ys = xs[::-1] @@ -173,7 +173,7 @@ def test_path_continuously_varying_color_legend(self): self.assertEqual(item.label, legend) self.assertEqual(item.renderers, [plot.handles['glyph_renderer']]) - + class TestPolygonPlot(TestBokehPlot): @@ -190,17 +190,18 @@ def test_polygons_colored(self): for j in range(5)}) plot = bokeh_renderer.get_plot(polygons) for i, splot in enumerate(plot.subplots.values()): - cmapper = splot.handles['color_mapper'] + cmapper = splot.handles['fill_color_color_mapper'] self.assertEqual(cmapper.low, 0) self.assertEqual(cmapper.high, 4) source = splot.handles['source'] - self.assertEqual(source.data['Value'], np.array([i])) + self.assertEqual(source.data['fill_color'], np.array([i])) def test_polygons_colored_batched(self): polygons = NdOverlay({j: Polygons([[(i**j, i, j) for i in range(10)]], vdims='Value') for j in range(5)}).opts(plot=dict(legend_limit=0)) plot = list(bokeh_renderer.get_plot(polygons).subplots.values())[0] - cmapper = plot.handles['color_mapper'] + + cmapper = plot.handles['fill_color_color_mapper'] self.assertEqual(cmapper.low, 0) self.assertEqual(cmapper.high, 4) source = plot.handles['source'] @@ -212,7 +213,7 @@ def test_polygons_colored_batched_unsanitized(self): vdims=['some ? 
unescaped name']) for j in range(5)}).opts(plot=dict(legend_limit=0)) plot = list(bokeh_renderer.get_plot(polygons).subplots.values())[0] - cmapper = plot.handles['color_mapper'] + cmapper = plot.handles['fill_color_color_mapper'] self.assertEqual(cmapper.low, 0) self.assertEqual(cmapper.high, 4) source = plot.handles['source'] @@ -225,7 +226,7 @@ def test_empty_polygons_plot(self): source = plot.handles['source'] self.assertEqual(len(source.data['xs']), 0) self.assertEqual(len(source.data['ys']), 0) - self.assertEqual(len(source.data['Intensity']), 0) + self.assertEqual(len(source.data['fill_color']), 0) def test_polygon_with_hole_plot(self): xs = [1, 2, 3] @@ -276,7 +277,7 @@ def test_polygons_color_op(self): cds = plot.handles['source'] glyph = plot.handles['glyph'] self.assertEqual(glyph.line_color, 'black') - self.assertEqual(glyph.fill_color, {'field': 'color'}) + self.assertEqual(glyph.fill_color, {'field': 'fill_color'}) self.assertEqual(cds.data['color'], np.array(['green', 'red'])) def test_polygons_linear_color_op(self): @@ -374,7 +375,7 @@ def test_empty_contours_plot(self): source = plot.handles['source'] self.assertEqual(len(source.data['xs']), 0) self.assertEqual(len(source.data['ys']), 0) - self.assertEqual(len(source.data['Intensity']), 0) + self.assertEqual(len(source.data['line_color']), 0) def test_contours_color_op(self): contours = Contours([ @@ -384,7 +385,7 @@ def test_contours_color_op(self): plot = bokeh_renderer.get_plot(contours) cds = plot.handles['source'] glyph = plot.handles['glyph'] - self.assertEqual(glyph.line_color, {'field': 'color'}) + self.assertEqual(glyph.line_color, {'field': 'line_color'}) self.assertEqual(cds.data['color'], np.array(['green', 'red'])) def test_contours_linear_color_op(self): diff --git a/holoviews/tests/plotting/bokeh/testplot.py b/holoviews/tests/plotting/bokeh/test_plot.py similarity index 100% rename from holoviews/tests/plotting/bokeh/testplot.py rename to holoviews/tests/plotting/bokeh/test_plot.py 
diff --git a/holoviews/tests/plotting/bokeh/testpointplot.py b/holoviews/tests/plotting/bokeh/test_pointplot.py similarity index 90% rename from holoviews/tests/plotting/bokeh/testpointplot.py rename to holoviews/tests/plotting/bokeh/test_pointplot.py index b39dd28e98..a745258ca8 100644 --- a/holoviews/tests/plotting/bokeh/testpointplot.py +++ b/holoviews/tests/plotting/bokeh/test_pointplot.py @@ -9,7 +9,7 @@ from holoviews.element import Points from holoviews.streams import Stream -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer from ..utils import ParamLogStream try: @@ -21,25 +21,6 @@ class TestPointPlot(TestBokehPlot): - def test_points_colormapping(self): - points = Points(np.random.rand(10, 4), vdims=['a', 'b']).opts(plot=dict(color_index=3)) - self._test_colormapping(points, 3) - - def test_points_colormapping_with_nonselection(self): - opts = dict(plot=dict(color_index=3), - style=dict(nonselection_color='red')) - points = Points(np.random.rand(10, 4), vdims=['a', 'b']).opts(**opts) - self._test_colormapping(points, 3) - - def test_points_colormapping_categorical(self): - points = Points([(i, i*2, i*3, chr(65+i)) for i in range(10)], - vdims=['a', 'b']).opts(plot=dict(color_index='b')) - plot = bokeh_renderer.get_plot(points) - plot.initialize_plot() - cmapper = plot.handles['color_mapper'] - self.assertIsInstance(cmapper, CategoricalColorMapper) - self.assertEqual(cmapper.factors, list(points['b'])) - def test_points_color_selection_nonselection(self): opts = dict(color='green', selection_color='red', nonselection_color='blue') points = Points([(i, i*2, i*3, chr(65+i)) for i in range(10)], @@ -167,7 +148,7 @@ def test_points_no_single_item_legend(self): def test_points_non_numeric_size_warning(self): data = (np.arange(10), np.arange(10), list(map(chr, range(94,104)))) - points = Points(data, vdims=['z']).opts(plot=dict(size_index=2)) + points = Points(data, vdims=['z']).opts(size='z') with 
ParamLogStream() as log: bokeh_renderer.get_plot(points) log_msg = log.stream.read() @@ -265,7 +246,7 @@ def test_points_padding_logx(self): self.assertEqual(x_range.end, 3.3483695221017129) self.assertEqual(y_range.start, 0.8) self.assertEqual(y_range.end, 3.2) - + def test_points_padding_logy(self): points = Points([1, 2, 3]).options(padding=0.1, logy=True) plot = bokeh_renderer.get_plot(points) @@ -274,7 +255,7 @@ def test_points_padding_logy(self): self.assertEqual(x_range.end, 2.2) self.assertEqual(y_range.start, 0.89595845984076228) self.assertEqual(y_range.end, 3.3483695221017129) - + def test_points_padding_datetime_square(self): points = Points([(np.datetime64('2016-04-0%d' % i), i) for i in range(1, 4)]).options( padding=0.1 @@ -500,33 +481,3 @@ def test_op_ndoverlay_value(self): for subplot, glyph_type, marker in zip(plot.subplots.values(), [Scatter, Scatter], markers): self.assertIsInstance(subplot.handles['glyph'], glyph_type) self.assertEqual(subplot.handles['glyph'].marker, marker) - - def test_point_color_index_color_clash(self): - points = Points([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='color').options(color='color', color_index='color') - with ParamLogStream() as log: - bokeh_renderer.get_plot(points) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) - - def test_point_color_index_color_no_clash(self): - points = Points([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='color').options(fill_color='color', color_index='color') - plot = bokeh_renderer.get_plot(points) - glyph = plot.handles['glyph'] - cmapper = plot.handles['fill_color_color_mapper'] - cmapper2 = plot.handles['color_mapper'] - self.assertEqual(glyph.fill_color, {'field': 'fill_color', 'transform': cmapper}) - self.assertEqual(glyph.line_color, {'field': 'color', 'transform': cmapper2}) - - def test_point_size_index_size_clash(self): - points = 
Points([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='size').options(size='size', size_index='size') - with ParamLogStream() as log: - bokeh_renderer.get_plot(points) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'size' option " - "and declare a size_index; ignoring the size_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/bokeh/testquadmeshplot.py b/holoviews/tests/plotting/bokeh/test_quadmeshplot.py similarity index 70% rename from holoviews/tests/plotting/bokeh/testquadmeshplot.py rename to holoviews/tests/plotting/bokeh/test_quadmeshplot.py index 60015ffb14..1ee5707dd9 100644 --- a/holoviews/tests/plotting/bokeh/testquadmeshplot.py +++ b/holoviews/tests/plotting/bokeh/test_quadmeshplot.py @@ -2,7 +2,7 @@ from holoviews.element import QuadMesh, Image -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import ColorBar @@ -48,3 +48,23 @@ def test_quadmesh_inverted_coords(self): self.assertEqual(source.data['right'], np.array([0.5, 0.5, 0.5, 1.5, 1.5, 1.5, 2.5, 2.5, 2.5])) self.assertEqual(source.data['top'], np.array([0.5, 1.5, 2.5, 0.5, 1.5, 2.5, 0.5, 1.5, 2.5])) self.assertEqual(source.data['bottom'], np.array([-0.5, 0.5, 1.5, -0.5, 0.5, 1.5, -0.5, 0.5, 1.5])) + + def test_quadmesh_nodata(self): + xs = [0, 1, 2] + ys = [2, 1, 0] + data = np.array([[0,1,2], [3,4,5], [6,7,8]]) + flattened = np.array([6, 3, np.NaN, 7, 4, 1, 8, 5, 2]) + qmesh = QuadMesh((xs, ys, data)).opts(nodata=0) + plot = bokeh_renderer.get_plot(qmesh) + source = plot.handles['source'] + self.assertEqual(source.data['z'], flattened) + + def test_quadmesh_nodata_uint(self): + xs = [0, 1, 2] + ys = [2, 1, 0] + data = np.array([[0,1,2], [3,4,5], [6,7,8]], dtype='uint32') + flattened = np.array([6, 3, np.NaN, 7, 4, 1, 8, 5, 2]) + qmesh = QuadMesh((xs, ys, data)).opts(nodata=0) + plot = bokeh_renderer.get_plot(qmesh) + source = plot.handles['source'] + 
self.assertEqual(source.data['z'], flattened) \ No newline at end of file diff --git a/holoviews/tests/plotting/bokeh/testradialheatmap.py b/holoviews/tests/plotting/bokeh/test_radialheatmap.py similarity index 98% rename from holoviews/tests/plotting/bokeh/testradialheatmap.py rename to holoviews/tests/plotting/bokeh/test_radialheatmap.py index 6f7e33623c..71e9afc3c8 100644 --- a/holoviews/tests/plotting/bokeh/testradialheatmap.py +++ b/holoviews/tests/plotting/bokeh/test_radialheatmap.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from itertools import product import numpy as np @@ -13,13 +11,13 @@ except: pass -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class BokehRadialHeatMapPlotTests(TestBokehPlot): def setUp(self): - super(BokehRadialHeatMapPlotTests, self).setUp() + super().setUp() # set up dummy data for convenient tests x = ["Seg {}".format(idx) for idx in range(2)] y = ["Ann {}".format(idx) for idx in range(2)] diff --git a/holoviews/tests/plotting/bokeh/testrasterplot.py b/holoviews/tests/plotting/bokeh/test_rasterplot.py similarity index 80% rename from holoviews/tests/plotting/bokeh/testrasterplot.py rename to holoviews/tests/plotting/bokeh/test_rasterplot.py index b04a624ffc..efeffb948d 100644 --- a/holoviews/tests/plotting/bokeh/testrasterplot.py +++ b/holoviews/tests/plotting/bokeh/test_rasterplot.py @@ -2,7 +2,7 @@ from holoviews.element import Raster, Image, RGB -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestRasterPlot(TestBokehPlot): @@ -21,6 +21,26 @@ def test_image_boolean_array(self): self.assertEqual(source.data['image'][0], np.array([[0, 1], [1, 0]])) + def test_nodata_array(self): + img = Image(np.array([[0, 1], [2, 0]])).opts(nodata=0) + plot = bokeh_renderer.get_plot(img) + cmapper = plot.handles['color_mapper'] + source = plot.handles['source'] + self.assertEqual(cmapper.low, 1) + 
self.assertEqual(cmapper.high, 2) + self.assertEqual(source.data['image'][0], + np.array([[2, np.NaN], [np.NaN, 1]])) + + def test_nodata_array_uint(self): + img = Image(np.array([[0, 1], [2, 0]], dtype='uint32')).opts(nodata=0) + plot = bokeh_renderer.get_plot(img) + cmapper = plot.handles['color_mapper'] + source = plot.handles['source'] + self.assertEqual(cmapper.low, 1) + self.assertEqual(cmapper.high, 2) + self.assertEqual(source.data['image'][0], + np.array([[2, np.NaN], [np.NaN, 1]])) + def test_raster_invert_axes(self): arr = np.array([[0, 1, 2], [3, 4, 5]]) raster = Raster(arr).opts(plot=dict(invert_axes=True)) diff --git a/holoviews/tests/plotting/bokeh/testrenderer.py b/holoviews/tests/plotting/bokeh/test_renderer.py similarity index 99% rename from holoviews/tests/plotting/bokeh/testrenderer.py rename to holoviews/tests/plotting/bokeh/test_renderer.py index 463efd9260..b1951b615b 100644 --- a/holoviews/tests/plotting/bokeh/testrenderer.py +++ b/holoviews/tests/plotting/bokeh/test_renderer.py @@ -1,5 +1,3 @@ -from __future__ import unicode_literals - from collections import OrderedDict from io import BytesIO from unittest import SkipTest diff --git a/holoviews/tests/plotting/bokeh/testsankey.py b/holoviews/tests/plotting/bokeh/test_sankey.py similarity index 94% rename from holoviews/tests/plotting/bokeh/testsankey.py rename to holoviews/tests/plotting/bokeh/test_sankey.py index 68b863ec51..db3c6bd4c8 100644 --- a/holoviews/tests/plotting/bokeh/testsankey.py +++ b/holoviews/tests/plotting/bokeh/test_sankey.py @@ -1,11 +1,10 @@ -from __future__ import absolute_import - import numpy as np + from holoviews.core.data import Dataset from holoviews.element import Sankey -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestSankeyPlot(TestBokehPlot): @@ -14,7 +13,7 @@ def test_sankey_simple(self): ('A', 'X', 5), ('A', 'Y', 7), ('A', 'Z', 6), ('B', 'X', 2), ('B', 'Y', 9), ('B', 'Z', 4)] ) - plot = 
list(bokeh_renderer.get_plot(sankey).subplots.values())[0] + plot = bokeh_renderer.get_plot(sankey) scatter_source = plot.handles['scatter_1_source'] quad_source = plot.handles['quad_1_source'] text_source = plot.handles['text_1_source'] @@ -55,7 +54,7 @@ def test_sankey_label_index(self): (0, 2, 5), (0, 3, 7), (0, 4, 6), (1, 2, 2), (1, 3, 9), (1, 4, 4)], Dataset(enumerate('ABXYZ'), 'index', 'label')) - ).options(label_index='label', tools=['hover']) + ).options(labels='label', tools=['hover']) plot = list(bokeh_renderer.get_plot(sankey).subplots.values())[0] scatter_source = plot.handles['scatter_1_source'] diff --git a/holoviews/tests/plotting/bokeh/testserver.py b/holoviews/tests/plotting/bokeh/test_server.py similarity index 100% rename from holoviews/tests/plotting/bokeh/testserver.py rename to holoviews/tests/plotting/bokeh/test_server.py diff --git a/holoviews/tests/plotting/bokeh/testspikesplot.py b/holoviews/tests/plotting/bokeh/test_spikesplot.py similarity index 92% rename from holoviews/tests/plotting/bokeh/testspikesplot.py rename to holoviews/tests/plotting/bokeh/test_spikesplot.py index 991d562dbf..4820b5b9a1 100644 --- a/holoviews/tests/plotting/bokeh/testspikesplot.py +++ b/holoviews/tests/plotting/bokeh/test_spikesplot.py @@ -6,17 +6,11 @@ from bokeh.models import CategoricalColorMapper, LinearColorMapper -from ..utils import ParamLogStream -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestSpikesPlot(TestBokehPlot): - def test_spikes_colormapping(self): - spikes = Spikes(np.random.rand(20, 2), vdims=['Intensity']) - color_spikes = spikes.opts(plot=dict(color_index=1)) - self._test_colormapping(color_spikes, 1) - def test_empty_spikes_plot(self): spikes = Spikes([], vdims=['Intensity']) plot = bokeh_renderer.get_plot(spikes) @@ -145,7 +139,7 @@ def test_spikes_datetime_kdim_hover_spike_length_override(self): hover = plot.handles['hover'] self.assertEqual(hover.tooltips, [('x', 
'@{x}{%F %T}'), ('y', '@{y}')]) self.assertEqual(hover.formatters, {'@{x}': "datetime"}) - + ########################### # Styling mapping # ########################### @@ -226,13 +220,3 @@ def test_op_ndoverlay_value(self): plot = bokeh_renderer.get_plot(overlay) for subplot, color in zip(plot.subplots.values(), colors): self.assertEqual(subplot.handles['glyph'].line_color, color) - - def test_spikes_color_index_color_clash(self): - spikes = Spikes([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims=['y', 'color']).options(color='color', color_index='color') - with ParamLogStream() as log: - bokeh_renderer.get_plot(spikes) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/bokeh/testspreadplot.py b/holoviews/tests/plotting/bokeh/test_spreadplot.py similarity index 98% rename from holoviews/tests/plotting/bokeh/testspreadplot.py rename to holoviews/tests/plotting/bokeh/test_spreadplot.py index 91c0b15c1c..7cd40a0bd6 100644 --- a/holoviews/tests/plotting/bokeh/testspreadplot.py +++ b/holoviews/tests/plotting/bokeh/test_spreadplot.py @@ -4,7 +4,7 @@ from holoviews.element import Spread from holoviews.streams import Buffer -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestSpreadPlot(TestBokehPlot): @@ -79,7 +79,7 @@ def test_spread_padding_logx(self): self.assertEqual(x_range.end, 3.3483695221017129) self.assertEqual(y_range.start, 0.19999999999999996) self.assertEqual(y_range.end, 3.8) - + def test_spread_padding_logy(self): spread = Spread([(1, 1, 0.5), (2, 2, 0.5), (3, 3, 0.5)]).options(padding=0.1, logy=True) plot = bokeh_renderer.get_plot(spread) diff --git a/holoviews/tests/plotting/bokeh/teststreaming.py b/holoviews/tests/plotting/bokeh/test_streaming.py similarity index 91% rename from 
holoviews/tests/plotting/bokeh/teststreaming.py rename to holoviews/tests/plotting/bokeh/test_streaming.py index b74ab2ecd8..284dcd3801 100644 --- a/holoviews/tests/plotting/bokeh/teststreaming.py +++ b/holoviews/tests/plotting/bokeh/test_streaming.py @@ -1,10 +1,10 @@ import numpy as np from holoviews.core import DynamicMap -from holoviews.element import Curve +from holoviews.element import Curve from holoviews.streams import Buffer -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer class TestBufferStreamPlot(TestBokehPlot): @@ -12,7 +12,7 @@ class TestBufferStreamPlot(TestBokehPlot): def test_buffer_stream_following(self): stream = Buffer(data={'x': np.array([1]), 'y': np.array([1])}, following=True) dmap = DynamicMap(Curve, streams=[stream]) - + plot = bokeh_renderer.get_plot(dmap) x_range = plot.handles['x_range'] diff --git a/holoviews/tests/plotting/bokeh/testtabular.py b/holoviews/tests/plotting/bokeh/test_tabular.py similarity index 100% rename from holoviews/tests/plotting/bokeh/testtabular.py rename to holoviews/tests/plotting/bokeh/test_tabular.py diff --git a/holoviews/tests/plotting/bokeh/testutils.py b/holoviews/tests/plotting/bokeh/test_utils.py similarity index 100% rename from holoviews/tests/plotting/bokeh/testutils.py rename to holoviews/tests/plotting/bokeh/test_utils.py diff --git a/holoviews/tests/plotting/bokeh/testvectorfieldplot.py b/holoviews/tests/plotting/bokeh/test_vectorfieldplot.py similarity index 84% rename from holoviews/tests/plotting/bokeh/testvectorfieldplot.py rename to holoviews/tests/plotting/bokeh/test_vectorfieldplot.py index 6c8d3b2fad..ef35f6bf58 100644 --- a/holoviews/tests/plotting/bokeh/testvectorfieldplot.py +++ b/holoviews/tests/plotting/bokeh/test_vectorfieldplot.py @@ -2,8 +2,7 @@ from holoviews.element import VectorField -from .testplot import TestBokehPlot, bokeh_renderer -from ..utils import ParamLogStream +from .test_plot import TestBokehPlot, 
bokeh_renderer try: from bokeh.models import LinearColorMapper, CategoricalColorMapper @@ -69,13 +68,3 @@ def test_vectorfield_line_width_op(self): glyph = plot.handles['glyph'] self.assertEqual(cds.data['line_width'], np.array([1, 4, 8, 1, 4, 8, 1, 4, 8])) self.assertEqual(glyph.line_width, {'field': 'line_width'}) - - def test_vectorfield_color_index_color_clash(self): - vectorfield = VectorField([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='color').options(line_color='color', color_index='color') - with ParamLogStream() as log: - bokeh_renderer.get_plot(vectorfield) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'line_color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/bokeh/testviolinplot.py b/holoviews/tests/plotting/bokeh/test_violinplot.py similarity index 99% rename from holoviews/tests/plotting/bokeh/testviolinplot.py rename to holoviews/tests/plotting/bokeh/test_violinplot.py index d195f552ff..0ae24c7566 100644 --- a/holoviews/tests/plotting/bokeh/testviolinplot.py +++ b/holoviews/tests/plotting/bokeh/test_violinplot.py @@ -8,7 +8,7 @@ from holoviews.operation.stats import univariate_kde from holoviews.util.transform import dim -from .testplot import TestBokehPlot, bokeh_renderer +from .test_plot import TestBokehPlot, bokeh_renderer try: from bokeh.models import LinearColorMapper, CategoricalColorMapper @@ -23,7 +23,7 @@ def setUp(self): import scipy # noqa except: raise SkipTest('Violin plot requires SciPy to compute kde') - super(TestBokehViolinPlot, self).setUp() + super().setUp() def test_violin_simple(self): values = np.random.rand(100) diff --git a/holoviews/tests/plotting/matplotlib/testareaplot.py b/holoviews/tests/plotting/matplotlib/test_areaplot.py similarity index 98% rename from holoviews/tests/plotting/matplotlib/testareaplot.py rename to holoviews/tests/plotting/matplotlib/test_areaplot.py index 
e7d44f802f..89fd6f2e69 100644 --- a/holoviews/tests/plotting/matplotlib/testareaplot.py +++ b/holoviews/tests/plotting/matplotlib/test_areaplot.py @@ -1,7 +1,7 @@ from holoviews.element import Area from ...utils import LoggingComparisonTestCase -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestAreaPlot(LoggingComparisonTestCase, TestMPLPlot): @@ -50,7 +50,7 @@ def test_area_padding_mixed(self): self.assertEqual(x_range[1], 3.2) self.assertEqual(y_range[0], -2.5) self.assertEqual(y_range[1], 3.5) - + def test_area_padding_hard_range(self): area = Area([(1, 1), (2, 2), (3, 3)]).redim.range(y=(0, 4)).options(padding=0.1) plot = mpl_renderer.get_plot(area) @@ -86,7 +86,7 @@ def test_area_padding_logx(self): self.assertEqual(x_range[1], 3.3483695221017129) self.assertEqual(y_range[0], 0) self.assertEqual(y_range[1], 3.2) - + def test_area_padding_logy(self): area = Area([(1, 1), (2, 2), (3, 3)]).options(padding=0.1, logy=True) plot = mpl_renderer.get_plot(area) diff --git a/holoviews/tests/plotting/matplotlib/testboxwhisker.py b/holoviews/tests/plotting/matplotlib/test_boxwhisker.py similarity index 92% rename from holoviews/tests/plotting/matplotlib/testboxwhisker.py rename to holoviews/tests/plotting/matplotlib/test_boxwhisker.py index 9e34cd93a7..1da9dd2410 100644 --- a/holoviews/tests/plotting/matplotlib/testboxwhisker.py +++ b/holoviews/tests/plotting/matplotlib/test_boxwhisker.py @@ -1,10 +1,8 @@ -from __future__ import absolute_import - import numpy as np from holoviews.element import BoxWhisker -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestMPLBoxWhiskerPlot(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testcallbacks.py b/holoviews/tests/plotting/matplotlib/test_callbacks.py similarity index 96% rename from holoviews/tests/plotting/matplotlib/testcallbacks.py rename to holoviews/tests/plotting/matplotlib/test_callbacks.py 
index d04f7dd622..7214704aba 100644 --- a/holoviews/tests/plotting/matplotlib/testcallbacks.py +++ b/holoviews/tests/plotting/matplotlib/test_callbacks.py @@ -7,7 +7,7 @@ from holoviews.streams import PointerXY, PointerX -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestCallbackPlot(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testcurveplot.py b/holoviews/tests/plotting/matplotlib/test_curveplot.py similarity index 94% rename from holoviews/tests/plotting/matplotlib/testcurveplot.py rename to holoviews/tests/plotting/matplotlib/test_curveplot.py index 2a76860529..bcdc64c711 100644 --- a/holoviews/tests/plotting/matplotlib/testcurveplot.py +++ b/holoviews/tests/plotting/matplotlib/test_curveplot.py @@ -8,7 +8,7 @@ from holoviews.element import Curve from holoviews.util.transform import dim -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer pd_skip = skipIf(pd is None, 'Pandas is not available') @@ -19,20 +19,20 @@ def test_curve_datetime64(self): dates = [np.datetime64(dt.datetime(2016,1,i)) for i in range(1, 11)] curve = Curve((dates, np.random.rand(10))) plot = mpl_renderer.get_plot(curve) - self.assertEqual(plot.handles['axis'].get_xlim(), (735964.0, 735973.0)) + self.assertEqual(plot.handles['axis'].get_xlim(), (16801.0, 16810.0)) @pd_skip def test_curve_pandas_timestamps(self): dates = pd.date_range('2016-01-01', '2016-01-10', freq='D') curve = Curve((dates, np.random.rand(10))) plot = mpl_renderer.get_plot(curve) - self.assertEqual(plot.handles['axis'].get_xlim(), (735964.0, 735973.0)) + self.assertEqual(plot.handles['axis'].get_xlim(), (16801.0, 16810.0)) def test_curve_dt_datetime(self): dates = [dt.datetime(2016,1,i) for i in range(1, 11)] curve = Curve((dates, np.random.rand(10))) plot = mpl_renderer.get_plot(curve) - self.assertEqual(tuple(map(round, plot.handles['axis'].get_xlim())), (735964.0, 735973.0)) + 
self.assertEqual(tuple(map(round, plot.handles['axis'].get_xlim())), (16801.0, 16810.0)) def test_curve_heterogeneous_datetime_types_overlay(self): dates64 = [np.datetime64(dt.datetime(2016,1,i)) for i in range(1, 11)] @@ -40,7 +40,7 @@ def test_curve_heterogeneous_datetime_types_overlay(self): curve_dt64 = Curve((dates64, np.random.rand(10))) curve_dt = Curve((dates, np.random.rand(10))) plot = mpl_renderer.get_plot(curve_dt*curve_dt64) - self.assertEqual(tuple(map(round, plot.handles['axis'].get_xlim())), (735964.0, 735974.0)) + self.assertEqual(tuple(map(round, plot.handles['axis'].get_xlim())), (16801.0, 16811.0)) @pd_skip def test_curve_heterogeneous_datetime_types_with_pd_overlay(self): @@ -51,7 +51,7 @@ def test_curve_heterogeneous_datetime_types_with_pd_overlay(self): curve_dt = Curve((dates, np.random.rand(10))) curve_pd = Curve((dates_pd, np.random.rand(10))) plot = mpl_renderer.get_plot(curve_dt*curve_dt64*curve_pd) - self.assertEqual(plot.handles['axis'].get_xlim(), (735964.0, 735976.0)) + self.assertEqual(plot.handles['axis'].get_xlim(), (16801.0, 16813.0)) def test_curve_padding_square(self): curve = Curve([1, 2, 3]).options(padding=0.1) @@ -131,8 +131,8 @@ def test_curve_padding_datetime_square(self): ) plot = mpl_renderer.get_plot(curve) x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.80000000005) - self.assertEqual(x_range[1], 736057.19999999995) + self.assertEqual(x_range[0], 16891.8) + self.assertEqual(x_range[1], 16894.2) self.assertEqual(y_range[0], 0.8) self.assertEqual(y_range[1], 3.2) @@ -142,8 +142,8 @@ def test_curve_padding_datetime_nonsquare(self): ) plot = mpl_renderer.get_plot(curve) x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.90000000002) - self.assertEqual(x_range[1], 736057.09999999998) + self.assertEqual(x_range[0], 16891.9) + self.assertEqual(x_range[1], 16894.1) 
self.assertEqual(y_range[0], 0.8) self.assertEqual(y_range[1], 3.2) diff --git a/holoviews/tests/plotting/matplotlib/testelementplot.py b/holoviews/tests/plotting/matplotlib/test_elementplot.py similarity index 97% rename from holoviews/tests/plotting/matplotlib/testelementplot.py rename to holoviews/tests/plotting/matplotlib/test_elementplot.py index 7f7104b613..85f2a01e7d 100644 --- a/holoviews/tests/plotting/matplotlib/testelementplot.py +++ b/holoviews/tests/plotting/matplotlib/test_elementplot.py @@ -4,7 +4,7 @@ from holoviews.element import Image, Curve, Scatter, Scatter3D from holoviews.streams import Stream -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer try: from matplotlib.ticker import FormatStrFormatter, FuncFormatter, PercentFormatter @@ -186,7 +186,8 @@ def test_colormapper_min_max_colors(self): self.assertEqual(cmap._rgba_over, (0, 0, 1.0, 1)) def test_colorbar_label(self): - scatter = Scatter(np.random.rand(100, 3), vdims=["y", "color"]).options(color_index=2, colorbar=True) + scatter = Scatter(np.random.rand(100, 3), vdims=["y", "color"]).options( + color='color', colorbar=True) plot = mpl_renderer.get_plot(scatter) cbar_ax = plot.handles['cax'] self.assertEqual(cbar_ax.get_ylabel(), 'color') @@ -213,5 +214,5 @@ def test_overlay_legend_opts(self): ).options(legend_opts={'framealpha': 0.5, 'facecolor': 'red'}) plot = mpl_renderer.get_plot(overlay) legend_frame = plot.handles['legend'].get_frame() - self.assertEquals(legend_frame.get_alpha(), 0.5) - self.assertEquals(legend_frame.get_facecolor(), (1.0, 0.0, 0.0, 0.5)) + self.assertEqual(legend_frame.get_alpha(), 0.5) + self.assertEqual(legend_frame.get_facecolor(), (1.0, 0.0, 0.0, 0.5)) diff --git a/holoviews/tests/plotting/matplotlib/testerrorbarplot.py b/holoviews/tests/plotting/matplotlib/test_errorbarplot.py similarity index 98% rename from holoviews/tests/plotting/matplotlib/testerrorbarplot.py rename to 
holoviews/tests/plotting/matplotlib/test_errorbarplot.py index 7cf6f1caed..fe1bdafcb9 100644 --- a/holoviews/tests/plotting/matplotlib/testerrorbarplot.py +++ b/holoviews/tests/plotting/matplotlib/test_errorbarplot.py @@ -3,7 +3,7 @@ from holoviews.core.spaces import HoloMap from holoviews.element import ErrorBars -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestErrorBarPlot(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testgraphplot.py b/holoviews/tests/plotting/matplotlib/test_graphplot.py similarity index 89% rename from holoviews/tests/plotting/matplotlib/testgraphplot.py rename to holoviews/tests/plotting/matplotlib/test_graphplot.py index 52e090711f..e4614ee413 100644 --- a/holoviews/tests/plotting/matplotlib/testgraphplot.py +++ b/holoviews/tests/plotting/matplotlib/test_graphplot.py @@ -1,6 +1,5 @@ -from __future__ import absolute_import - import numpy as np + from holoviews.core.data import Dataset from holoviews.core.options import Cycle from holoviews.core.spaces import HoloMap @@ -13,13 +12,13 @@ except: pass -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestMplGraphPlot(TestMPLPlot): def setUp(self): - super(TestMplGraphPlot, self).setUp() + super().setUp() N = 8 self.nodes = circular_layout(np.arange(N, dtype=np.int32)) @@ -41,50 +40,6 @@ def test_plot_simple_graph(self): self.assertEqual([p.vertices for p in edges.get_paths()], [p.array() for p in self.graph.edgepaths.split()]) - def test_plot_graph_categorical_colored_nodes(self): - g = self.graph2.opts(plot=dict(color_index='Label'), style=dict(cmap='Set1')) - plot = mpl_renderer.get_plot(g) - nodes = plot.handles['nodes'] - facecolors = np.array([[0.89411765, 0.10196078, 0.10980392, 1.], - [0.6 , 0.6 , 0.6 , 1.], - [0.6 , 0.6 , 0.6 , 1.], - [0.6 , 0.6 , 0.6 , 1.], - [0.6 , 0.6 , 0.6 , 1.], - [0.6 , 0.6 , 0.6 , 1.], - [0.6 , 0.6 , 0.6 , 1.], - [0.6 , 0.6 , 0.6 , 
1.]]) - self.assertEqual(nodes.get_facecolors(), facecolors) - - def test_plot_graph_numerically_colored_nodes(self): - g = self.graph3.opts(plot=dict(color_index='Weight'), style=dict(cmap='viridis')) - plot = mpl_renderer.get_plot(g) - nodes = plot.handles['nodes'] - self.assertEqual(np.asarray(nodes.get_array()), self.weights) - self.assertEqual(nodes.get_clim(), (self.weights.min(), self.weights.max())) - - def test_plot_graph_categorical_colored_edges(self): - g = self.graph3.opts(plot=dict(edge_color_index='start'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = mpl_renderer.get_plot(g) - edges = plot.handles['edges'] - colors = np.array([[1., 1., 1., 1.], - [0., 0., 0., 1.], - [1., 1., 1., 1.], - [0., 0., 0., 1.], - [1., 1., 1., 1.], - [0., 0., 0., 1.], - [1., 1., 1., 1.], - [0., 0., 0., 1.]]) - self.assertEqual(edges.get_colors(), colors) - - def test_plot_graph_numerically_colored_edges(self): - g = self.graph4.opts(plot=dict(edge_color_index='Weight'), - style=dict(edge_cmap=['#FFFFFF', '#000000'])) - plot = mpl_renderer.get_plot(g) - edges = plot.handles['edges'] - self.assertEqual(edges.get_array(), self.weights) - self.assertEqual(edges.get_clim(), (self.weights.min(), self.weights.max())) - ########################### # Styling mapping # ########################### @@ -200,11 +155,21 @@ def get_graph(i): self.assertEqual(artist.get_linewidths(), [12, 3, 5]) def test_graph_op_node_alpha(self): + import matplotlib as mpl + from packaging.version import Version + edges = [(0, 1), (0, 2)] nodes = Nodes([(0, 0, 0, 0.2), (0, 1, 1, 0.6), (1, 1, 2, 1)], vdims='alpha') graph = Graph((edges, nodes)).options(node_alpha='alpha') - with self.assertRaises(Exception): - mpl_renderer.get_plot(graph) + + if Version(mpl.__version__) < Version("3.4.0"): + # Python 3.6 only support up to matplotlib 3.3 + with self.assertRaises(Exception): + mpl_renderer.get_plot(graph) + else: + plot = mpl_renderer.get_plot(graph) + artist = plot.handles['nodes'] + 
self.assertEqual(artist.get_alpha(), np.array([0.2, 0.6, 1])) def test_graph_op_edge_color(self): edges = [(0, 1, 'red'), (0, 2, 'green'), (1, 3, 'blue')] @@ -294,7 +259,7 @@ def test_graph_op_edge_line_width_update(self): class TestMplTriMeshPlot(TestMPLPlot): def setUp(self): - super(TestMplTriMeshPlot, self).setUp() + super().setUp() self.nodes = [(0, 0, 0), (0.5, 1, 1), (1., 0, 2), (1.5, 1, 3)] self.simplices = [(0, 1, 2, 0), (1, 2, 3, 1)] @@ -385,11 +350,21 @@ def test_trimesh_op_node_size(self): self.assertEqual(artist.get_sizes(), np.array([9, 4, 64, 16])) def test_trimesh_op_node_alpha(self): + import matplotlib as mpl + from packaging.version import Version + edges = [(0, 1, 2), (1, 2, 3)] nodes = [(-1, -1, 0, 0.2), (0, 0, 1, 0.6), (0, 1, 2, 1), (1, 0, 3, 0.3)] trimesh = TriMesh((edges, Nodes(nodes, vdims='alpha'))).options(node_alpha='alpha') - with self.assertRaises(Exception): - mpl_renderer.get_plot(trimesh) + + if Version(mpl.__version__) < Version("3.4.0"): + # Python 3.6 only support up to matplotlib 3.3 + with self.assertRaises(Exception): + mpl_renderer.get_plot(trimesh) + else: + plot = mpl_renderer.get_plot(trimesh) + artist = plot.handles['nodes'] + self.assertEqual(artist.get_alpha(), np.array([0.2, 0.6, 1, 0.3])) def test_trimesh_op_node_line_width(self): edges = [(0, 1, 2), (1, 2, 3)] @@ -455,7 +430,7 @@ def test_trimesh_op_edge_line_width(self): class TestMplChordPlot(TestMPLPlot): def setUp(self): - super(TestMplChordPlot, self).setUp() + super().setUp() self.edges = [(0, 1, 1), (0, 2, 2), (1, 2, 3)] self.nodes = Dataset([(0, 'A'), (1, 'B'), (2, 'C')], 'index', 'Label') self.chord = Chord((self.edges, self.nodes)) @@ -528,7 +503,7 @@ def test_chord_edge_color_linear_style_mapping_update(self): def test_chord_node_color_linear_style_mapping_update(self): hmap = HoloMap({0: self.make_chord(0), 1: self.make_chord(1)}).options(node_color='Label', framewise=True) - plot = mpl_renderer.get_plot(hmap) + plot = mpl_renderer.get_plot(hmap) arcs = 
plot.handles['arcs'] nodes = plot.handles['nodes'] self.assertEqual(nodes.get_array(), np.array([0, 1, 2])) diff --git a/holoviews/tests/plotting/matplotlib/testheatmapplot.py b/holoviews/tests/plotting/matplotlib/test_heatmapplot.py similarity index 96% rename from holoviews/tests/plotting/matplotlib/testheatmapplot.py rename to holoviews/tests/plotting/matplotlib/test_heatmapplot.py index 840a801720..5f735be9b5 100644 --- a/holoviews/tests/plotting/matplotlib/testheatmapplot.py +++ b/holoviews/tests/plotting/matplotlib/test_heatmapplot.py @@ -2,7 +2,7 @@ from holoviews.element import HeatMap, Image -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestHeatMapPlot(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testhextilesplot.py b/holoviews/tests/plotting/matplotlib/test_hextilesplot.py similarity index 95% rename from holoviews/tests/plotting/matplotlib/testhextilesplot.py rename to holoviews/tests/plotting/matplotlib/test_hextilesplot.py index c7ed9add9f..32ba7f8b08 100644 --- a/holoviews/tests/plotting/matplotlib/testhextilesplot.py +++ b/holoviews/tests/plotting/matplotlib/test_hextilesplot.py @@ -3,7 +3,7 @@ from holoviews.element import HexTiles -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestHexTilesPlot(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testhistogramplot.py b/holoviews/tests/plotting/matplotlib/test_histogramplot.py similarity index 91% rename from holoviews/tests/plotting/matplotlib/testhistogramplot.py rename to holoviews/tests/plotting/matplotlib/test_histogramplot.py index 8cbf181a69..44a586bdb5 100644 --- a/holoviews/tests/plotting/matplotlib/testhistogramplot.py +++ b/holoviews/tests/plotting/matplotlib/test_histogramplot.py @@ -8,7 +8,7 @@ from holoviews.plotting.util import hex2rgb from ...utils import LoggingComparisonTestCase -from .testplot import TestMPLPlot, mpl_renderer +from 
.test_plot import TestMPLPlot, mpl_renderer class TestHistogramPlot(LoggingComparisonTestCase, TestMPLPlot): @@ -19,8 +19,8 @@ def test_histogram_datetime64_plot(self): plot = mpl_renderer.get_plot(hist) artist = plot.handles['artist'] ax = plot.handles['axis'] - self.assertEqual(ax.get_xlim(), (736330.0, 736333.0)) - bounds = [736330.0, 736330.75, 736331.5, 736332.25] + self.assertEqual(ax.get_xlim(), (17167.0, 17170.0)) + bounds = [17167.0, 17167.75, 17168.5, 17169.25] self.assertEqual([p.get_x() for p in artist.patches], bounds) def test_histogram_padding_square(self): @@ -84,8 +84,8 @@ def test_histogram_padding_datetime_square(self): ) plot = mpl_renderer.get_plot(histogram) x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.19999999995) - self.assertEqual(x_range[1], 736057.80000000005) + self.assertEqual(x_range[0], 16891.2) + self.assertEqual(x_range[1], 16894.8) self.assertEqual(y_range[0], 0) self.assertEqual(y_range[1], 3.2) @@ -95,8 +95,8 @@ def test_histogram_padding_datetime_nonsquare(self): ) plot = mpl_renderer.get_plot(histogram) x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.34999999998) - self.assertEqual(x_range[1], 736057.65000000002) + self.assertEqual(x_range[0], 16891.35) + self.assertEqual(x_range[1], 16894.65) self.assertEqual(y_range[0], 0) self.assertEqual(y_range[1], 3.2) @@ -124,12 +124,16 @@ def test_histogram_categorical_color_op(self): vdims=['y', 'color']).options(color='color') with self.assertRaises(Exception): mpl_renderer.get_plot(histogram) - + def test_histogram_line_color_op(self): histogram = Histogram([(0, 0, '#000'), (0, 1, '#F00'), (0, 2, '#0F0')], vdims=['y', 'color']).options(edgecolor='color') - with self.assertRaises(Exception): - mpl_renderer.get_plot(histogram) + plot = mpl_renderer.get_plot(histogram) + artist = plot.handles['artist'] + children = artist.get_children() + 
self.assertEqual(children[0].get_edgecolor(), (0, 0, 0, 1)) + self.assertEqual(children[1].get_edgecolor(), (1, 0, 0, 1)) + self.assertEqual(children[2].get_edgecolor(), (0, 1, 0, 1)) def test_histogram_alpha_op(self): histogram = Histogram([(0, 0, 0), (0, 1, 0.2), (0, 2, 0.7)], diff --git a/holoviews/tests/plotting/matplotlib/testlabels.py b/holoviews/tests/plotting/matplotlib/test_labels.py similarity index 89% rename from holoviews/tests/plotting/matplotlib/testlabels.py rename to holoviews/tests/plotting/matplotlib/test_labels.py index 8776c4ca5f..fe8fabe2d5 100644 --- a/holoviews/tests/plotting/matplotlib/testlabels.py +++ b/holoviews/tests/plotting/matplotlib/test_labels.py @@ -5,7 +5,7 @@ from holoviews.element import Labels from holoviews.plotting.util import rgb2hex -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestLabelsPlot(TestMPLPlot): @@ -49,20 +49,6 @@ def test_labels_inverted(self): self.assertEqual(text._y, expected['x'][i]) self.assertEqual(text.get_text(), expected['Label'][i]) - def test_labels_color_mapped(self): - labels = Labels([(0, 1, 0.33333), (1, 0, 0.66666)]).options(color_index=2) - plot = mpl_renderer.get_plot(labels) - artist = plot.handles['artist'] - expected = {'x': np.array([0, 1]), 'y': np.array([1, 0]), - 'Label': ['0.33333', '0.66666']} - colors = [(0.26666666666666666, 0.0039215686274509803, 0.32941176470588235, 1.0), - (0.99215686274509807, 0.90588235294117647, 0.14117647058823529, 1.0)] - for i, text in enumerate(artist): - self.assertEqual(text._x, expected['x'][i]) - self.assertEqual(text._y, expected['y'][i]) - self.assertEqual(text.get_text(), expected['Label'][i]) - self.assertEqual(text.get_color(), colors[i]) - ########################### # Styling mapping # ########################### @@ -89,7 +75,7 @@ def test_label_color_op_update(self): artist = plot.handles['artist'] self.assertEqual([a.get_color() for a in artist], ['#FF0000', '#00FF00', '#0000FF']) - + 
def test_label_linear_color_op(self): labels = Labels([(0, 0, 0), (0, 1, 1), (0, 2, 2)], vdims='color').options(color='color') diff --git a/holoviews/tests/plotting/matplotlib/testlayoutplot.py b/holoviews/tests/plotting/matplotlib/test_layoutplot.py similarity index 98% rename from holoviews/tests/plotting/matplotlib/testlayoutplot.py rename to holoviews/tests/plotting/matplotlib/test_layoutplot.py index 1904150e42..5d1362b4c1 100644 --- a/holoviews/tests/plotting/matplotlib/testlayoutplot.py +++ b/holoviews/tests/plotting/matplotlib/test_layoutplot.py @@ -5,7 +5,7 @@ from holoviews.streams import Stream from ...utils import LoggingComparisonTestCase -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestLayoutPlot(LoggingComparisonTestCase, TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testoverlayplot.py b/holoviews/tests/plotting/matplotlib/test_overlayplot.py similarity index 98% rename from holoviews/tests/plotting/matplotlib/testoverlayplot.py rename to holoviews/tests/plotting/matplotlib/test_overlayplot.py index 01e127a461..9244b344fb 100644 --- a/holoviews/tests/plotting/matplotlib/testoverlayplot.py +++ b/holoviews/tests/plotting/matplotlib/test_overlayplot.py @@ -4,7 +4,7 @@ from holoviews.element import Curve, Scatter from ...utils import LoggingComparisonTestCase -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer try: from holoviews.plotting.mpl import OverlayPlot @@ -16,7 +16,7 @@ class TestOverlayPlot(LoggingComparisonTestCase, TestMPLPlot): def test_interleaved_overlay(self): """ - Test to avoid regression after fix of https://github.com/ioam/holoviews/issues/41 + Test to avoid regression after fix of https://github.com/holoviz/holoviews/issues/41 """ o = Overlay([Curve(np.array([[0, 1]])) , Scatter([[1,1]]) , Curve(np.array([[0, 1]]))]) OverlayPlot(o) diff --git a/holoviews/tests/plotting/matplotlib/testpathplot.py 
b/holoviews/tests/plotting/matplotlib/test_pathplot.py similarity index 96% rename from holoviews/tests/plotting/matplotlib/testpathplot.py rename to holoviews/tests/plotting/matplotlib/test_pathplot.py index 825ca3871a..babe14a6d7 100644 --- a/holoviews/tests/plotting/matplotlib/testpathplot.py +++ b/holoviews/tests/plotting/matplotlib/test_pathplot.py @@ -4,7 +4,7 @@ from holoviews.core.spaces import HoloMap from holoviews.element import Polygons, Contours, Path -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestPathPlot(TestMPLPlot): @@ -198,15 +198,6 @@ def test_polygons_line_width_op(self): class TestContoursPlot(TestMPLPlot): - def test_contours_categorical_color(self): - path = Contours([{('x', 'y'): np.random.rand(10, 2), 'z': cat} - for cat in ('B', 'A', 'B')], - vdims='z').opts(plot=dict(color_index='z')) - plot = mpl_renderer.get_plot(path) - artist = plot.handles['artist'] - self.assertEqual(artist.get_array(), np.array([0, 1, 0])) - self.assertEqual(artist.get_clim(), (0, 1)) - def test_contours_color_op(self): contours = Contours([ {('x', 'y'): [(0, 0), (0, 1), (1, 0)], 'color': 'green'}, diff --git a/holoviews/tests/plotting/matplotlib/testplot.py b/holoviews/tests/plotting/matplotlib/test_plot.py similarity index 100% rename from holoviews/tests/plotting/matplotlib/testplot.py rename to holoviews/tests/plotting/matplotlib/test_plot.py diff --git a/holoviews/tests/plotting/matplotlib/testpointplot.py b/holoviews/tests/plotting/matplotlib/test_pointplot.py similarity index 89% rename from holoviews/tests/plotting/matplotlib/testpointplot.py rename to holoviews/tests/plotting/matplotlib/test_pointplot.py index f8caf80c0c..711c8543c5 100644 --- a/holoviews/tests/plotting/matplotlib/testpointplot.py +++ b/holoviews/tests/plotting/matplotlib/test_pointplot.py @@ -4,7 +4,7 @@ from holoviews.core.spaces import HoloMap from holoviews.element import Points -from .testplot import TestMPLPlot, 
mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer from ..utils import ParamLogStream try: @@ -17,7 +17,7 @@ class TestPointPlot(TestMPLPlot): def test_points_non_numeric_size_warning(self): data = (np.arange(10), np.arange(10), list(map(chr, range(94,104)))) - points = Points(data, vdims=['z']).opts(plot=dict(size_index=2)) + points = Points(data, vdims=['z']).opts(plot=dict(size='z')) with ParamLogStream() as log: mpl_renderer.get_plot(points) log_msg = log.stream.read() @@ -27,22 +27,22 @@ def test_points_non_numeric_size_warning(self): def test_points_cbar_extend_both(self): img = Points(([0, 1], [0, 3])).redim(y=dict(range=(1,2))) - plot = mpl_renderer.get_plot(img.opts(colorbar=True, color_index=1)) + plot = mpl_renderer.get_plot(img.opts(colorbar=True, color='y')) self.assertEqual(plot.handles['cbar'].extend, 'both') def test_points_cbar_extend_min(self): img = Points(([0, 1], [0, 3])).redim(y=dict(range=(1, None))) - plot = mpl_renderer.get_plot(img.opts(colorbar=True, color_index=1)) + plot = mpl_renderer.get_plot(img.opts(colorbar=True, color='y')) self.assertEqual(plot.handles['cbar'].extend, 'min') def test_points_cbar_extend_max(self): img = Points(([0, 1], [0, 3])).redim(y=dict(range=(None, 2))) - plot = mpl_renderer.get_plot(img.opts(colorbar=True, color_index=1)) + plot = mpl_renderer.get_plot(img.opts(colorbar=True, color='y')) self.assertEqual(plot.handles['cbar'].extend, 'max') - def test_points_cbar_extend_clime(self): - img = Points(([0, 1], [0, 3])).opts(style=dict(clim=(None, None))) - plot = mpl_renderer.get_plot(img.opts(colorbar=True, color_index=1)) + def test_points_cbar_extend_clim(self): + img = Points(([0, 1], [0, 3])).opts(colorbar=True, color='y', clim=(None, None)) + plot = mpl_renderer.get_plot(img) self.assertEqual(plot.handles['cbar'].extend, 'neither') def test_points_rcparams_do_not_persist(self): @@ -137,8 +137,8 @@ def test_points_padding_datetime_square(self): ) plot = mpl_renderer.get_plot(points) x_range, 
y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.80000000005) - self.assertEqual(x_range[1], 736057.19999999995) + self.assertEqual(x_range[0], 16891.8) + self.assertEqual(x_range[1], 16894.2) self.assertEqual(y_range[0], 0.8) self.assertEqual(y_range[1], 3.2) @@ -148,8 +148,8 @@ def test_points_padding_datetime_nonsquare(self): ) plot = mpl_renderer.get_plot(points) x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.90000000002) - self.assertEqual(x_range[1], 736057.09999999998) + self.assertEqual(x_range[0], 16891.9) + self.assertEqual(x_range[1], 16894.1) self.assertEqual(y_range[0], 0.8) self.assertEqual(y_range[1], 3.2) @@ -298,23 +298,3 @@ def test_op_ndoverlay_value(self): style = dict(subplot.style[subplot.cyclic_index]) style = subplot._apply_transforms(subplot.current_frame, {}, style) self.assertEqual(style['marker'], marker) - - def test_point_color_index_color_clash(self): - points = Points([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='color').options(color='color', color_index='color') - with ParamLogStream() as log: - mpl_renderer.get_plot(points) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) - - def test_point_size_index_size_clash(self): - points = Points([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims='size').options(s='size', size_index='size') - with ParamLogStream() as log: - mpl_renderer.get_plot(points) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 's' option " - "and declare a size_index; ignoring the size_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/matplotlib/testquadmeshplot.py b/holoviews/tests/plotting/matplotlib/test_quadmeshplot.py similarity index 59% rename from 
holoviews/tests/plotting/matplotlib/testquadmeshplot.py rename to holoviews/tests/plotting/matplotlib/test_quadmeshplot.py index b93cd466f7..3481c77b28 100644 --- a/holoviews/tests/plotting/matplotlib/testquadmeshplot.py +++ b/holoviews/tests/plotting/matplotlib/test_quadmeshplot.py @@ -2,7 +2,7 @@ from holoviews.element import QuadMesh, Image, Dataset -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestQuadMeshPlot(TestMPLPlot): @@ -14,6 +14,22 @@ def test_quadmesh_invert_axes(self): artist = plot.handles['artist'] self.assertEqual(artist.get_array().data, arr.T[:, ::-1].flatten()) + def test_quadmesh_nodata(self): + arr = np.array([[0, 1, 2], [3, 4, 5]]) + qmesh = QuadMesh(Image(arr)).opts(nodata=0) + plot = mpl_renderer.get_plot(qmesh) + artist = plot.handles['artist'] + self.assertEqual(artist.get_array().data, + np.array([3, 4, 5, np.NaN, 1, 2])) + + def test_quadmesh_nodata_uint(self): + arr = np.array([[0, 1, 2], [3, 4, 5]], dtype='uint32') + qmesh = QuadMesh(Image(arr)).opts(nodata=0) + plot = mpl_renderer.get_plot(qmesh) + artist = plot.handles['artist'] + self.assertEqual(artist.get_array().data, + np.array([3, 4, 5, np.NaN, 1, 2])) + def test_quadmesh_update_cbar(self): xs = ys = np.linspace(0, 6, 10) zs = np.linspace(1, 2, 5) diff --git a/holoviews/tests/plotting/matplotlib/testradialheatmap.py b/holoviews/tests/plotting/matplotlib/test_radialheatmap.py similarity index 96% rename from holoviews/tests/plotting/matplotlib/testradialheatmap.py rename to holoviews/tests/plotting/matplotlib/test_radialheatmap.py index b7949d70e4..221f47b24f 100644 --- a/holoviews/tests/plotting/matplotlib/testradialheatmap.py +++ b/holoviews/tests/plotting/matplotlib/test_radialheatmap.py @@ -10,13 +10,13 @@ except: pass -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class RadialHeatMapPlotTests(TestMPLPlot): def setUp(self): - super(RadialHeatMapPlotTests, 
self).setUp() + super().setUp() # set up dummy data for convenient tests x = ["Seg {}".format(idx) for idx in range(2)] diff --git a/holoviews/tests/plotting/matplotlib/testrasterplot.py b/holoviews/tests/plotting/matplotlib/test_rasterplot.py similarity index 73% rename from holoviews/tests/plotting/matplotlib/testrasterplot.py rename to holoviews/tests/plotting/matplotlib/test_rasterplot.py index f7882465b9..9db1729c84 100644 --- a/holoviews/tests/plotting/matplotlib/testrasterplot.py +++ b/holoviews/tests/plotting/matplotlib/test_rasterplot.py @@ -2,7 +2,7 @@ from holoviews.element import Raster, Image -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer try: from matplotlib.colors import ListedColormap @@ -20,6 +20,27 @@ def test_raster_invert_axes(self): self.assertEqual(artist.get_array().data, arr.T[::-1]) self.assertEqual(artist.get_extent(), [0, 2, 0, 3]) + def test_raster_nodata(self): + arr = np.array([[0, 1, 2], [3, 4, 5]]) + expected = np.array([[3, 4, 5], + [np.NaN, 1, 2]]) + + raster = Raster(arr).opts(nodata=0) + plot = mpl_renderer.get_plot(raster) + artist = plot.handles['artist'] + self.assertEqual(artist.get_array().data, expected) + + def test_raster_nodata_uint(self): + arr = np.array([[0, 1, 2], [3, 4, 5]], dtype='uint32') + expected = np.array([[3, 4, 5], + [np.NaN, 1, 2]]) + + raster = Raster(arr).opts(nodata=0) + plot = mpl_renderer.get_plot(raster) + artist = plot.handles['artist'] + self.assertEqual(artist.get_array().data, expected) + + def test_image_invert_axes(self): arr = np.array([[0, 1, 2], [3, 4, 5]]) raster = Image(arr).opts(invert_axes=True) diff --git a/holoviews/tests/plotting/matplotlib/testrenderer.py b/holoviews/tests/plotting/matplotlib/test_renderer.py similarity index 96% rename from holoviews/tests/plotting/matplotlib/testrenderer.py rename to holoviews/tests/plotting/matplotlib/test_renderer.py index 30bfd3b2f0..6b10384e71 100644 --- 
a/holoviews/tests/plotting/matplotlib/testrenderer.py +++ b/holoviews/tests/plotting/matplotlib/test_renderer.py @@ -2,10 +2,6 @@ """ Test cases for rendering exporters """ -from __future__ import unicode_literals - -import os -import sys import subprocess from collections import OrderedDict @@ -68,12 +64,12 @@ def test_get_size_single_plot(self): def test_get_size_row_plot(self): plot = self.renderer.get_plot(self.image1+self.image2) w, h = self.renderer.get_size(plot) - self.assertEqual((w, h), (576, 255)) + self.assertEqual((w, h), (576, 257)) def test_get_size_column_plot(self): plot = self.renderer.get_plot((self.image1+self.image2).cols(1)) w, h = self.renderer.get_size(plot) - self.assertEqual((w, h), (288, 505)) + self.assertEqual((w, h), (288, 509)) def test_get_size_grid_plot(self): grid = GridSpace({(i, j): self.image1 for i in range(3) for j in range(3)}) @@ -92,10 +88,7 @@ def test_render_gif(self): self.assertIn(" 2: - devnull = subprocess.DEVNULL - else: - devnull = open(os.devnull, 'w') + devnull = subprocess.DEVNULL try: subprocess.call(['ffmpeg', '-h'], stdout=devnull, stderr=devnull) except: diff --git a/holoviews/tests/plotting/matplotlib/testsankey.py b/holoviews/tests/plotting/matplotlib/test_sankey.py similarity index 92% rename from holoviews/tests/plotting/matplotlib/testsankey.py rename to holoviews/tests/plotting/matplotlib/test_sankey.py index 894f4a6bf2..1200c4f939 100644 --- a/holoviews/tests/plotting/matplotlib/testsankey.py +++ b/holoviews/tests/plotting/matplotlib/test_sankey.py @@ -3,7 +3,7 @@ from holoviews.core.data import Dataset from holoviews.element import Sankey -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestSankeyPlot(TestMPLPlot): @@ -13,7 +13,7 @@ def test_sankey_simple(self): ('A', 'X', 5), ('A', 'Y', 7), ('A', 'Z', 6), ('B', 'X', 2), ('B', 'Y', 9), ('B', 'Z', 4)] ) - plot = list(mpl_renderer.get_plot(sankey).subplots.values())[0] + plot = 
mpl_renderer.get_plot(sankey) rects = plot.handles['rects'] labels = plot.handles['labels'] @@ -45,7 +45,7 @@ def test_sankey_label_index(self): (1, 2, 2), (1, 3, 9), (1, 4, 4)], Dataset(enumerate('ABXYZ'), 'index', 'label')) ).options(label_index='label') - plot = list(mpl_renderer.get_plot(sankey).subplots.values())[0] + plot = mpl_renderer.get_plot(sankey) labels = plot.handles['labels'] text_data = {'x': np.array([18.75, 18.75, 1003.75, 1003.75, 1003.75]), diff --git a/holoviews/tests/plotting/matplotlib/testscatter3d.py b/holoviews/tests/plotting/matplotlib/test_scatter3d.py similarity index 98% rename from holoviews/tests/plotting/matplotlib/testscatter3d.py rename to holoviews/tests/plotting/matplotlib/test_scatter3d.py index ae9b28288c..7997011624 100644 --- a/holoviews/tests/plotting/matplotlib/testscatter3d.py +++ b/holoviews/tests/plotting/matplotlib/test_scatter3d.py @@ -1,6 +1,6 @@ from holoviews.element import Scatter3D -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestPointPlot(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testspikeplot.py b/holoviews/tests/plotting/matplotlib/test_spikeplot.py similarity index 89% rename from holoviews/tests/plotting/matplotlib/testspikeplot.py rename to holoviews/tests/plotting/matplotlib/test_spikeplot.py index fc9f06a4ea..08f719d3bf 100644 --- a/holoviews/tests/plotting/matplotlib/testspikeplot.py +++ b/holoviews/tests/plotting/matplotlib/test_spikeplot.py @@ -4,8 +4,7 @@ from holoviews.core.spaces import HoloMap from holoviews.element import Spikes -from ..utils import ParamLogStream -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestSpikesPlot(TestMPLPlot): @@ -67,8 +66,8 @@ def test_spikes_padding_datetime_square(self): ) plot = mpl_renderer.get_plot(spikes) x_range = plot.handles['axis'].get_xlim() - self.assertEqual(x_range[0], 736054.80000000005) - 
self.assertEqual(x_range[1], 736057.19999999995) + self.assertEqual(x_range[0], 16891.8) + self.assertEqual(x_range[1], 16894.2) def test_spikes_padding_datetime_square_heights(self): spikes = Spikes([(np.datetime64('2016-04-0%d' % i), i) for i in range(1, 4)], vdims=['Height']).options( @@ -76,8 +75,8 @@ def test_spikes_padding_datetime_square_heights(self): ) plot = mpl_renderer.get_plot(spikes) x_range, y_range = plot.handles['axis'].get_xlim(), plot.handles['axis'].get_ylim() - self.assertEqual(x_range[0], 736054.80000000005) - self.assertEqual(x_range[1], 736057.19999999995) + self.assertEqual(x_range[0], 16891.8) + self.assertEqual(x_range[1], 16894.2) self.assertEqual(y_range[0], 0) self.assertEqual(y_range[1], 3.2) @@ -87,8 +86,8 @@ def test_spikes_padding_datetime_nonsquare(self): ) plot = mpl_renderer.get_plot(spikes) x_range = plot.handles['axis'].get_xlim() - self.assertEqual(x_range[0], 736054.90000000002) - self.assertEqual(x_range[1], 736057.09999999998) + self.assertEqual(x_range[0], 16891.9) + self.assertEqual(x_range[1], 16894.1) ########################### # Styling mapping # @@ -118,7 +117,7 @@ def test_spikes_color_op_update(self): self.assertEqual(artist.get_edgecolors(), np.array([ [1, 0, 0, 1], [0, 1, 0, 1], [0, 0, 1, 1]] )) - + def test_spikes_linear_color_op(self): spikes = Spikes([(0, 0, 0), (0, 1, 1), (0, 2, 2)], vdims=['y', 'color']).options(color='color') @@ -187,14 +186,3 @@ def test_op_ndoverlay_value(self): children = subplot.handles['artist'].get_children() for c in children: self.assertEqual(c.get_facecolor(), color) - - def test_spikes_color_index_color_clash(self): - spikes = Spikes([(0, 0, 0), (0, 1, 1), (0, 2, 2)], - vdims=['y', 'color']).options(color='color', color_index='color') - with ParamLogStream() as log: - mpl_renderer.get_plot(spikes) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 'color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, 
warning) - diff --git a/holoviews/tests/plotting/matplotlib/testutils.py b/holoviews/tests/plotting/matplotlib/test_utils.py similarity index 93% rename from holoviews/tests/plotting/matplotlib/testutils.py rename to holoviews/tests/plotting/matplotlib/test_utils.py index 312d87d1c7..5357ac97de 100644 --- a/holoviews/tests/plotting/matplotlib/testutils.py +++ b/holoviews/tests/plotting/matplotlib/test_utils.py @@ -1,11 +1,9 @@ -from __future__ import absolute_import, unicode_literals - import numpy as np from holoviews.element import Polygons from holoviews.plotting.mpl.util import polygons_to_path_patches -from .testplot import TestMPLPlot +from .test_plot import TestMPLPlot class TestUtils(TestMPLPlot): diff --git a/holoviews/tests/plotting/matplotlib/testvectorfieldplot.py b/holoviews/tests/plotting/matplotlib/test_vectorfieldplot.py similarity index 86% rename from holoviews/tests/plotting/matplotlib/testvectorfieldplot.py rename to holoviews/tests/plotting/matplotlib/test_vectorfieldplot.py index 47f8756854..fa57df8c0b 100644 --- a/holoviews/tests/plotting/matplotlib/testvectorfieldplot.py +++ b/holoviews/tests/plotting/matplotlib/test_vectorfieldplot.py @@ -3,8 +3,7 @@ from holoviews.core.spaces import HoloMap from holoviews.element import VectorField -from .testplot import TestMPLPlot, mpl_renderer -from ..utils import ParamLogStream +from .test_plot import TestMPLPlot, mpl_renderer class TestVectorFieldPlot(TestMPLPlot): @@ -90,13 +89,3 @@ def test_vectorfield_line_width_op_update(self): self.assertEqual(artist.get_linewidths(), [1, 4, 8]) plot.update((1,)) self.assertEqual(artist.get_linewidths(), [3, 2, 5]) - - def test_vectorfield_color_index_color_clash(self): - vectorfield = VectorField([(0, 0, 0, 1, 0), (0, 1, 0, 1, 1), (0, 2, 0, 1, 2)], - vdims=['A', 'M', 'color']).options(color='color', color_index='A') - with ParamLogStream() as log: - mpl_renderer.get_plot(vectorfield) - log_msg = log.stream.read() - warning = ("Cannot declare style mapping for 
'color' option " - "and declare a color_index; ignoring the color_index.\n") - self.assertEqual(log_msg, warning) diff --git a/holoviews/tests/plotting/matplotlib/testviolinplot.py b/holoviews/tests/plotting/matplotlib/test_violinplot.py similarity index 93% rename from holoviews/tests/plotting/matplotlib/testviolinplot.py rename to holoviews/tests/plotting/matplotlib/test_violinplot.py index 69a9c2baa3..8a1addf252 100644 --- a/holoviews/tests/plotting/matplotlib/testviolinplot.py +++ b/holoviews/tests/plotting/matplotlib/test_violinplot.py @@ -1,10 +1,8 @@ -from __future__ import absolute_import - import numpy as np from holoviews.element import Violin -from .testplot import TestMPLPlot, mpl_renderer +from .test_plot import TestMPLPlot, mpl_renderer class TestMPLViolinPlot(TestMPLPlot): @@ -27,7 +25,7 @@ def test_violin_simple_overlay(self): p2.handles['boxes'][0].get_path().vertices) for b1, b2 in zip(p1.handles['bodies'][0].get_paths(), p2.handles['bodies'][0].get_paths()): self.assertEqual(b1.vertices, b2.vertices) - + def test_violin_multi(self): violin = Violin((np.random.randint(0, 2, 100), np.random.rand(100)), kdims=['A']).sort() r1, r2 = violin.range(1) diff --git a/holoviews/tests/plotting/plotly/testareaplot.py b/holoviews/tests/plotting/plotly/test_areaplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testareaplot.py rename to holoviews/tests/plotting/plotly/test_areaplot.py index 9ca600b18b..cdc200b8da 100644 --- a/holoviews/tests/plotting/plotly/testareaplot.py +++ b/holoviews/tests/plotting/plotly/test_areaplot.py @@ -2,7 +2,7 @@ from holoviews.element import Area -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestAreaPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testbarplot.py b/holoviews/tests/plotting/plotly/test_barplot.py similarity index 99% rename from holoviews/tests/plotting/plotly/testbarplot.py rename to holoviews/tests/plotting/plotly/test_barplot.py index 
724513ee44..5661fbd610 100644 --- a/holoviews/tests/plotting/plotly/testbarplot.py +++ b/holoviews/tests/plotting/plotly/test_barplot.py @@ -2,7 +2,7 @@ from holoviews.element import Bars -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestBarsPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testbivariateplot.py b/holoviews/tests/plotting/plotly/test_bivariateplot.py similarity index 97% rename from holoviews/tests/plotting/plotly/testbivariateplot.py rename to holoviews/tests/plotting/plotly/test_bivariateplot.py index c7d8af77c8..19e020787b 100644 --- a/holoviews/tests/plotting/plotly/testbivariateplot.py +++ b/holoviews/tests/plotting/plotly/test_bivariateplot.py @@ -2,7 +2,7 @@ from holoviews.element import Bivariate -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestBivariatePlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testboxwhiskerplot.py b/holoviews/tests/plotting/plotly/test_boxwhiskerplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testboxwhiskerplot.py rename to holoviews/tests/plotting/plotly/test_boxwhiskerplot.py index 01d6a79e1e..9e0b94c9ec 100644 --- a/holoviews/tests/plotting/plotly/testboxwhiskerplot.py +++ b/holoviews/tests/plotting/plotly/test_boxwhiskerplot.py @@ -2,7 +2,7 @@ from holoviews.element import BoxWhisker -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestBoxWhiskerPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testcallbacks.py b/holoviews/tests/plotting/plotly/test_callbacks.py similarity index 100% rename from holoviews/tests/plotting/plotly/testcallbacks.py rename to holoviews/tests/plotting/plotly/test_callbacks.py diff --git a/holoviews/tests/plotting/plotly/testcurveplot.py b/holoviews/tests/plotting/plotly/test_curveplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testcurveplot.py rename to 
holoviews/tests/plotting/plotly/test_curveplot.py index be5b224c16..82da91fb10 100644 --- a/holoviews/tests/plotting/plotly/testcurveplot.py +++ b/holoviews/tests/plotting/plotly/test_curveplot.py @@ -2,7 +2,7 @@ from holoviews.element import Curve, Tiles -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestCurvePlot(TestPlotlyPlot): @@ -62,7 +62,7 @@ def test_visible(self): class TestMapboxCurvePlot(TestPlotlyPlot): def setUp(self): - super(TestMapboxCurvePlot, self).setUp() + super().setUp() # Precompute coordinates self.xs = [3000000, 2000000, 1000000] diff --git a/holoviews/tests/plotting/plotly/testdash.py b/holoviews/tests/plotting/plotly/test_dash.py similarity index 99% rename from holoviews/tests/plotting/plotly/testdash.py rename to holoviews/tests/plotting/plotly/test_dash.py index f9c9534b8a..94616524f6 100644 --- a/holoviews/tests/plotting/plotly/testdash.py +++ b/holoviews/tests/plotting/plotly/test_dash.py @@ -1,6 +1,6 @@ from dash._callback_context import CallbackContext -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot from holoviews.plotting.plotly.dash import ( to_dash, DashComponents, encode_store_data, decode_store_data ) @@ -19,7 +19,7 @@ class TestHoloViewsDash(TestPlotlyPlot): def setUp(self): - super(TestHoloViewsDash, self).setUp() + super().setUp() # Build Dash app mock self.app = MagicMock() diff --git a/holoviews/tests/plotting/plotly/testdistributionplot.py b/holoviews/tests/plotting/plotly/test_distributionplot.py similarity index 96% rename from holoviews/tests/plotting/plotly/testdistributionplot.py rename to holoviews/tests/plotting/plotly/test_distributionplot.py index 514f89031b..a4ca532fcf 100644 --- a/holoviews/tests/plotting/plotly/testdistributionplot.py +++ b/holoviews/tests/plotting/plotly/test_distributionplot.py @@ -1,6 +1,6 @@ from holoviews.element import Distribution -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class 
TestDistributionPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testdynamic.py b/holoviews/tests/plotting/plotly/test_dynamic.py similarity index 99% rename from holoviews/tests/plotting/plotly/testdynamic.py rename to holoviews/tests/plotting/plotly/test_dynamic.py index 99579afbdf..cd5f384693 100644 --- a/holoviews/tests/plotting/plotly/testdynamic.py +++ b/holoviews/tests/plotting/plotly/test_dynamic.py @@ -14,7 +14,7 @@ from bokeh.document import Document from pyviz_comms import Comm -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestDynamicMap(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testelementplot.py b/holoviews/tests/plotting/plotly/test_elementplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testelementplot.py rename to holoviews/tests/plotting/plotly/test_elementplot.py index eed26280ca..2c7b5f38bd 100644 --- a/holoviews/tests/plotting/plotly/testelementplot.py +++ b/holoviews/tests/plotting/plotly/test_elementplot.py @@ -6,7 +6,7 @@ from holoviews.element import Curve, Scatter3D, Path3D from holoviews.streams import PointerX -from .testplot import TestPlotlyPlot, plotly_renderer +from .test_plot import TestPlotlyPlot, plotly_renderer class TestElementPlot(TestPlotlyPlot): @@ -33,7 +33,7 @@ def hook(plot, element): self.assertEqual(plot.state['layout']['title'], 'Called') ### Axis labelling ### - + def test_element_plot_xlabel(self): curve = Curve([(10, 1), (100, 2), (1000, 3)]).options(xlabel='X-Axis') state = self._get_plot_state(curve) @@ -50,7 +50,7 @@ def test_element_plot_zlabel(self): self.assertEqual(state['layout']['scene']['zaxis']['title']['text'], 'Z-Axis') ### Axis ranges ### - + def test_element_plot_xrange(self): curve = Curve([(10, 1), (100, 2), (1000, 3)]) state = self._get_plot_state(curve) @@ -127,7 +127,7 @@ def test_element_plot3d_padding(self): self.assertEqual(state['layout']['scene']['zaxis']['range'], [1.7, 5.3]) ### Axis log ### - 
+ def test_element_plot_logx(self): curve = Curve([(10, 1), (100, 2), (1000, 3)]).options(logx=True) state = self._get_plot_state(curve) @@ -182,7 +182,7 @@ def test_element_plot_zticks_items(self): class TestOverlayPlot(TestPlotlyPlot): - + def test_overlay_state(self): layout = Curve([1, 2, 3]) * Curve([2, 4, 6]) state = self._get_plot_state(layout) @@ -191,7 +191,7 @@ def test_overlay_state(self): self.assertEqual(state['layout']['yaxis']['range'], [1, 6]) ### Axis log ### - + def test_overlay_plot_logx(self): curve = (Curve([(10, 1), (100, 2), (1000, 3)]) * Curve([])).options(logx=True) state = self._get_plot_state(curve) @@ -208,7 +208,7 @@ def test_overlay_plot_logz(self): self.assertEqual(state['layout']['scene']['zaxis']['type'], 'log') ### Axis labelling ### - + def test_overlay_plot_xlabel(self): overlay = Curve([]) * Curve([(10, 1), (100, 2), (1000, 3)]).options(xlabel='X-Axis') state = self._get_plot_state(overlay) diff --git a/holoviews/tests/plotting/plotly/testerrorbarplot.py b/holoviews/tests/plotting/plotly/test_errorbarplot.py similarity index 97% rename from holoviews/tests/plotting/plotly/testerrorbarplot.py rename to holoviews/tests/plotting/plotly/test_errorbarplot.py index e037e1e321..e91c7c8d00 100644 --- a/holoviews/tests/plotting/plotly/testerrorbarplot.py +++ b/holoviews/tests/plotting/plotly/test_errorbarplot.py @@ -2,7 +2,7 @@ from holoviews.element import ErrorBars -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestErrorBarsPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testfiguresize.py b/holoviews/tests/plotting/plotly/test_figuresize.py similarity index 89% rename from holoviews/tests/plotting/plotly/testfiguresize.py rename to holoviews/tests/plotting/plotly/test_figuresize.py index 7519efcd4e..61627dd739 100644 --- a/holoviews/tests/plotting/plotly/testfiguresize.py +++ b/holoviews/tests/plotting/plotly/test_figuresize.py @@ -1,5 +1,5 @@ from holoviews.element import Points 
-from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestImagePlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testgridplot.py b/holoviews/tests/plotting/plotly/test_gridplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testgridplot.py rename to holoviews/tests/plotting/plotly/test_gridplot.py index c485809806..b79421c228 100644 --- a/holoviews/tests/plotting/plotly/testgridplot.py +++ b/holoviews/tests/plotting/plotly/test_gridplot.py @@ -3,7 +3,7 @@ from holoviews.core.spaces import GridSpace from holoviews.element import Scatter, Curve -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestGridPlot(TestPlotlyPlot): @@ -77,7 +77,7 @@ def test_layout_with_grid(self): self.assertEqual(state['layout']['yaxis2']['domain'], [0, 0.5]) self.assertEqual(state['layout']['yaxis3']['domain'], [0.5, 1.0]) - + def test_grid_state(self): grid = GridSpace({(i, j): Curve([i, j]) for i in [0, 1] for j in [0, 1]}) diff --git a/holoviews/tests/plotting/plotly/testhistogram.py b/holoviews/tests/plotting/plotly/test_histogram.py similarity index 96% rename from holoviews/tests/plotting/plotly/testhistogram.py rename to holoviews/tests/plotting/plotly/test_histogram.py index 5eaf43d397..bfdeb5f7e7 100644 --- a/holoviews/tests/plotting/plotly/testhistogram.py +++ b/holoviews/tests/plotting/plotly/test_histogram.py @@ -2,13 +2,13 @@ from holoviews.element import Histogram -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestHistogramPlot(TestPlotlyPlot): def setUp(self): - super(TestHistogramPlot, self).setUp() + super().setUp() self.frequencies = [1, 3, 5, 4, 2, 0] self.edges = [-3, -2, -1, 0, 1, 2] diff --git a/holoviews/tests/plotting/plotly/testimageplot.py b/holoviews/tests/plotting/plotly/test_imageplot.py similarity index 71% rename from holoviews/tests/plotting/plotly/testimageplot.py rename to holoviews/tests/plotting/plotly/test_imageplot.py 
index 729987231d..defa819398 100644 --- a/holoviews/tests/plotting/plotly/testimageplot.py +++ b/holoviews/tests/plotting/plotly/test_imageplot.py @@ -2,7 +2,7 @@ from holoviews.element import Image -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestImagePlot(TestPlotlyPlot): @@ -21,6 +21,19 @@ def test_image_state(self): self.assertEqual(state['layout']['xaxis']['range'], [0.5, 3.5]) self.assertEqual(state['layout']['yaxis']['range'], [-0.5, 1.5]) + def test_image_nodata(self): + img = Image(([1, 2, 3], [0, 1], np.array([[0, 1, 2], [2, 3, 4]]))).opts(nodata=0) + state = self._get_plot_state(img) + self.assertEqual(state['data'][0]['type'], 'heatmap') + self.assertEqual(state['data'][0]['z'], np.array([[np.NaN, 1, 2], [2, 3, 4]])) + + def test_image_nodata_unint(self): + img = Image(([1, 2, 3], [0, 1], np.array([[0, 1, 2], [2, 3, 4]], + dtype='uint32'))).opts(nodata=0) + state = self._get_plot_state(img) + self.assertEqual(state['data'][0]['type'], 'heatmap') + self.assertEqual(state['data'][0]['z'], np.array([[np.NaN, 1, 2], [2, 3, 4]])) + def test_image_state_inverted(self): img = Image(([1, 2, 3], [0, 1], np.array([[0, 1, 2], [2, 3, 4]]))).options( invert_axes=True) diff --git a/holoviews/tests/plotting/plotly/testlabelplot.py b/holoviews/tests/plotting/plotly/test_labelplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testlabelplot.py rename to holoviews/tests/plotting/plotly/test_labelplot.py index dc0586b446..32a51b2e0f 100644 --- a/holoviews/tests/plotting/plotly/testlabelplot.py +++ b/holoviews/tests/plotting/plotly/test_labelplot.py @@ -2,7 +2,7 @@ from holoviews.element import Labels, Tiles -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestLabelsPlot(TestPlotlyPlot): @@ -55,7 +55,7 @@ def test_visible(self): class TestMapboxLabelsPlot(TestPlotlyPlot): def setUp(self): - super(TestMapboxLabelsPlot, self).setUp() + super().setUp() # Precompute coordinates self.xs 
= [3000000, 2000000, 1000000] diff --git a/holoviews/tests/plotting/plotly/testlayoutplot.py b/holoviews/tests/plotting/plotly/test_layoutplot.py similarity index 95% rename from holoviews/tests/plotting/plotly/testlayoutplot.py rename to holoviews/tests/plotting/plotly/test_layoutplot.py index 8702235109..0369e2c464 100644 --- a/holoviews/tests/plotting/plotly/testlayoutplot.py +++ b/holoviews/tests/plotting/plotly/test_layoutplot.py @@ -2,7 +2,7 @@ from holoviews.element import Curve, Image -from .testplot import TestPlotlyPlot, plotly_renderer +from .test_plot import TestPlotlyPlot, plotly_renderer class TestLayoutPlot(TestPlotlyPlot): @@ -20,7 +20,7 @@ def test_layout_instantiate_subplots_transposed(self): plot = plotly_renderer.get_plot(layout.options(transpose=True)) positions = [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)] self.assertEqual(sorted(plot.subplots.keys()), positions) - + def test_layout_state(self): layout = Curve([1, 2, 3]) + Curve([2, 4, 6]) state = self._get_plot_state(layout) diff --git a/holoviews/tests/plotting/plotly/testpath3d.py b/holoviews/tests/plotting/plotly/test_path3d.py similarity index 98% rename from holoviews/tests/plotting/plotly/testpath3d.py rename to holoviews/tests/plotting/plotly/test_path3d.py index e4bb1a7787..99f86ac2e7 100644 --- a/holoviews/tests/plotting/plotly/testpath3d.py +++ b/holoviews/tests/plotting/plotly/test_path3d.py @@ -2,7 +2,7 @@ from holoviews.element import Path3D -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestPath3DPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testplot.py b/holoviews/tests/plotting/plotly/test_plot.py similarity index 100% rename from holoviews/tests/plotting/plotly/testplot.py rename to holoviews/tests/plotting/plotly/test_plot.py diff --git a/holoviews/tests/plotting/plotly/testquadmeshplot.py b/holoviews/tests/plotting/plotly/test_quadmeshplot.py similarity index 70% rename from 
holoviews/tests/plotting/plotly/testquadmeshplot.py rename to holoviews/tests/plotting/plotly/test_quadmeshplot.py index 3cdf70cbcf..af445ea2a0 100644 --- a/holoviews/tests/plotting/plotly/testquadmeshplot.py +++ b/holoviews/tests/plotting/plotly/test_quadmeshplot.py @@ -2,7 +2,7 @@ from holoviews.element import QuadMesh -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestQuadMeshPlot(TestPlotlyPlot): @@ -19,6 +19,20 @@ def test_quadmesh_state(self): self.assertEqual(state['layout']['xaxis']['range'], [0.5, 5]) self.assertEqual(state['layout']['yaxis']['range'], [-0.5, 1.5]) + def test_quadmesh_nodata(self): + img = QuadMesh(([1, 2, 4], [0, 1], + np.array([[0, 1, 2], [2, 3, 4]]))).opts(nodata=0) + state = self._get_plot_state(img) + self.assertEqual(state['data'][0]['type'], 'heatmap') + self.assertEqual(state['data'][0]['z'], np.array([[np.NaN, 1, 2], [2, 3, 4]])) + + def test_quadmesh_nodata_uint(self): + img = QuadMesh(([1, 2, 4], [0, 1], + np.array([[0, 1, 2], [2, 3, 4]], dtype='uint32'))).opts(nodata=0) + state = self._get_plot_state(img) + self.assertEqual(state['data'][0]['type'], 'heatmap') + self.assertEqual(state['data'][0]['z'], np.array([[np.NaN, 1, 2], [2, 3, 4]])) + def test_quadmesh_state_inverted(self): img = QuadMesh(([1, 2, 4], [0, 1], np.array([[0, 1, 2], [2, 3, 4]]))).options( invert_axes=True) diff --git a/holoviews/tests/plotting/plotly/testrenderer.py b/holoviews/tests/plotting/plotly/test_renderer.py similarity index 99% rename from holoviews/tests/plotting/plotly/testrenderer.py rename to holoviews/tests/plotting/plotly/test_renderer.py index d99a7df5fc..2930f79192 100644 --- a/holoviews/tests/plotting/plotly/testrenderer.py +++ b/holoviews/tests/plotting/plotly/test_renderer.py @@ -2,8 +2,6 @@ """ Test cases for rendering exporters """ -from __future__ import unicode_literals - from collections import OrderedDict from unittest import SkipTest diff --git a/holoviews/tests/plotting/plotly/testrgb.py 
b/holoviews/tests/plotting/plotly/test_rgb.py similarity index 99% rename from holoviews/tests/plotting/plotly/testrgb.py rename to holoviews/tests/plotting/plotly/test_rgb.py index 954ff85f6a..dc1537684e 100644 --- a/holoviews/tests/plotting/plotly/testrgb.py +++ b/holoviews/tests/plotting/plotly/test_rgb.py @@ -9,7 +9,7 @@ from holoviews.element import RGB, Tiles -from .testplot import TestPlotlyPlot, plotly_renderer +from .test_plot import TestPlotlyPlot, plotly_renderer class TestRGBPlot(TestPlotlyPlot): @@ -249,7 +249,7 @@ def test_rgb_opacity(self): class TestMapboxRGBPlot(TestPlotlyPlot): def setUp(self): - super(TestMapboxRGBPlot, self).setUp() + super().setUp() # Precompute coordinates self.xs = [3000000, 2000000, 1000000] diff --git a/holoviews/tests/plotting/plotly/testscatter3dplot.py b/holoviews/tests/plotting/plotly/test_scatter3dplot.py similarity index 97% rename from holoviews/tests/plotting/plotly/testscatter3dplot.py rename to holoviews/tests/plotting/plotly/test_scatter3dplot.py index 6abb2a9a64..684370ce76 100644 --- a/holoviews/tests/plotting/plotly/testscatter3dplot.py +++ b/holoviews/tests/plotting/plotly/test_scatter3dplot.py @@ -2,7 +2,7 @@ from holoviews.element import Scatter3D -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestScatter3DPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testscatterplot.py b/holoviews/tests/plotting/plotly/test_scatterplot.py similarity index 88% rename from holoviews/tests/plotting/plotly/testscatterplot.py rename to holoviews/tests/plotting/plotly/test_scatterplot.py index e7ed144cdb..5131f20724 100644 --- a/holoviews/tests/plotting/plotly/testscatterplot.py +++ b/holoviews/tests/plotting/plotly/test_scatterplot.py @@ -2,7 +2,7 @@ from holoviews.element import Scatter, Tiles -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestScatterPlot(TestPlotlyPlot): @@ -43,16 +43,16 @@ def test_scatter_colors(self): (0, 1, 
'red'), (1, 2, 'green'), (2, 3, 'blue') ], vdims=['y', 'color']).options(color='color') state = self._get_plot_state(scatter) - self.assertEqual(state['data'][0]['marker']['color'], - np.array(['red', 'green', 'blue'])) + self.assertEqual(np.array_equal(state['data'][0]['marker']['color'], + np.array(['red', 'green', 'blue'])), True) def test_scatter_markers(self): scatter = Scatter([ (0, 1, 'square'), (1, 2, 'circle'), (2, 3, 'triangle-up') ], vdims=['y', 'marker']).options(marker='marker') state = self._get_plot_state(scatter) - self.assertEqual(state['data'][0]['marker']['symbol'], - np.array(['square', 'circle', 'triangle-up'])) + self.assertEqual(np.array_equal(state['data'][0]['marker']['symbol'], + np.array(['square', 'circle', 'triangle-up'])), True) def test_scatter_selectedpoints(self): scatter = Scatter([ @@ -112,16 +112,19 @@ def test_scatter_colors(self): (0, 1, 'red'), (1, 2, 'green'), (2, 3, 'blue') ], vdims=['y', 'color']).options(color='color') state = self._get_plot_state(scatter) - self.assertEqual(state['data'][1]['marker']['color'], - np.array(['red', 'green', 'blue'])) + self.assertEqual(np.array_equal(state['data'][1]['marker']['color'], + np.array(['red', 'green', 'blue'])), True) + def test_scatter_markers(self): scatter = Tiles('') * Scatter([ (0, 1, 'square'), (1, 2, 'circle'), (2, 3, 'triangle-up') ], vdims=['y', 'marker']).options(marker='marker') state = self._get_plot_state(scatter) - self.assertEqual(state['data'][1]['marker']['symbol'], - np.array(['square', 'circle', 'triangle-up'])) + self.assertEqual( + np.array_equal( + state['data'][1]['marker']['symbol'], + np.array(['square', 'circle', 'triangle-up'])), True) def test_scatter_selectedpoints(self): scatter = Tiles('') * Scatter([ diff --git a/holoviews/tests/plotting/plotly/testshapeplots.py b/holoviews/tests/plotting/plotly/test_shapeplots.py similarity index 99% rename from holoviews/tests/plotting/plotly/testshapeplots.py rename to 
holoviews/tests/plotting/plotly/test_shapeplots.py index 274b765deb..ea900c1de5 100644 --- a/holoviews/tests/plotting/plotly/testshapeplots.py +++ b/holoviews/tests/plotting/plotly/test_shapeplots.py @@ -2,7 +2,7 @@ VLine, HLine, Bounds, Box, Rectangles, Segments, Tiles, Path ) import numpy as np -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot default_shape_color = '#2a3f5f' @@ -24,7 +24,7 @@ def assert_shape_element_styling(self, element): class TestMapboxShape(TestPlotlyPlot): def setUp(self): - super(TestMapboxShape, self).setUp() + super().setUp() # Precompute coordinates self.xs = [3000000, 2000000, 1000000] diff --git a/holoviews/tests/plotting/plotly/testspreadplot.py b/holoviews/tests/plotting/plotly/test_spreadplot.py similarity index 97% rename from holoviews/tests/plotting/plotly/testspreadplot.py rename to holoviews/tests/plotting/plotly/test_spreadplot.py index 81e9316840..872b18f2ea 100644 --- a/holoviews/tests/plotting/plotly/testspreadplot.py +++ b/holoviews/tests/plotting/plotly/test_spreadplot.py @@ -2,7 +2,7 @@ from holoviews.element import Spread -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestSpreadPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testsurfaceplot.py b/holoviews/tests/plotting/plotly/test_surfaceplot.py similarity index 97% rename from holoviews/tests/plotting/plotly/testsurfaceplot.py rename to holoviews/tests/plotting/plotly/test_surfaceplot.py index 4ddb280e66..fcfc79e5c3 100644 --- a/holoviews/tests/plotting/plotly/testsurfaceplot.py +++ b/holoviews/tests/plotting/plotly/test_surfaceplot.py @@ -2,7 +2,7 @@ from holoviews.element import Surface -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestSurfacePlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testtableplot.py b/holoviews/tests/plotting/plotly/test_tableplot.py similarity index 94% rename from 
holoviews/tests/plotting/plotly/testtableplot.py rename to holoviews/tests/plotting/plotly/test_tableplot.py index 7a8df679bc..3253c2fece 100644 --- a/holoviews/tests/plotting/plotly/testtableplot.py +++ b/holoviews/tests/plotting/plotly/test_tableplot.py @@ -1,6 +1,6 @@ from holoviews.element import Table -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestTablePlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/plotly/testtiles.py b/holoviews/tests/plotting/plotly/test_tiles.py similarity index 97% rename from holoviews/tests/plotting/plotly/testtiles.py rename to holoviews/tests/plotting/plotly/test_tiles.py index 13c48bf877..bcd294cd0c 100644 --- a/holoviews/tests/plotting/plotly/testtiles.py +++ b/holoviews/tests/plotting/plotly/test_tiles.py @@ -1,12 +1,12 @@ from holoviews.element import RGB, Tiles, Points, Bounds from holoviews.element.tiles import StamenTerrain, _ATTRIBUTIONS -from .testplot import TestPlotlyPlot, plotly_renderer +from .test_plot import TestPlotlyPlot, plotly_renderer import numpy as np class TestMapboxTilesPlot(TestPlotlyPlot): def setUp(self): - super(TestMapboxTilesPlot, self).setUp() + super().setUp() # Precompute coordinates self.xs = [3000000, 2000000, 1000000] @@ -54,7 +54,7 @@ def test_mapbox_tiles_defaults(self): self.assertEqual(len(layers), 0) def test_styled_mapbox_tiles(self): - tiles = Tiles("").opts(mapboxstyle="dark", accesstoken="token-str").redim.range( + tiles = Tiles().opts(mapboxstyle="dark", accesstoken="token-str").redim.range( x=self.x_range, y=self.y_range ) @@ -99,7 +99,7 @@ def test_raster_layer(self): self.assertEqual(layer["sourcetype"], "raster") self.assertEqual(layer["minzoom"], 3) self.assertEqual(layer["maxzoom"], 7) - self.assertEqual(layer["sourceattribution"], _ATTRIBUTIONS[('stamen', 'com/t')]) + self.assertEqual(layer["sourceattribution"], _ATTRIBUTIONS[('stamen', 'net/t')]) def test_overlay(self): # Base layer is mapbox vector layer diff --git 
a/holoviews/tests/plotting/plotly/testviolinplot.py b/holoviews/tests/plotting/plotly/test_violinplot.py similarity index 98% rename from holoviews/tests/plotting/plotly/testviolinplot.py rename to holoviews/tests/plotting/plotly/test_violinplot.py index 2d77f5ff91..43aef9d17a 100644 --- a/holoviews/tests/plotting/plotly/testviolinplot.py +++ b/holoviews/tests/plotting/plotly/test_violinplot.py @@ -2,7 +2,7 @@ from holoviews.element import Violin -from .testplot import TestPlotlyPlot +from .test_plot import TestPlotlyPlot class TestViolinPlot(TestPlotlyPlot): diff --git a/holoviews/tests/plotting/testcomms.py b/holoviews/tests/plotting/test_comms.py similarity index 100% rename from holoviews/tests/plotting/testcomms.py rename to holoviews/tests/plotting/test_comms.py diff --git a/holoviews/tests/plotting/testplotutils.py b/holoviews/tests/plotting/test_plotutils.py similarity index 98% rename from holoviews/tests/plotting/testplotutils.py rename to holoviews/tests/plotting/test_plotutils.py index 31b6e13c39..26abcc4ccf 100644 --- a/holoviews/tests/plotting/testplotutils.py +++ b/holoviews/tests/plotting/test_plotutils.py @@ -1,8 +1,4 @@ -from __future__ import absolute_import, unicode_literals - -import sys - -from unittest import SkipTest, skipIf +from unittest import SkipTest import numpy as np @@ -26,9 +22,6 @@ except: bokeh_renderer = None -py2_skip = skipIf(sys.version_info.major == 2, "Not supported in python2") - - class TestOverlayableZorders(ComparisonTestCase): @@ -544,12 +537,10 @@ def test_bokeh_palette_categorical_palettes_not_interpolated(self): for cat in categorical: self.assertTrue(len(set(bokeh_palette_to_palette(cat))) <= 20) - @py2_skip def test_bokeh_colormap_fire(self): colors = process_cmap('fire', 3, provider='bokeh') self.assertEqual(colors, ['#000000', '#eb1300', '#ffffff']) - @py2_skip def test_bokeh_colormap_fire_r(self): colors = process_cmap('fire_r', 3, provider='bokeh') self.assertEqual(colors, ['#ffffff', '#ed1400', '#000000']) @@ 
-591,7 +582,7 @@ def test_bokeh_palette_perceptually_uniform_reverse(self): self.assertEqual(colors, ['#440154', '#30678D', '#35B778', '#FDE724'][::-1]) def test_color_intervals(self): - levels = [0, 38, 73, 95, 110, 130, 156] + levels = [0, 38, 73, 95, 110, 130, 156] colors = ['#5ebaff', '#00faf4', '#ffffcc', '#ffe775', '#ffc140', '#ff8f20'] cmap, lims = color_intervals(colors, levels, N=10) self.assertEqual(cmap, ['#5ebaff', '#5ebaff', '#00faf4', @@ -599,7 +590,7 @@ def test_color_intervals(self): '#ffc140', '#ff8f20', '#ff8f20']) def test_color_intervals_clipped(self): - levels = [0, 38, 73, 95, 110, 130, 156, 999] + levels = [0, 38, 73, 95, 110, 130, 156, 999] colors = ['#5ebaff', '#00faf4', '#ffffcc', '#ffe775', '#ffc140', '#ff8f20', '#ff6060'] cmap, lims = color_intervals(colors, levels, clip=(10, 90), N=100) self.assertEqual(cmap, ['#5ebaff', '#5ebaff', '#5ebaff', '#00faf4', '#00faf4', diff --git a/holoviews/tests/plotting/testrenderclass.py b/holoviews/tests/plotting/test_renderclass.py similarity index 94% rename from holoviews/tests/plotting/testrenderclass.py rename to holoviews/tests/plotting/test_renderclass.py index 3c76b78aa4..9ff469949f 100644 --- a/holoviews/tests/plotting/testrenderclass.py +++ b/holoviews/tests/plotting/test_renderclass.py @@ -2,8 +2,6 @@ """ Test cases for rendering exporters """ -from __future__ import unicode_literals - from holoviews.element.comparison import ComparisonTestCase from holoviews.plotting import Renderer diff --git a/holoviews/tests/test_annotators.py b/holoviews/tests/test_annotators.py index af66c3c769..21b1721913 100644 --- a/holoviews/tests/test_annotators.py +++ b/holoviews/tests/test_annotators.py @@ -3,7 +3,7 @@ from holoviews.element import Points, Path, Table from holoviews.element.tiles import Wikipedia, Tiles -from holoviews.tests.plotting.bokeh.testplot import TestBokehPlot +from holoviews.tests.plotting.bokeh.test_plot import TestBokehPlot class Test_annotate(TestBokehPlot): diff --git 
a/holoviews/tests/testselection.py b/holoviews/tests/test_selection.py similarity index 99% rename from holoviews/tests/testselection.py rename to holoviews/tests/test_selection.py index 68109a4059..4ca8894185 100644 --- a/holoviews/tests/testselection.py +++ b/holoviews/tests/test_selection.py @@ -3,7 +3,6 @@ import holoviews as hv import pandas as pd -from holoviews.core.util import unicode, basestring from holoviews.core.options import Cycle, Store from holoviews.element import ErrorBars, Points, Rectangles, Table, VSpan from holoviews.plotting.util import linear_gradient @@ -707,7 +706,7 @@ def setUp(self): import holoviews.plotting.plotly # noqa except: raise SkipTest("Plotly selection tests require plotly.") - super(TestLinkSelectionsPlotly, self).setUp() + super().setUp() self._backend = Store.current_backend Store.set_current_backend('plotly') @@ -723,7 +722,7 @@ def element_color(self, element, color_prop=None): else: color = element.opts.get('style').kwargs['color'] - if isinstance(color, (Cycle, basestring, unicode)): + if isinstance(color, (Cycle, str)): return color else: return list(color) @@ -738,7 +737,7 @@ def setUp(self): import holoviews.plotting.bokeh # noqa except: raise SkipTest("Bokeh selection tests require bokeh.") - super(TestLinkSelectionsBokeh, self).setUp() + super().setUp() self._backend = Store.current_backend Store.set_current_backend('bokeh') @@ -748,7 +747,7 @@ def tearDown(self): def element_color(self, element): color = element.opts.get('style').kwargs['color'] - if isinstance(color, (basestring, unicode)): + if isinstance(color, str): return color else: return list(color) diff --git a/holoviews/tests/teststreams.py b/holoviews/tests/test_streams.py similarity index 92% rename from holoviews/tests/teststreams.py rename to holoviews/tests/test_streams.py index 1705c1a933..3f5e50ec79 100644 --- a/holoviews/tests/teststreams.py +++ b/holoviews/tests/test_streams.py @@ -28,6 +28,17 @@ def test_all_stream_parameters_constant(): % 
(name, stream_cls.__name__)) +def test_all_linked_stream_parameters_owners(): + "Test to ensure operations can accept parameters in streams dictionary" + stream_classes = param.concrete_descendents(LinkedStream) + for stream_class in stream_classes.values(): + for name, p in stream_class.param.params().items(): + if name != 'name' and (p.owner != stream_class): + msg = ("Linked stream %r has parameter %r which is " + "inherited from %s. Parameter needs to be redeclared " + "in the class definition of this linked stream.") + raise Exception(msg % (stream_class, name, p.owner)) + class TestStreamsDefine(ComparisonTestCase): def setUp(self): @@ -60,24 +71,24 @@ def test_XY_instance(self): def test_XY_set_invalid_class_x(self): regexp = "Parameter 'x' only takes numeric values" - with self.assertRaisesRegexp(ValueError, regexp): + with self.assertRaisesRegex(ValueError, regexp): self.XY.x = 'string' def test_XY_set_invalid_class_y(self): regexp = "Parameter 'y' only takes numeric values" - with self.assertRaisesRegexp(ValueError, regexp): + with self.assertRaisesRegex(ValueError, regexp): self.XY.y = 'string' def test_XY_set_invalid_instance_x(self): xy = self.XY(x=1,y=2) regexp = "Parameter 'x' only takes numeric values" - with self.assertRaisesRegexp(ValueError, regexp): + with self.assertRaisesRegex(ValueError, regexp): xy.x = 'string' def test_XY_set_invalid_instance_y(self): xy = self.XY(x=1,y=2) regexp = "Parameter 'y' only takes numeric values" - with self.assertRaisesRegexp(ValueError, regexp): + with self.assertRaisesRegex(ValueError, regexp): xy.y = 'string' def test_XY_subscriber_triggered(self): @@ -148,7 +159,7 @@ def test_positionY_const_parameter(self): class TestParamsStream(LoggingComparisonTestCase): def setUp(self): - super(TestParamsStream, self).setUp() + super().setUp() class Inner(param.Parameterized): x = param.Number(default = 0) @@ -284,6 +295,22 @@ def subscriber(**kwargs): inner.x = 0 self.assertEqual(values, [{'action': inner.action, 'x': 
0}]) + def test_params_stream_batch_watch(self): + tap = Tap(x=0, y=1) + params = Params(parameters=[tap.param.x, tap.param.y]) + + values = [] + def subscriber(**kwargs): + values.append(kwargs) + params.add_subscriber(subscriber) + + tap.param.trigger('x', 'y') + + assert values == [{'x': 0, 'y': 1}] + + tap.event(x=1, y=2) + + assert values == [{'x': 0, 'y': 1}, {'x': 1, 'y': 2}] class TestParamMethodStream(ComparisonTestCase): @@ -358,6 +385,46 @@ def test(x): inner.x = 10 self.assertEqual(dmap[()], Points([10])) + + def test_param_instance_steams_dict(self): + inner = self.inner() + + def test(x): + return Points([x]) + + dmap = DynamicMap(test, streams=dict(x=inner.param.x)) + + inner.x = 10 + self.assertEqual(dmap[()], Points([10])) + + def test_param_class_steams_dict(self): + class ClassParamExample(param.Parameterized): + x = param.Number(default=1) + + def test(x): + return Points([x]) + + dmap = DynamicMap(test, streams=dict(x=ClassParamExample.param.x)) + + ClassParamExample.x = 10 + self.assertEqual(dmap[()], Points([10])) + + def test_panel_param_steams_dict(self): + try: + import panel + except: + raise SkipTest('Panel required for widget support in streams dict') + widget = panel.widgets.FloatSlider(value=1) + + def test(x): + return Points([x]) + + dmap = DynamicMap(test, streams=dict(x=widget)) + + widget.value = 10 + self.assertEqual(dmap[()], Points([10])) + + def test_param_method_depends_no_deps(self): inner = self.inner() stream = ParamMethod(inner.method_no_deps) @@ -627,13 +694,13 @@ def test_simple_rename_constructor(self): def test_invalid_rename_constructor(self): regexp = '(.+?)is not a stream parameter' - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): PointerXY(rename={'x':'xtest', 'z':'ytest'}, x=0, y=4) self.assertEqual(str(cm).endswith(), True) def test_clashing_rename_constructor(self): regexp = '(.+?)parameter of the same name' - with self.assertRaisesRegexp(KeyError, regexp): + 
with self.assertRaisesRegex(KeyError, regexp): PointerXY(rename={'x':'xtest', 'y':'x'}, x=0, y=4) def test_simple_rename_method(self): @@ -644,14 +711,14 @@ def test_simple_rename_method(self): def test_invalid_rename_method(self): xy = PointerXY(x=0, y=4) regexp = '(.+?)is not a stream parameter' - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): xy.rename(x='xtest', z='ytest') def test_clashing_rename_method(self): xy = PointerXY(x=0, y=4) regexp = '(.+?)parameter of the same name' - with self.assertRaisesRegexp(KeyError, regexp): + with self.assertRaisesRegex(KeyError, regexp): xy.rename(x='xtest', y='x') def test_update_rename_valid(self): @@ -664,18 +731,18 @@ def test_update_rename_invalid(self): xy = PointerXY(x=0, y=4) renamed = xy.rename(y='ytest') regexp = "ytest' is not a parameter of(.+?)" - with self.assertRaisesRegexp(ValueError, regexp): + with self.assertRaisesRegex(ValueError, regexp): renamed.event(ytest=8) def test_rename_suppression(self): renamed = PointerXY(x=0,y=0).rename(x=None) - self.assertEquals(renamed.contents, {'y':0}) + self.assertEqual(renamed.contents, {'y':0}) def test_rename_suppression_reenable(self): renamed = PointerXY(x=0,y=0).rename(x=None) - self.assertEquals(renamed.contents, {'y':0}) + self.assertEqual(renamed.contents, {'y':0}) reenabled = renamed.rename(x='foo') - self.assertEquals(reenabled.contents, {'foo':0, 'y':0}) + self.assertEqual(reenabled.contents, {'foo':0, 'y':0}) @@ -738,7 +805,7 @@ def test_init_buffer_array(self): def test_buffer_array_ndim_exception(self): error = "Only 2D array data may be streamed by Buffer." 
- with self.assertRaisesRegexp(ValueError, error): + with self.assertRaisesRegex(ValueError, error): Buffer(np.array([0, 1])) def test_buffer_array_send(self): @@ -759,19 +826,19 @@ def test_buffer_array_patch_larger_than_length(self): def test_buffer_array_send_verify_ndim_fail(self): buff = Buffer(np.array([[0, 1]])) error = 'Streamed array data must be two-dimensional' - with self.assertRaisesRegexp(ValueError, error): + with self.assertRaisesRegex(ValueError, error): buff.send(np.array([1])) def test_buffer_array_send_verify_shape_fail(self): buff = Buffer(np.array([[0, 1]])) error = "Streamed array data expeced to have 2 columns, got 3." - with self.assertRaisesRegexp(ValueError, error): + with self.assertRaisesRegex(ValueError, error): buff.send(np.array([[1, 2, 3]])) def test_buffer_array_send_verify_type_fail(self): buff = Buffer(np.array([[0, 1]])) error = "Input expected to be of type ndarray, got list." - with self.assertRaisesRegexp(TypeError, error): + with self.assertRaisesRegex(TypeError, error): buff.send([1]) @@ -806,14 +873,14 @@ def test_buffer_dict_send_verify_column_fail(self): data = {'x': np.array([0]), 'y': np.array([1])} buff = Buffer(data) error = "Input expected to have columns \['x', 'y'\], got \['x'\]" - with self.assertRaisesRegexp(IndexError, error): + with self.assertRaisesRegex(IndexError, error): buff.send({'x': np.array([2])}) def test_buffer_dict_send_verify_shape_fail(self): data = {'x': np.array([0]), 'y': np.array([1])} buff = Buffer(data) error = "Input columns expected to have the same number of rows." 
- with self.assertRaisesRegexp(ValueError, error): + with self.assertRaisesRegex(ValueError, error): buff.send({'x': np.array([2]), 'y': np.array([3, 4])}) @@ -822,7 +889,7 @@ class TestBufferDataFrameStream(ComparisonTestCase): def setUp(self): if pd is None: raise SkipTest('Pandas not available') - super(TestBufferDataFrameStream, self).setUp() + super().setUp() def test_init_buffer_dframe(self): data = pd.DataFrame({'x': np.array([1]), 'y': np.array([2])}) @@ -867,7 +934,7 @@ def test_buffer_dframe_send_verify_column_fail(self): data = pd.DataFrame({'x': np.array([0]), 'y': np.array([1])}) buff = Buffer(data, index=False) error = "Input expected to have columns \['x', 'y'\], got \['x'\]" - with self.assertRaisesRegexp(IndexError, error): + with self.assertRaisesRegex(IndexError, error): buff.send(pd.DataFrame({'x': np.array([2])})) def test_clear_buffer_dframe_with_index(self): @@ -882,7 +949,7 @@ class Sum(Derived): def __init__(self, val_streams, exclusive=False, base=0): self.base = base - super(Sum, self).__init__(input_streams=val_streams, exclusive=exclusive) + super().__init__(input_streams=val_streams, exclusive=exclusive) @property def constants(self): @@ -1230,6 +1297,10 @@ def test_selection_expr_stream_hist_invert_xaxis_yaxis(self): def test_selection_expr_stream_polygon_index_cols(self): # Create SelectionExpr on element + try: import shapely # noqa + except: + try: import spatialpandas # noqa + except: raise SkipTest('Shapely required for polygon selection') poly = Polygons([ [(0, 0, 'a'), (2, 0, 'a'), (1, 1, 'a')], [(2, 0, 'b'), (4, 0, 'b'), (3, 1, 'b')], diff --git a/holoviews/tests/util/testtransform.py b/holoviews/tests/util/test_transform.py similarity index 99% rename from holoviews/tests/util/testtransform.py rename to holoviews/tests/util/test_transform.py index 15d0a7bffe..7cbe8fa5a5 100644 --- a/holoviews/tests/util/testtransform.py +++ b/holoviews/tests/util/test_transform.py @@ -2,9 +2,6 @@ """ Unit tests for dim transforms """ -from 
__future__ import division - -import sys import pickle from collections import OrderedDict @@ -25,7 +22,6 @@ except: xr = None -py2_skip = skipIf(sys.version_info.major == 2, 'Requires Python>2') xr_skip = skipIf(xr is None, "xarray not available") from holoviews.core.data import Dataset @@ -151,6 +147,11 @@ def assert_apply(self, expr, expected, skip_dask=False, skip_no_index=False): check_names=False ) + # Lookup + + def test_int_lookup(self): + expr = dim(0) + self.check_apply(expr, self.linear_ints) def assert_apply_xarray(self, expr, expected, skip_dask=False, skip_no_index=False): import xarray as xr @@ -211,7 +212,6 @@ def assert_apply_xarray(self, expr, expected, skip_dask=False, skip_no_index=Fal expected_dask.compute(), ) - # Unary operators def test_abs_transform(self): @@ -484,7 +484,6 @@ def test_xarray_roll_method(self): self.assert_apply_xarray(expr, self.dataset_xarray.data.z.roll({'x': 1}, roll_coords=False)) @xr_skip - @py2_skip def test_xarray_coarsen_method(self): expr = dim('z').xr.coarsen({'x': 4}).mean() self.assert_apply_xarray(expr, self.dataset_xarray.data.z.coarsen({'x': 4}).mean()) diff --git a/holoviews/tests/util/testutils.py b/holoviews/tests/util/test_utils.py similarity index 72% rename from holoviews/tests/util/testutils.py rename to holoviews/tests/util/test_utils.py index 12e50b6aea..7fb3121bd9 100644 --- a/holoviews/tests/util/testutils.py +++ b/holoviews/tests/util/test_utils.py @@ -3,9 +3,7 @@ Unit tests of the helper functions in utils """ from unittest import SkipTest -import numpy as np -import holoviews as hv from holoviews import notebook_extension from holoviews.element.comparison import ComparisonTestCase from holoviews import Store @@ -40,7 +38,7 @@ def setUp(self): Store.renderers['bokeh'] = bokeh.BokehRenderer.instance() OutputSettings.options = OrderedDict(OutputSettings.defaults.items()) - super(TestOutputUtil, self).setUp() + super().setUp() def tearDown(self): Store.renderers['matplotlib'] = 
mpl.MPLRenderer.instance() @@ -49,7 +47,7 @@ def tearDown(self): OutputSettings.options = OrderedDict(OutputSettings.defaults.items()) for renderer in Store.renderers.values(): renderer.comm_manager = CommManager - super(TestOutputUtil, self).tearDown() + super().tearDown() def test_output_util_svg_string(self): self.assertEqual(OutputSettings.options.get('fig', None), None) @@ -89,47 +87,13 @@ def setUp(self): Store.current_backend = 'matplotlib' self.store_copy = OptionTree(sorted(Store.options().items()), groups=Options._option_groups) - super(TestOptsUtil, self).setUp() + super().setUp() def tearDown(self): Store.current_backend = self.backend Store.options(val=self.store_copy) Store._custom_options = {k:{} for k in Store._custom_options.keys()} - super(TestOptsUtil, self).tearDown() - - def test_cell_opts_util_style(self): - mat1 = hv.Image(np.random.rand(5,5), name='mat1') - self.assertEqual(mat1.id, None) - opts("Image (cmap='hot')", mat1) - self.assertNotEqual(mat1.id, None) - - self.assertEqual( - Store.lookup_options('matplotlib', - mat1, 'style').options.get('cmap',None),'hot') - self.log_handler.assertContains('WARNING', 'Double positional argument signature of opts is deprecated') - - def test_cell_opts_util_plot(self): - - mat1 = hv.Image(np.random.rand(5,5), name='mat1') - - self.assertEqual(mat1.id, None) - opts("Image [show_title=False]", mat1) - self.assertNotEqual(mat1.id, None) - self.assertEqual( - Store.lookup_options('matplotlib', - mat1, 'plot').options.get('show_title',True), False) - self.log_handler.assertContains('WARNING', 'Double positional argument signature of opts is deprecated') - - def test_cell_opts_util_norm(self): - mat1 = hv.Image(np.random.rand(5,5), name='mat1') - self.assertEqual(mat1.id, None) - opts("Image {+axiswise}", mat1) - self.assertNotEqual(mat1.id, None) - - self.assertEqual( - Store.lookup_options('matplotlib', - mat1, 'norm').options.get('axiswise',True), True) - self.log_handler.assertContains('WARNING', 
'Double positional argument signature of opts is deprecated') + super().tearDown() def test_opts_builder_repr(self): magic= "Bivariate [bandwidth=0.5] (cmap='jet') Points [logx=True] (size=2)" diff --git a/holoviews/tests/utils.py b/holoviews/tests/utils.py index 63571071c5..1fa1d024dc 100644 --- a/holoviews/tests/utils.py +++ b/holoviews/tests/utils.py @@ -29,7 +29,7 @@ def __init__(self, *args, **kwargs): 'INFO':'param.param.message()', 'VERBOSE':'param.param.verbose()', 'DEBUG':'param.param.debug()'} - super(MockLoggingHandler, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def emit(self, record): "Store a message to the instance's messages dictionary" @@ -65,7 +65,7 @@ def assertEndsWith(self, level, substring): substring=repr(substring))) else: self.messages[level].pop(-1) - + def assertContains(self, level, substring): """ @@ -95,7 +95,7 @@ class LoggingComparisonTestCase(ComparisonTestCase): """ def setUp(self): - super(LoggingComparisonTestCase, self).setUp() + super().setUp() log = param.parameterized.get_logger() self.handlers = log.handlers log.handlers = [] @@ -103,7 +103,7 @@ def setUp(self): log.addHandler(self.log_handler) def tearDown(self): - super(LoggingComparisonTestCase, self).tearDown() + super().tearDown() log = param.parameterized.get_logger() log.handlers = self.handlers messages = self.log_handler.messages diff --git a/holoviews/util/__init__.py b/holoviews/util/__init__.py index 55a071c872..12a7744217 100644 --- a/holoviews/util/__init__.py +++ b/holoviews/util/__init__.py @@ -1,29 +1,26 @@ import os, sys, inspect, shutil from collections import defaultdict +from inspect import Parameter, Signature from types import FunctionType - - -try: - from pathlib import Path -except: - Path = None +from pathlib import Path import param + from pyviz_comms import extension as _pyviz_extension from ..core import ( Dataset, DynamicMap, HoloMap, Dimensioned, ViewableElement, StoreOptions, Store ) -from ..core.options import 
options_policy, Keywords, Options +from ..core.options import Keywords, Options, options_policy from ..core.operation import Operation from ..core.overlay import Overlay -from ..core.util import basestring, merge_options_to_dict, OrderedDict +from ..core.util import merge_options_to_dict, OrderedDict from ..core.operation import OperationCallable from ..core import util from ..operation.element import function -from ..streams import Stream, Params +from ..streams import Stream, Params, streams_list_from_dict from .settings import OutputSettings, list_formats, list_backends Store.output_settings = OutputSettings @@ -94,7 +91,7 @@ class opts(param.ParameterizedFunction): strict, invalid keywords prevent the options being applied.""") def __init__(self, *args, **kwargs): # Needed for opts specific __signature__ - super(opts, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def __call__(self, *args, **params): if not params and not args: @@ -102,22 +99,6 @@ def __call__(self, *args, **params): elif params and not args: return Options(**params) - if len(args) == 1: - msg = ("Positional argument signature of opts is deprecated, " - "use opts.defaults instead.\nFor instance, instead of " - "opts('Points (size=5)') use opts.defaults(opts.Points(size=5))") - self.param.warning(msg) - self._linemagic(args[0]) - elif len(args) == 2: - msg = ("Double positional argument signature of opts is deprecated, " - "use the .options method instead.\nFor instance, instead of " - "opts('Points (size=5)', points) use points.opts(opts.Points(size=5))") - - self.param.warning(msg) - - self._cellmagic(args[0], args[1]) - - @classmethod def _group_kwargs_to_options(cls, obj, kwargs): "Format option group kwargs into canonical options format" @@ -195,8 +176,8 @@ def apply_groups(cls, obj, options=None, backend=None, clone=True, **kwargs): Applies options on an object or nested group of objects, returning a new object with the options applied. 
This method - accepts the separate option namespaces explicitly (i.e 'plot', - 'style' and 'norm'). + accepts the separate option namespaces explicitly (i.e. 'plot', + 'style', and 'norm'). If the options are to be set directly on the object a simple format may be used, e.g.: @@ -229,7 +210,7 @@ def apply_groups(cls, obj, options=None, backend=None, clone=True, **kwargs): Returns: Returns the object or a clone with the options applied """ - if isinstance(options, basestring): + if isinstance(options, str): from ..util.parser import OptsSpec try: options = OptsSpec.parse(options) @@ -246,7 +227,7 @@ def apply_groups(cls, obj, options=None, backend=None, clone=True, **kwargs): @classmethod def _process_magic(cls, options, strict, backends=None): - if isinstance(options, basestring): + if isinstance(options, str): from .parser import OptsSpec try: ns = get_ipython().user_ns # noqa except: ns = globals() @@ -260,26 +241,14 @@ def _process_magic(cls, options, strict, backends=None): return options, True return options, False - @classmethod - def _cellmagic(cls, options, obj, strict=False): - "Deprecated, not expected to be used by any current code" - options, failure = cls._process_magic(options, strict) - if failure: return obj - if not isinstance(obj, Dimensioned): - return obj - else: - return StoreOptions.set_options(obj, options) - @classmethod def _linemagic(cls, options, strict=False, backend=None): - "Deprecated, not expected to be used by any current code" backends = None if backend is None else [backend] options, failure = cls._process_magic(options, strict, backends=backends) if failure: return with options_policy(skip_invalid=True, warn_on_skip=False): StoreOptions.apply_customizations(options, Store.options(backend=backend)) - @classmethod def defaults(cls, *options, **kwargs): """Set default options for a session. 
@@ -296,7 +265,6 @@ def defaults(cls, *options, **kwargs): cls._linemagic(cls._expand_options(merge_options_to_dict(options)), backend=kwargs.get('backend')) - @classmethod def _expand_by_backend(cls, options, backend): """ @@ -398,7 +366,7 @@ def _options_error(cls, opt, objtype, backend, valid_options): else: raise ValueError('Unexpected option %r for %s type ' 'when using the %r extension. No ' - 'similar options founds.' % + 'similar options found.' % (opt, objtype, backend)) # Check option is invalid for all backends @@ -436,7 +404,7 @@ def _builder_reprs(cls, options, namespace=None, ns=None): namespace is typically given as 'hv' if fully qualified namespaces are desired. """ - if isinstance(options, basestring): + if isinstance(options, str): from .parser import OptsSpec if ns is None: try: ns = get_ipython().user_ns # noqa @@ -481,9 +449,9 @@ def builder(cls, spec=None, **kws): return Options(spec, **kws) mismatched[loaded_backend] = list(keys - valid) - invalid = keys - all_valid_kws # Keys not found for any backend + invalid = keys - all_valid_kws # Keys not found for any backend if mismatched and not invalid: # Keys found across multiple backends - msg = ('{prefix} keywords supplied are mixed across backends. ' + msg = ('{prefix}keywords supplied are mixed across backends. 
' 'Keyword(s) {info}') info = ', '.join('%s are invalid for %s' % (', '.join(repr(el) for el in v), k) @@ -506,15 +474,10 @@ def builder(cls, spec=None, **kws): filtered_keywords = [k for k in completions if k not in cls._no_completion] sorted_kw_set = sorted(set(filtered_keywords)) - if sys.version_info.major == 2: - kws = ', '.join('{opt}=None'.format(opt=opt) for opt in sorted_kw_set) - builder.__doc__ = '{element}({kws})'.format(element=element, kws=kws) - else: - from inspect import Parameter, Signature - signature = Signature([Parameter('spec', Parameter.POSITIONAL_OR_KEYWORD)] - + [Parameter(kw, Parameter.KEYWORD_ONLY) - for kw in sorted_kw_set]) - builder.__signature__ = signature + signature = Signature([Parameter('spec', Parameter.POSITIONAL_OR_KEYWORD)] + + [Parameter(kw, Parameter.KEYWORD_ONLY) + for kw in sorted_kw_set]) + builder.__signature__ = signature return classmethod(builder) @classmethod @@ -540,7 +503,6 @@ def _element_keywords(cls, backend, elements=None): @classmethod def _update_backend(cls, backend): - if cls.__original_docstring__ is None: cls.__original_docstring__ = cls.__doc__ @@ -554,17 +516,11 @@ def _update_backend(cls, backend): filtered_keywords = [k for k in all_keywords if k not in cls._no_completion] sorted_kw_set = sorted(set(filtered_keywords)) - if sys.version_info.major == 2: - kws = ', '.join('{opt}=None'.format(opt=opt) for opt in sorted_kw_set) - old_doc = cls.__original_docstring__.replace( - 'params(strict=Boolean, name=String)','') - cls.__doc__ = '\n opts({kws})'.format(kws=kws) + old_doc - else: - from inspect import Parameter, Signature - signature = Signature([Parameter('args', Parameter.VAR_POSITIONAL)] - + [Parameter(kw, Parameter.KEYWORD_ONLY) - for kw in sorted_kw_set]) - cls.__init__.__signature__ = signature + from inspect import Parameter, Signature + signature = Signature([Parameter('args', Parameter.VAR_POSITIONAL)] + + [Parameter(kw, Parameter.KEYWORD_ONLY) + for kw in sorted_kw_set]) + 
cls.__init__.__signature__ = signature Store._backend_switch_hooks.append(opts._update_backend) @@ -625,7 +581,7 @@ def __call__(self, *args, **options): line, obj = None,None if len(args) > 2: raise TypeError('The opts utility accepts one or two positional arguments.') - if len(args) == 1 and not isinstance(args[0], basestring): + if len(args) == 1 and not isinstance(args[0], str): obj = args[0] elif len(args) == 1: line = args[0] @@ -638,9 +594,6 @@ def __call__(self, *args, **options): for k in options.keys(): if k not in Store.output_settings.allowed: raise KeyError('Invalid keyword: %s' % k) - if 'filename' in options: - self.param.warning('The filename argument of output is deprecated. ' - 'Use hv.save instead.') def display_fn(obj, renderer): try: @@ -656,11 +609,8 @@ def display_fn(obj, renderer): else: Store.output_settings.output(line=line, help_prompt=help_prompt, **options) -if sys.version_info.major == 2: - output.__doc__ = Store.output_settings._generate_docstring(signature=True) -else: - output.__doc__ = Store.output_settings._generate_docstring(signature=False) - output.__init__.__signature__ = Store.output_settings._generate_signature() +output.__doc__ = Store.output_settings._generate_docstring(signature=False) +output.__init__.__signature__ = Store.output_settings._generate_signature() def renderer(name): @@ -806,9 +756,9 @@ def save(obj, filename, fmt='auto', backend=None, resources='cdn', toolbar=None, obj = obj.opts(toolbar=None) if kwargs: renderer_obj = renderer_obj.instance(**kwargs) - if Path is not None and isinstance(filename, Path): + if isinstance(filename, Path): filename = str(filename.absolute()) - if isinstance(filename, basestring): + if isinstance(filename, str): supported = [mfmt for tformats in renderer_obj.mode_formats.values() for mfmt in tformats] formats = filename.split('.') @@ -902,7 +852,7 @@ class Dynamic(param.ParameterizedFunction): shared_data = param.Boolean(default=False, doc=""" Whether the cloned DynamicMap will 
share the same cache.""") - streams = param.List(default=[], doc=""" + streams = param.ClassSelector(default=[], class_=(list, dict), doc=""" List of streams to attach to the returned DynamicMap""") def __call__(self, map_obj, **params): @@ -929,9 +879,32 @@ def _get_streams(self, map_obj, watch=True): of supplied stream classes and instances are processed and added to the list. """ + if isinstance(self.p.streams, dict): + streams = defaultdict(dict) + stream_specs, params = [], {} + for name, p in self.p.streams.items(): + if not isinstance(p, param.Parameter): + raise ValueError("Stream dictionary must map operation keywords " + "to parameter names. Cannot handle %r type." + % type(p)) + if inspect.isclass(p.owner) and issubclass(p.owner, Stream): + if p.name != name: + streams[p.owner][p.name] = name + else: + streams[p.owner] = {} + else: + params[name] = p + stream_specs = streams_list_from_dict(params) + # Note that the correct stream instance will only be created + # correctly of the parameter's .owner points to the correct + # class (i.e the parameter isn't defined on a superclass) + stream_specs += [stream(rename=rename) for stream, rename in streams.items()] + else: + stream_specs = self.p.streams + streams = [] op = self.p.operation - for stream in self.p.streams: + for stream in stream_specs: if inspect.isclass(stream) and issubclass(stream, Stream): stream = stream() elif not (isinstance(stream, Stream) or util.is_param_method(stream)): @@ -940,9 +913,10 @@ def _get_streams(self, map_obj, watch=True): if isinstance(op, Operation): updates = {k: op.p.get(k) for k, v in stream.contents.items() if v is None and k in op.p} - if updates: + if not isinstance(stream, Params): reverse = {v: k for k, v in stream._rename.items()} - stream.update(**{reverse.get(k, k): v for k, v in updates.items()}) + updates = {reverse.get(k, k): v for k, v in updates.items()} + stream.update(**updates) streams.append(stream) params = {} diff --git a/holoviews/util/command.py 
b/holoviews/util/command.py index 77acb749f4..72bc563d2a 100755 --- a/holoviews/util/command.py +++ b/holoviews/util/command.py @@ -5,8 +5,6 @@ holoviews Conversion_Example.ipynb """ -from __future__ import absolute_import, print_function - import sys import os import argparse diff --git a/holoviews/util/parser.py b/holoviews/util/parser.py index e006113fd7..253d37caf0 100644 --- a/holoviews/util/parser.py +++ b/holoviews/util/parser.py @@ -8,10 +8,10 @@ Pyparsing is required by matplotlib and will therefore be available if HoloViews is being used in conjunction with matplotlib. """ -from __future__ import division -import param from itertools import groupby + import numpy as np +import param import pyparsing as pp from ..core.options import Options, Cycle, Palette @@ -96,7 +96,7 @@ def todict(cls, parseresult, mode='parens', ns={}): if val is False: elements =list(items) # Assume anything before ) or } can be joined with commas - # (e.g tuples with spaces in them) + # (e.g. tuples with spaces in them) joiner=',' if any(((')' in el) or ('}' in el)) for el in elements) else '' grouped[-1] += joiner + joiner.join(elements) diff --git a/holoviews/util/settings.py b/holoviews/util/settings.py index f26a4a8efb..7c0bcffe55 100644 --- a/holoviews/util/settings.py +++ b/holoviews/util/settings.py @@ -1,8 +1,7 @@ - from collections import defaultdict from ..core import OrderedDict from ..core import Store -from ..core.util import basestring + class KeywordSettings(object): """ @@ -157,7 +156,7 @@ class OutputSettings(KeywordSettings): 'left', 'bottom', 'right', 'top', 'top_left', 'top_right', 'bottom_left', 'bottom_right', 'left_top', 'left_bottom', 'right_top', 'right_bottom'], - 'css' : {k: basestring + 'css' : {k: str for k in ['width', 'height', 'padding', 'margin', 'max-width', 'min-width', 'max-height', 'min-height', 'outline', 'float']}} @@ -231,7 +230,7 @@ def _generate_docstring(cls, signature=False): % cls.defaults['info']) css = ("css : Optional css style 
attributes to apply to the figure image tag") widget_location = "widget_location : The position of the widgets relative to the plot" - + descriptions = [backend, fig, holomap, widgets, fps, max_frames, size, dpi, filename, info, css, widget_location] keywords = ['backend', 'fig', 'holomap', 'widgets', 'fps', 'max_frames', diff --git a/holoviews/util/transform.py b/holoviews/util/transform.py index cd97870079..c280454369 100644 --- a/holoviews/util/transform.py +++ b/holoviews/util/transform.py @@ -1,5 +1,3 @@ -from __future__ import division - import operator import sys @@ -10,7 +8,7 @@ from ..core.data import PandasInterface from ..core.dimension import Dimension -from ..core.util import basestring, pd, resolve_dependent_value, unique_iterator +from ..core.util import pd, resolve_dependent_value, unique_iterator def _maybe_map(numpy_fn): @@ -219,13 +217,18 @@ class dim(object): _accessor = None def __init__(self, obj, *args, **kwargs): + from panel.widgets import Widget ops = [] self._ns = np.ndarray self.coerce = kwargs.get('coerce', True) - if isinstance(obj, basestring): + if isinstance(obj, str): self.dimension = Dimension(obj) - elif isinstance(obj, Dimension): + elif isinstance(obj, (Dimension, int)): + self.dimension = obj + elif isinstance(obj, param.Parameter): self.dimension = obj + elif isinstance(obj, Widget): + self.dimension = obj.param.value else: self.dimension = obj.dimension ops = obj.ops @@ -234,7 +237,7 @@ def __init__(self, obj, *args, **kwargs): else: fn = None if fn is not None: - if not (isinstance(fn, function_types+(basestring,)) or + if not (isinstance(fn, function_types+(str,)) or any(fn in funcs for funcs in self._all_funcs)): raise ValueError('Second argument must be a function, ' 'found %s type' % type(fn)) @@ -254,7 +257,7 @@ def _current_accessor(self): return self.ops[-1]['fn'] def __call__(self, *args, **kwargs): - if (not self.ops or not isinstance(self.ops[-1]['fn'], basestring) or + if (not self.ops or not 
isinstance(self.ops[-1]['fn'], str) or 'accessor' not in self.ops[-1]['kwargs']): raise ValueError("Cannot call method on %r expression. " "Only methods accessed via namspaces, " @@ -267,11 +270,25 @@ def __call__(self, *args, **kwargs): new_op = dict(op, args=args, kwargs=kwargs) return self.clone(self.dimension, self.ops[:-1]+[new_op]) - def __getattr__(self, attr): - if attr in dir(self): + def __getattribute__(self, attr): + self_dict = super().__getattribute__('__dict__') + if '_ns' not in self_dict: # Not yet initialized + return super().__getattribute__(attr) + ns = self_dict['_ns'] + ops = super().__getattribute__('ops') + if ops and ops[-1]['kwargs'].get('accessor'): + try: + ns = getattr(ns, ops[-1]['fn']) + except Exception: + # If the namespace doesn't know the method we are + # calling then we are using custom API of the dim + # transform itself, so set namespace to None + ns = None + extras = {ns_attr for ns_attr in dir(ns) if not ns_attr.startswith('_')} + if attr in extras and attr not in super(dim, self).__dir__(): return type(self)(self, attr, accessor=True) - raise AttributeError("%r object has no attribute %r" % - (type(self).__name__, attr)) + else: + return super().__getattribute__(attr) def __dir__(self): ns = self._ns @@ -318,13 +335,22 @@ def params(self): from panel.widgets.base import Widget if isinstance(op_arg, Widget): op_arg = op_arg.param.value + if isinstance(op_arg, dim): + params.update(op_arg.params) + elif isinstance(op_arg, slice): + if isinstance(op_arg.start, param.Parameter): + params[op_arg.start.name+str(id(op_arg.start))] = op_arg.start + if isinstance(op_arg.stop, param.Parameter): + params[op_arg.stop.name+str(id(op_arg.stop))] = op_arg.stop + if isinstance(op_arg.step, param.Parameter): + params[op_arg.step.name+str(id(op_arg.step))] = op_arg.step if (isinstance(op_arg, param.Parameter) and isinstance(op_arg.owner, param.Parameterized)): - params[op_arg.name+str(id(op))] = op_arg + params[op_arg.name+str(id(op_arg))] = 
op_arg + return params # Namespace properties - @property def df(self): return self.clone(dim_type=df_dim) @@ -337,6 +363,9 @@ def np(self): def xr(self): return self.clone(dim_type=xr_dim) + def __getitem__(self, *index): + return type(self)(self, operator.getitem, *index) + # Builtin functions def __abs__(self): return type(self)(self, abs) def __round__(self, ndigits=None): @@ -397,7 +426,9 @@ def clip(self, min=None, max=None): def any(self, *args, **kwargs): return type(self)(self, np.any, *args, **kwargs) def all(self, *args, **kwargs): return type(self)(self, np.all, *args, **kwargs) def cumprod(self, *args, **kwargs): return type(self)(self, np.cumprod, *args, **kwargs) - def cumsum(self, *args, **kwargs): return type(self)(self, np.cumsum, *args, **kwargs) + def cumsum(self, *args, **kwargs): return type(self)(self, np.cumsum, *args, + axis=kwargs.pop('axis',0), + **kwargs) def max(self, *args, **kwargs): return type(self)(self, np.max, *args, **kwargs) def mean(self, *args, **kwargs): return type(self)(self, np.mean, *args, **kwargs) def min(self, *args, **kwargs): return type(self)(self, np.min, *args, **kwargs) @@ -480,7 +511,7 @@ def pipe(cls, func, *args, **kwargs): """ args = list(args) # make mutable for k, arg in enumerate(args): - if isinstance(arg, basestring): + if isinstance(arg, str): args[k] = cls(arg) return cls(args[0], func, *args[1:], **kwargs) @@ -499,7 +530,9 @@ def applies(self, dataset, strict=False): """ from ..element import Graph - if isinstance(self.dimension, dim): + if isinstance(self.dimension, param.Parameter): + applies = True + elif isinstance(self.dimension, dim): applies = self.dimension.applies(dataset) elif self.dimension.name == '*': applies = True @@ -536,7 +569,7 @@ def _resolve_op(self, op, dataset, data, flat, expanded, ranges, kwargs['axis'] = None fn = fn_name - if isinstance(fn, basestring): + if isinstance(fn, str): accessor = kwargs.pop('accessor', None) fn_args = [] else: @@ -549,6 +582,12 @@ def 
_resolve_op(self, op, dataset, data, flat, expanded, ranges, dataset, flat, expanded, ranges, all_values, keep_index, compute, strict ) + elif isinstance(arg, slice): + arg = slice( + resolve_dependent_value(arg.start), + resolve_dependent_value(arg.stop), + resolve_dependent_value(arg.step) + ) arg = resolve_dependent_value(arg) fn_args.append(arg) fn_kwargs = {} @@ -558,6 +597,12 @@ def _resolve_op(self, op, dataset, data, flat, expanded, ranges, dataset, flat, expanded, ranges, all_values, keep_index, compute, strict ) + elif isinstance(v, slice): + v = slice( + resolve_dependent_value(v.start), + resolve_dependent_value(v.stop), + resolve_dependent_value(v.step) + ) fn_kwargs[k] = resolve_dependent_value(v) args = tuple(fn_args[::-1] if op['reverse'] else fn_args) kwargs = dict(fn_kwargs) @@ -567,7 +612,7 @@ def _apply_fn(self, dataset, data, fn, fn_name, args, kwargs, accessor, drange): if (((fn is norm) or (fn is lognorm)) and drange != {} and not ('min' in kwargs and 'max' in kwargs)): data = fn(data, *drange) - elif isinstance(fn, basestring): + elif isinstance(fn, str): method = getattr(data, fn, None) if method is None: mtype = 'attribute' if accessor else 'method' @@ -634,6 +679,8 @@ def apply(self, dataset, flat=False, expanded=None, ranges={}, all_values=False, from ..element import Graph dimension = self.dimension + if isinstance(dimension, int): + dimension = dataset.get_dimension(dimension) if expanded is None: expanded = not ((dataset.interface.gridded and dimension in dataset.kdims) or (dataset.interface.multi and dataset.interface.isunique(dataset, dimension, True))) @@ -671,6 +718,9 @@ def apply(self, dataset, flat=False, expanded=None, ranges={}, all_values=False, if dimension.name == '*': data = dataset.data eldim = None + elif isinstance(dimension, param.Parameter): + data = getattr(dimension.owner, dimension.name) + eldim = None else: lookup = dimension if strict else dimension.name eldim = dataset.get_dimension(lookup).name @@ -723,14 
+773,14 @@ def __repr__(self): fn_name = self._unary_funcs[fn] format_string = '{fn}' + prev else: - if isinstance(fn, basestring): + if isinstance(fn, str): fn_name = fn else: fn_name = fn.__name__ if fn in self._builtin_funcs: fn_name = self._builtin_funcs[fn] format_string = '{fn}'+prev - elif isinstance(fn, basestring): + elif isinstance(fn, str): if accessor: sep = '' if op_repr.endswith(')') or prev_accessor else ')' format_string = prev+sep+'.{fn}' @@ -768,7 +818,7 @@ def __repr__(self): format_string += '{kwargs}' # Insert accessor - if i == 0 and self._accessor: + if i == 0 and self._accessor and ')' in format_string: idx = format_string.index(')') format_string = ''.join([ format_string[:idx], ').', self._accessor, @@ -798,7 +848,7 @@ class df_dim(dim): _accessor = 'pd' def __init__(self, obj, *args, **kwargs): - super(df_dim, self).__init__(obj, *args, **kwargs) + super().__init__(obj, *args, **kwargs) self._ns = pd.Series def interface_applies(self, dataset, coerce): @@ -841,7 +891,7 @@ def __init__(self, obj, *args, **kwargs): except ImportError: raise ImportError("XArray could not be imported, dim().xr " "requires the xarray to be available.") - super(xr_dim, self).__init__(obj, *args, **kwargs) + super().__init__(obj, *args, **kwargs) self._ns = xr.DataArray def interface_applies(self, dataset, coerce): diff --git a/setup.py b/setup.py index 099cc3b38b..02d9d50955 100644 --- a/setup.py +++ b/setup.py @@ -13,9 +13,10 @@ install_requires = [ "param >=1.9.3,<2.0", "numpy >=1.0", - "pyviz_comms >=0.7.3", - "panel >=0.8.0", - "pandas", + "pyviz_comms >=0.7.4", + "panel >=0.9.5", + "colorcet", + "pandas >=0.20.0", ] extras_require = {} @@ -25,7 +26,7 @@ # IPython Notebook + pandas + matplotlib + bokeh extras_require["recommended"] = extras_require["notebook"] + [ - "matplotlib >=2.2", + "matplotlib >=3", "bokeh >=1.1.0", ] @@ -37,9 +38,9 @@ "plotly >=4.0", 'dash >=1.16', "streamz >=0.5.0", - "datashader ==0.11.1", + "datashader >=0.11.1", "ffmpeg", - 
"cftime", + "cftime <1.5.0", # cftime.utime deprecated "netcdf4", "dask", "scipy", @@ -50,29 +51,26 @@ if sys.version_info.major > 2: extras_require["examples"].extend( [ - "spatialpandas", - "pyarrow <1.0", + "pyarrow", "ibis-framework >=1.3", ] # spatialpandas incompatibility ) # Extra third-party libraries extras_require["extras"] = extras_require["examples"] + [ - "cyordereddict", "pscript ==0.7.1", ] # Test requirements extras_require['tests'] = [ - 'nose', + 'pytest', + 'pytest-cov', 'mock', - 'flake8 ==3.6.0', + 'flake8', 'coveralls', 'path.py', - 'matplotlib >=2.2,<3.1', + 'matplotlib >=3', 'nbsmoke >=0.2.0', - 'pytest-cov ==2.5.1', - 'pytest <6.0', 'nbconvert <6', 'twine', 'rfc3986', @@ -83,7 +81,7 @@ extras_require["basic_tests"] = ( extras_require["tests"] - + ["matplotlib >=2.1", "bokeh >=1.1.0", "pandas"] + + ["matplotlib >=3", "bokeh >=1.1.0", "pandas"] + extras_require["notebook"] ) @@ -93,12 +91,11 @@ "deepdiff", "nbconvert ==5.3.1", "jsonschema ==2.6.0", - "cyordereddict", "ipython ==5.4.1", ] extras_require['doc'] = extras_require['examples'] + [ - 'nbsite >0.5.2', + 'nbsite >=0.6.8a36', 'sphinx', 'sphinx_holoviz_theme', 'mpl_sample_data >=3.1.3', @@ -113,15 +110,15 @@ "param >=1.7.0", "setuptools >=30.3.0", "pyct >=0.4.4", - "python <3.8", + "python <3.9", + "pip", ] -# Everything including cyordereddict (optimization) and nosetests +# Everything for examples and nosetests extras_require["all"] = list( set(extras_require["unit_tests"]) | set(extras_require["nbtests"]) ) - def get_setup_version(reponame): """ Helper to get the current version from either git describe or the @@ -131,7 +128,7 @@ def get_setup_version(reponame): version_file_path = os.path.join(basepath, reponame, ".version") try: from param import version - except: + except ImportError: version = None if version is not None: return version.Version.setup_version( @@ -167,8 +164,6 @@ def get_setup_version(reponame): classifiers=[ "License :: OSI Approved :: BSD License", "Development 
Status :: 5 - Production/Stable", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", diff --git a/tox.ini b/tox.ini index 86a2b781eb..a396f679d2 100644 --- a/tox.ini +++ b/tox.ini @@ -2,8 +2,8 @@ # tox config (works with tox alone). [tox] -# python version test group extra envs extra commands -envlist = {py27,py35,py36,py37}-{flakes,unit,examples,all_recommended,regression}-{default}-{dev,pkg} +# python version test group extra envs extra commands +envlist = {py36,py37,py38}-{flakes,unit,examples,all_recommended}-{default}-{dev,pkg} [_flakes] description = Flake check python @@ -13,13 +13,7 @@ commands = flake8 holoviews [_unit] description = Run unit tests with coverage deps = .[unit_tests] -passenv = TRAVIS TRAVIS_* -commands = nosetests holoviews.tests --with-doctest --with-coverage --cover-package=holoviews - -[_regression] -description = Run regression tests -deps = .[nbtests] -commands = python ./test_notebooks.py +commands = pytest holoviews --cov=./holoviews [_examples] description = Test that default examples run @@ -45,13 +39,12 @@ changedir = {envtmpdir} commands = examples-pkg: {[_pkg]commands} unit: {[_unit]commands} flakes: {[_flakes]commands} - regression: {[_regression]commands} + examples: {[_examples]commands} all_recommended: {[_all_recommended]commands} deps = unit: {[_unit]deps} flakes: {[_flakes]deps} all_recommended: {[_all_recommended]deps} - regression: {[_regression]deps} [flake8] include = *.py